diff --git a/action.yml b/action.yml index 1eac87a..bf73289 100644 --- a/action.yml +++ b/action.yml @@ -54,7 +54,7 @@ outputs: cache-hit: description: 'A boolean value to indicate an exact match was found for the primary key' runs: - using: 'node16' + using: 'node20' main: 'dist/main/index.js' post: 'dist/cleanup/index.js' post-if: 'success()' diff --git a/dist/cleanup/index.js b/dist/cleanup/index.js index c6200c0..ea401f3 100644 --- a/dist/cleanup/index.js +++ b/dist/cleanup/index.js @@ -283,8 +283,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge Object.defineProperty(exports, "__esModule", ({ value: true })); exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; const core = __importStar(__nccwpck_require__(2186)); -const http_client_1 = __nccwpck_require__(1825); -const auth_1 = __nccwpck_require__(2001); +const http_client_1 = __nccwpck_require__(6255); +const auth_1 = __nccwpck_require__(5526); const crypto = __importStar(__nccwpck_require__(6113)); const fs = __importStar(__nccwpck_require__(7147)); const url_1 = __nccwpck_require__(7310); @@ -319,7 +319,8 @@ function createHttpClient() { return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); } function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { - const components = paths; + // don't pass changes upstream + const components = paths.slice(); // Add compression method to cache version to restore // compressed cache as per compression method if (compressionMethod) { @@ -331,10 +332,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); - return crypto - .createHash('sha256') - .update(components.join('|')) - .digest('hex'); + return crypto.createHash('sha256').update(components.join('|')).digest('hex'); } exports.getCacheVersion = getCacheVersion; function getCacheEntry(keys, paths, options) { @@ -387,13 +385,21 @@ function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); const downloadOptions = (0, options_1.getDownloadOptions)(options); - if (downloadOptions.useAzureSdk && - archiveUrl.hostname.endsWith('.blob.core.windows.net')) { - // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. - yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + if (downloadOptions.useAzureSdk) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + } + else if (downloadOptions.concurrentBlobDownloads) { + // Use concurrent implementation with HttpClient to work around blob SDK issue + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + } } else { - // Otherwise, download using the Actions http-client. 
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } }); @@ -426,9 +432,7 @@ function getContentRange(start, end) { } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { - core.debug(`Uploading chunk of size ${end - - start + - 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { 'Content-Type': 'application/octet-stream', 'Content-Range': getContentRange(start, end) @@ -596,17 +600,19 @@ function getArchiveFileSizeInBytes(filePath) { } exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { - var e_1, _a; - var _b; + var _a, e_1, _b, _c; + var _d; return __awaiter(this, void 0, void 0, function* () { const paths = []; - const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd(); const globber = yield glob.create(patterns.join('\n'), { implicitDescendants: false }); try { - for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { - const file = _d.value; + for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { + _c = _g.value; + _e = false; + const file = _c; const relativeFile = path .relative(workspace, file) .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); @@ -624,7 +630,7 @@ function resolvePaths(patterns) { catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); } finally { if (e_1) throw e_1.error; } } @@ -719,7 +725,7 @@ var CacheFilename; (function (CacheFilename) { CacheFilename["Gzip"] = "cache.tgz"; CacheFilename["Zstd"] = "cache.tzst"; -})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); +})(CacheFilename || (exports.CacheFilename = CacheFilename = {})); var CompressionMethod; (function (CompressionMethod) { CompressionMethod["Gzip"] = "gzip"; @@ -727,12 +733,12 @@ var CompressionMethod; // This enum is for earlier version of zstd that does not have --long support CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; -})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {})); var ArchiveToolType; (function (ArchiveToolType) { ArchiveToolType["GNU"] = "gnu"; ArchiveToolType["BSD"] = "bsd"; -})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); +})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. 
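The getCacheVersion hunk above replaces `const components = paths` with `paths.slice()`. The distinction matters because the function goes on to push the compression method and a version salt onto `components`; with the aliased array those pushes leaked back into the caller's `paths` array. A minimal illustrative sketch (variable names assumed, not taken from the bundle):

// Old: components aliased the caller's array, so later pushes mutated it.
const cachePaths = ['node_modules'];
const aliased = cachePaths;
aliased.push('zstd', 'versionSalt');
console.log(cachePaths); // ['node_modules', 'zstd', 'versionSalt'] — the caller sees the extra entries

// New: a defensive copy keeps the caller's array intact.
const original = ['node_modules'];
const copied = original.slice();
copied.push('zstd', 'versionSalt');
console.log(original); // ['node_modules']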
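Further down in this file, getDownloadOptions now defaults useAzureSdk to false and adds concurrentBlobDownloads (default true), while downloadCache branches on the archive hostname first. The net effect is that caches hosted on *.blob.core.windows.net are fetched with the new concurrent http-client path unless a consumer explicitly opts back into the Azure SDK. A condensed, self-contained sketch of the resulting selection logic, assuming the option names and defaults shown in this diff:

// Which download implementation the bundle will pick for a given archive URL.
function pickDownloadPath(archiveLocation, options = {}) {
  const opts = { useAzureSdk: false, concurrentBlobDownloads: true, ...options };
  const onAzureBlob = new URL(archiveLocation).hostname.endsWith('.blob.core.windows.net');
  if (onAzureBlob && opts.useAzureSdk) return 'downloadCacheStorageSDK';
  if (onAzureBlob && opts.concurrentBlobDownloads) return 'downloadCacheHttpClientConcurrent';
  return 'downloadCacheHttpClient';
}

console.log(pickDownloadPath('https://example.blob.core.windows.net/container/cache.tzst'));
// -> 'downloadCacheHttpClientConcurrent' under the new defaults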
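The downloadCacheHttpClientConcurrent implementation added in the downloadUtils hunk below issues a HEAD request for the Content-Length, then fetches the archive as 4 MiB blocks via HTTP Range requests, keeping up to options.downloadConcurrency segments in flight (getDownloadOptions defaults this to 8). A small sketch of the block/Range computation it performs; the helper name is assumed, the 4 MiB block size comes from this diff:

// Split an archive of `contentLength` bytes into 4 MiB Range request descriptors.
function blockRanges(contentLength, blockSize = 4 * 1024 * 1024) {
  const ranges = [];
  for (let offset = 0; offset < contentLength; offset += blockSize) {
    const count = Math.min(blockSize, contentLength - offset);
    ranges.push({ offset, count, range: `bytes=${offset}-${offset + count - 1}` });
  }
  return ranges;
}

console.log(blockRanges(10 * 1024 * 1024).map(r => r.range));
// [ 'bytes=0-4194303', 'bytes=4194304-8388607', 'bytes=8388608-10485759' ]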
@@ -789,9 +795,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; +exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; const core = __importStar(__nccwpck_require__(2186)); -const http_client_1 = __nccwpck_require__(1825); +const http_client_1 = __nccwpck_require__(6255); const storage_blob_1 = __nccwpck_require__(4100); const buffer = __importStar(__nccwpck_require__(4300)); const fs = __importStar(__nccwpck_require__(7147)); @@ -946,6 +952,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) { }); } exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Actions toolkit http-client concurrently + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const archiveDescriptor = yield fs.promises.open(archivePath, 'w'); + const httpClient = new http_client_1.HttpClient('actions/cache', undefined, { + socketTimeout: options.timeoutInMs, + keepAlive: true + }); + try { + const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); })); + const lengthHeader = res.message.headers['content-length']; + if (lengthHeader === undefined || lengthHeader === null) { + throw new Error('Content-Length not found on blob response'); + } + const length = parseInt(lengthHeader); + if (Number.isNaN(length)) { + throw new Error(`Could not interpret Content-Length: ${length}`); + } + const downloads = []; + const blockSize = 4 * 1024 * 1024; + for (let offset = 0; offset < length; offset += blockSize) { + const count = Math.min(blockSize, length - offset); + downloads.push({ + offset, + promiseGetter: () => __awaiter(this, void 0, void 0, function* () { + return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); + }) + }); + } + // reverse to use .pop instead of .shift + downloads.reverse(); + let actives = 0; + let bytesDownloaded = 0; + const progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const progressFn = progress.onProgress(); + const activeDownloads = []; + let nextDownload; + const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () { + const segment = yield Promise.race(Object.values(activeDownloads)); + yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); + actives--; + delete activeDownloads[segment.offset]; + bytesDownloaded += segment.count; + progressFn({ loadedBytes: bytesDownloaded }); + }); + while ((nextDownload = downloads.pop())) { + activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); + actives++; + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? 
_a : 10)) { + yield waitAndWrite(); + } + } + while (actives > 0) { + yield waitAndWrite(); + } + } + finally { + httpClient.dispose(); + yield archiveDescriptor.close(); + } + }); +} +exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; +function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const retries = 5; + let failures = 0; + while (true) { + try { + const timeout = 30000; + const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); + if (typeof result === 'string') { + throw new Error('downloadSegmentRetry failed due to timeout'); + } + return result; + } + catch (err) { + if (failures >= retries) { + throw err; + } + failures++; + } + } + }); +} +function downloadSegment(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () { + return yield httpClient.get(archiveLocation, { + Range: `bytes=${offset}-${offset + count - 1}` + }); + })); + if (!partRes.readBodyBuffer) { + throw new Error('Expected HttpClientResponse to implement readBodyBuffer'); + } + return { + offset, + count, + buffer: yield partRes.readBodyBuffer() + }; + }); +} /** * Download the cache using the Azure Storage SDK. Only call this method if the * URL points to an Azure Storage endpoint. @@ -1067,7 +1182,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge Object.defineProperty(exports, "__esModule", ({ value: true })); exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0; const core = __importStar(__nccwpck_require__(2186)); -const http_client_1 = __nccwpck_require__(1825); +const http_client_1 = __nccwpck_require__(6255); const constants_1 = __nccwpck_require__(8840); function isSuccessStatusCode(statusCode) { if (!statusCode) { @@ -1511,7 +1626,8 @@ exports.getUploadOptions = getUploadOptions; */ function getDownloadOptions(copy) { const result = { - useAzureSdk: true, + useAzureSdk: false, + concurrentBlobDownloads: true, downloadConcurrency: 8, timeoutInMs: 30000, segmentTimeoutInMs: 600000, @@ -1521,6 +1637,9 @@ function getDownloadOptions(copy) { if (typeof copy.useAzureSdk === 'boolean') { result.useAzureSdk = copy.useAzureSdk; } + if (typeof copy.concurrentBlobDownloads === 'boolean') { + result.concurrentBlobDownloads = copy.concurrentBlobDownloads; + } if (typeof copy.downloadConcurrency === 'number') { result.downloadConcurrency = copy.downloadConcurrency; } @@ -2616,1086 +2735,886 @@ exports.SearchState = SearchState; /***/ }), -/***/ 2001: -/***/ (function(__unused_webpack_module, exports) { +/***/ 3771: +/***/ ((module, exports) => { -"use strict"; +exports = module.exports = SemVer -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} } -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Bearer ${this.token}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 + +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +// The actual regexps go on exports.re +var re = exports.re = [] +var safeRe = exports.safeRe = [] +var src = exports.src = [] +var t = exports.tokens = {} +var R = 0 + +function tok (n) { + t[n] = R++ } -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. 
+var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' + +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' + +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' + +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' 
+ +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' + +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' + +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' + +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') +var tildeTrimReplace = '$1~' + +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' + +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' + +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') +var caretTrimReplace = '$1^' + +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAIN] + ')' + + '\\s*$' + +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + safeRe[i] = new RegExp(makeSafeRe(src[i])) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version } -} -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; -//# sourceMappingURL=auth.js.map + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } -/***/ }), + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } -/***/ 1825: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } -"use strict"; + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose -/* eslint-disable @typescript-eslint/no-explicit-any */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(3685)); -const https = __importStar(__nccwpck_require__(5687)); -const pm = __importStar(__nccwpck_require__(4977)); -const tunnel = __importStar(__nccwpck_require__(4294)); -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; + var m = version.trim().match(options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() } -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' + this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version } -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - })); - }); - } + +SemVer.prototype.toString = function () { + return this.version } -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - const parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) } -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if 
(requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; - } - } - } - options(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - }); - } - get(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - }); - } - del(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - }); - } - post(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - }); - } - patch(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - }); - } - put(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - }); - } - head(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - }); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream, additionalHeaders); - }); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise - */ - getJson(requestUrl, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - const res = yield this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) } - postJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); + } while (++i) +} + +SemVer.prototype.compareBuild = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + var i = 0 + do { + var a = this.build[i] + var b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) } - putJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. 
+SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' } - patchJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - request(verb, requestUrl, data, headers) { - return __awaiter(this, void 0, void 0, function* () { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - const parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - do { - response = yield this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (const handler of this.handlers) { - if (handler.canHandleAuthentication(response)) { - authenticationHandler = handler; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. 
- return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (response.message.statusCode && - HttpRedirectCodes.includes(response.message.statusCode) && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - const parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol === 'https:' && - parsedUrl.protocol !== parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - yield response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (const header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = yield this.requestRaw(info, data); - redirectsRemaining--; - } - if (!response.message.statusCode || - !HttpResponseRetryCodes.includes(response.message.statusCode)) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - yield response.readBody(); - yield this._performExponentialBackoff(numTries); - } - } while (numTries < maxTries); - return response; - }); - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; - } - /** - * Raw request. - * @param info - * @param data - */ - requestRaw(info, data) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => { - function callbackForResult(err, res) { - if (err) { - reject(err); - } - else if (!res) { - // If `err` is not passed, then `res` must be passed. - reject(new Error('Unknown error')); - } - else { - resolve(res); - } - } - this.requestRawWithCallback(info, data, callbackForResult); - }); - }); - } - /** - * Raw request with callback. 
- * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - if (typeof data === 'string') { - if (!info.options.headers) { - info.options.headers = {}; - } - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - function handleResult(err, res) { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - } - const req = info.httpModule.request(info.options, (msg) => { - const res = new HttpClientResponse(msg); - handleResult(undefined, res); - }); - let socket; - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error(`Request timeout: ${info.options.path}`)); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); - } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - const parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - for (const handler of this.handlers) { - handler.prepareRequest(info.options); - } - } - return info; - } - _mergeHeaders(headers) { - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); - } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; - } - _getAgent(parsedUrl) { - let agent; - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. 
- if (agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. - if (proxyUrl && proxyUrl.hostname) { - const agentOptions = { - maxSockets, - keepAlive: this._keepAlive, - proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - })), { host: proxyUrl.hostname, port: proxyUrl.port }) - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - return __awaiter(this, void 0, void 0, function* () { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - }); - } - _processResponse(res, options) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - const statusCode = res.message.statusCode || 0; - const response = { - statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode === HttpCodes.NotFound) { - resolve(response); - } - // get the result from the body - function dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - const a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - let obj; - let contents; - try { - contents = yield res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = `Failed request: (${statusCode})`; - } - const err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - })); - }); + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } } + return defaultResult // may be undefined + } } -exports.HttpClient = HttpClient; -const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); -//# sourceMappingURL=index.js.map -/***/ }), +exports.compareIdentifiers = compareIdentifiers -/***/ 4977: -/***/ ((__unused_webpack_module, exports) => { +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) -"use strict"; + if (anum && bnum) { + a = +a + b = +b + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.checkBypass = exports.getProxyUrl = void 0; -function getProxyUrl(reqUrl) { - const usingSsl = reqUrl.protocol === 'https:'; - if (checkBypass(reqUrl)) { - return undefined; - } - const proxyVar = (() => { - if (usingSsl) { - return process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - return process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - })(); - if (proxyVar) { - return new URL(proxyVar); - } - else { - return undefined; - } + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 } -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - const reqHost = reqUrl.hostname; - if (isLoopbackAddress(reqHost)) { - return true; - } - const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - const upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (const upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperNoProxyItem === '*' || - upperReqHosts.some(x => x === upperNoProxyItem || - x.endsWith(`.${upperNoProxyItem}`) || - (upperNoProxyItem.startsWith('.') && - x.endsWith(`${upperNoProxyItem}`)))) { - return true; - } - } - return false; + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) } -exports.checkBypass = checkBypass; -function isLoopbackAddress(host) { - const hostLower = host.toLowerCase(); - return (hostLower === 'localhost' || - hostLower.startsWith('127.') || - hostLower.startsWith('[::1]') || - hostLower.startsWith('[0:0:0:0:0:0:0:1]')); + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major } -//# sourceMappingURL=proxy.js.map -/***/ }), +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} -/***/ 3771: -/***/ ((module, exports) => { +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} -exports = module.exports = SemVer +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) - } -} else { - debug = function () {} +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) } -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' +exports.compareBuild = compareBuild +function compareBuild (a, b, loose) { + var versionA = new SemVer(a, loose) + var versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} -// Max safe segment length for coercion. 
-var MAX_SAFE_COMPONENT_LENGTH = 16 +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(a, b, loose) + }) +} -var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(b, a, loose) + }) +} -// The actual regexps go on exports.re -var re = exports.re = [] -var safeRe = exports.safeRe = [] -var src = exports.src = [] -var t = exports.tokens = {} -var R = 0 +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} -function tok (n) { - t[n] = R++ +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 } -var LETTERDASHNUMBER = '[a-zA-Z0-9-]' +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} -// Replace some greedy regex tokens to prevent regex dos issues. These regex are -// used internally via the safeRe object since all inputs in this library get -// normalized first to trim and collapse all extra whitespace. The original -// regexes are exported for userland consumption and lower level usage. A -// future breaking change could export the safer regex only with a note that -// all input should have extra whitespace removed. -var safeRegexReplacements = [ - ['\\s', 1], - ['\\d', MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], -] - -function makeSafeRe (value) { - for (var i = 0; i < safeRegexReplacements.length; i++) { - var token = safeRegexReplacements[i][0] - var max = safeRegexReplacements[i][1] - value = value - .split(token + '*').join(token + '{0,' + max + '}') - .split(token + '+').join(token + '{1,' + max + '}') - } - return value +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 } -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -tok('NUMERICIDENTIFIER') -src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' -tok('NUMERICIDENTIFIERLOOSE') -src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -tok('NONNUMERICIDENTIFIER') -src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' - -// ## Main Version -// Three dot-separated numeric identifiers. - -tok('MAINVERSION') -src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIER] + ')' - -tok('MAINVERSIONLOOSE') -src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. - -tok('PRERELEASEIDENTIFIER') -src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' - -tok('PRERELEASEIDENTIFIERLOOSE') -src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -tok('PRERELEASE') -src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + - '(?:\\.' 
+ src[t.PRERELEASEIDENTIFIER] + ')*))' - -tok('PRERELEASELOOSE') -src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -tok('BUILDIDENTIFIER') -src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -tok('BUILD') -src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + - '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b -tok('FULL') -tok('FULLPLAIN') -src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + - src[t.PRERELEASE] + '?' + - src[t.BUILD] + '?' + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b -src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' + case '': + case '=': + case '==': + return eq(a, b, loose) -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -tok('LOOSEPLAIN') -src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + - src[t.PRERELEASELOOSE] + '?' + - src[t.BUILD] + '?' + case '!=': + return neq(a, b, loose) -tok('LOOSE') -src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' + case '>': + return gt(a, b, loose) -tok('GTLT') -src[t.GTLT] = '((?:<|>)?=?)' + case '>=': + return gte(a, b, loose) -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -tok('XRANGEIDENTIFIERLOOSE') -src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -tok('XRANGEIDENTIFIER') -src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' + case '<': + return lt(a, b, loose) -tok('XRANGEPLAIN') -src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:' + src[t.PRERELEASE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' + case '<=': + return lte(a, b, loose) -tok('XRANGEPLAINLOOSE') -src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[t.PRERELEASELOOSE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' 
+ default: + throw new TypeError('Invalid operator: ' + op) + } +} -tok('XRANGE') -src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' -tok('XRANGELOOSE') -src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -tok('COERCE') -src[t.COERCE] = '(^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' -tok('COERCERTL') -re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') -safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } -// Tilde ranges. -// Meaning is "reasonably at or greater than" -tok('LONETILDE') -src[t.LONETILDE] = '(?:~>?)' + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } -tok('TILDETRIM') -src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' -re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') -safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') -var tildeTrimReplace = '$1~' + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) -tok('TILDE') -src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' -tok('TILDELOOSE') -src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } -// Caret ranges. -// Meaning is "at least and backwards compatible with" -tok('LONECARET') -src[t.LONECARET] = '(?:\\^)' + debug('comp', this) +} -tok('CARETTRIM') -src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' -re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') -safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') -var caretTrimReplace = '$1^' +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var m = comp.match(r) -tok('CARET') -src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' -tok('CARETLOOSE') -src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } -// A simple gt/lt/eq thing, or just "" to indicate "any version" -tok('COMPARATORLOOSE') -src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' -tok('COMPARATOR') -src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -tok('COMPARATORTRIM') -src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + - '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} -// this one has to use the /g flag -re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') -safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') -var comparatorTrimReplace = '$1$2$3' +Comparator.prototype.toString = function () { + return this.value +} -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -tok('HYPHENRANGE') -src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAIN] + ')' + - '\\s*$' +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) -tok('HYPHENRANGELOOSE') -src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s*$' + if (this.semver === ANY || version === ANY) { + return true + } -// Star ranges basically just allow anything at all. -tok('STAR') -src[t.STAR] = '(<|>)?=?\\s*\\*' + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) + return cmp(version, this.operator, this.semver, this.options) +} - // Replace all greedy whitespace to prevent regex dos issues. These regex are - // used internally via the safeRe object since all inputs in this library get - // normalized first to trim and collapse all extra whitespace. The original - // regexes are exported for userland consumption and lower level usage. A - // future breaking change could export the safer regex only with a note that - // all input should have extra whitespace removed. - safeRe[i] = new RegExp(makeSafeRe(src[i])) +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') } -} -exports.parse = parse -function parse (version, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, @@ -3703,1561 +3622,978 @@ function parse (version, options) { } } - if (version instanceof SemVer) { - return version - } - - if (typeof version !== 'string') { - return null - } - - if (version.length > MAX_LENGTH) { - return null - } - - var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] - if (!r.test(version)) { - return null - } + var rangeTmp - try { - return new SemVer(version, options) - } catch (er) { - return null + if (this.operator === '') { + if (this.value === '') { + return true + } + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) } -} -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? 
v.version : null -} + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan } -exports.SemVer = SemVer - -function SemVer (version, options) { +exports.Range = Range +function Range (range, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range } else { - version = version.version + return new Range(range.raw, options) } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) } - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + if (range instanceof Comparator) { + return new Range(range.value, options) } - if (!(this instanceof SemVer)) { - return new SemVer(version, options) + if (!(this instanceof Range)) { + return new Range(range, options) } - debug('SemVer', version, options) this.options = options this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease - var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) - - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } - - this.raw = version - - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] - - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } - - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. 
+ this.raw = range + .trim() + .split(/\s+/) + .join(' ') - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + this.raw) } - this.build = m[5] ? m[5].split('.') : [] this.format() } -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' + this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range } -SemVer.prototype.toString = function () { - return this.version +Range.prototype.toString = function () { + return this.range } -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return this.compareMain(other) || this.comparePre(other) -} +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } + // `~ 1.2.3` => `~1.2.3` + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) -} + // `^ 1.2.3` => `^1.2.3` + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } + // normalize spaces + range = range.split(/\s+/).join(' ') - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? 
safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) + return set } -SemVer.prototype.compareBuild = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') } - var i = 0 - do { - var a = this.build[i] - var b = other.build[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) + return this.set.some(function (thisComparators) { + return ( + isSatisfiable(thisComparators, options) && + range.set.some(function (rangeComparators) { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every(function (thisComparator) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) } -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. 
- // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break +// take a set of comparators and determine whether there +// exists a version which can satisfy it +function isSatisfiable (comparators, options) { + var result = true + var remainingComparators = comparators.slice() + var testComparator = remainingComparators.pop() - default: - throw new Error('invalid increment argument: ' + release) - } - this.format() - this.raw = this.version - return this -} + while (result && remainingComparators.length) { + result = remainingComparators.every(function (otherComparator) { + return testComparator.intersects(otherComparator, options) + }) -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined + testComparator = remainingComparators.pop() } - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null - } + return result } -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } - } - } - return defaultResult // may be undefined - } +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) } -exports.compareIdentifiers = compareIdentifiers - -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) - - if (anum && bnum) { - a = +a - b = +b - } - - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp } -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' } -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') } -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} +function replaceTilde (comp, options) { + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) + debug('tilde return', ret) + return ret + }) } -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') } -exports.compareBuild = compareBuild -function compareBuild (a, b, loose) { - var versionA = new SemVer(a, loose) - var versionB = new SemVer(b, loose) - return versionA.compare(versionB) || versionA.compareBuild(versionB) -} +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' 
+ (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(a, b, loose) + debug('caret return', ret) + return ret }) } -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(b, a, loose) - }) +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') } -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} + if (gtlt === '=' && anyX) { + gtlt = '' + } -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 -} + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 -} + ret = gtlt + M + '.' + m + '.' + p + pr + } else if (xm) { + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr + } else if (xp) { + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' 
+ (+m + 1) + '.0' + pr + } -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b + debug('xRange return', ret) - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b + return ret + }) +} - case '': - case '=': - case '==': - return eq(a, b, loose) - - case '!=': - return neq(a, b, loose) - - case '>': - return gt(a, b, loose) +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(safeRe[t.STAR], '') +} - case '>=': - return gte(a, b, loose) +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } - case '<': - return lt(a, b, loose) + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } - case '<=': - return lte(a, b, loose) + return (from + ' ' + to).trim() +} - default: - throw new TypeError('Invalid operator: ' + op) +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false } -} -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false } } - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true } } + return false +} - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } } - comp = comp.trim().split(/\s+/).join(' ') - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false } - debug('comp', this) + return true } -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var m = comp.match(r) - - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) - } - - this.operator = m[1] !== undefined ? m[1] : '' - if (this.operator === '=') { - this.operator = '' +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false } + return range.test(version) +} - // if it literally is just '>' or '' then allow anything. - if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max } -Comparator.prototype.toString = function () { - return this.value +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min } -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) - if (this.semver === ANY || version === ANY) { - return true + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver } - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver } - return cmp(version, this.operator, this.semver, this.options) -} + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. 
+ var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) } - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } + if (minver && range.test(minver)) { + return minver } - var rangeTmp + return null +} - if (this.operator === '') { - if (this.value === '') { - return true - } - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - if (comp.value === '') { - return true - } - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null } +} - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +// Determine if version is greater than all the versions possible in the range. 
+exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) } -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) - } + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') } - if (range instanceof Comparator) { - return new Range(range.value, options) + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false } - if (!(this instanceof Range)) { - return new Range(range, options) - } + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] - // First reduce all whitespace as much as possible so we do not have to rely - // on potentially slow regexes like \s*. This is then stored and used for - // future error messages as well. - this.raw = range - .trim() - .split(/\s+/) - .join(' ') + var high = null + var low = null - // First, split based on boolean or || - this.set = this.raw.split('||').map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + this.raw) - } + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } - this.format() + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true } -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null } -Range.prototype.toString = function () { - return this.range +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) } -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) +exports.coerce = coerce +function coerce (version, options) { + if (version instanceof SemVer) { + return version + } - // `~ 1.2.3` => `~1.2.3` - range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) + if (typeof version === 'number') { + version = String(version) + } - // `^ 1.2.3` => `^1.2.3` - range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) + if (typeof version !== 'string') { + return null + } - // normalize spaces - range = range.split(/\s+/).join(' ') + options = options || {} - // At this point, the range is completely trimmed and - // ready to be split into comparators. + var match = null + if (!options.rtl) { + match = version.match(safeRe[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + var next + while ((next = safeRe[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + safeRe[t.COERCERTL].lastIndex = -1 + } - var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) + if (match === null) { + return null } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) - return set + return parse(match[2] + + '.' + (match[3] || '0') + + '.' 
+ (match[4] || '0'), options) } -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } - return this.set.some(function (thisComparators) { - return ( - isSatisfiable(thisComparators, options) && - range.set.some(function (rangeComparators) { - return ( - isSatisfiable(rangeComparators, options) && - thisComparators.every(function (thisComparator) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - ) - }) - ) - }) -} +/***/ }), -// take a set of comparators and determine whether there -// exists a version which can satisfy it -function isSatisfiable (comparators, options) { - var result = true - var remainingComparators = comparators.slice() - var testComparator = remainingComparators.pop() +/***/ 4138: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - while (result && remainingComparators.length) { - result = remainingComparators.every(function (otherComparator) { - return testComparator.intersects(otherComparator, options) - }) +var v1 = __nccwpck_require__(1610); +var v4 = __nccwpck_require__(8373); - testComparator = remainingComparators.pop() - } +var uuid = v4; +uuid.v1 = v1; +uuid.v4 = v4; - return result -} +module.exports = uuid; -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) -} -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. -function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} +/***/ }), -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' -} +/***/ 5694: +/***/ ((module) => { -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); } -function replaceTilde (comp, options) { - var r = options.loose ? 
safeRe[t.TILDELOOSE] : safeRe[t.TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } +module.exports = bytesToUuid; - debug('tilde return', ret) - return ret - }) -} -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') -} +/***/ }), -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret +/***/ 4069: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0' - } - } +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. - debug('caret return', ret) - return ret - }) -} +var crypto = __nccwpck_require__(6113); -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') -} +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? 
safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp - if (gtlt === '=' && anyX) { - gtlt = '' - } +/***/ }), - // if we're including prereleases in the match, then we need - // to fix this to -0, the lowest possible prerelease value - pr = options.includePrerelease ? '-0' : '' +/***/ 1610: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0-0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 +var rng = __nccwpck_require__(4069); +var bytesToUuid = __nccwpck_require__(5694); - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html - ret = gtlt + M + '.' + m + '.' + p + pr - } else if (xm) { - ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr - } else if (xp) { - ret = '>=' + M + '.' + m + '.0' + pr + - ' <' + M + '.' + (+m + 1) + '.0' + pr - } +var _nodeId; +var _clockseq; - debug('xRange return', ret) +// Previous uuid creation time +var _lastMSecs = 0; +var _lastNSecs = 0; - return ret - }) -} +// See https://github.com/uuidjs/uuid for API details +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || []; -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(safeRe[t.STAR], '') -} + options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; -// This function is passed to string.replace(re[t.HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from + // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + if (node == null || clockseq == null) { + var seedBytes = rng(); + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [ + seedBytes[0] | 0x01, + seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] + ]; + } + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } } - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to - } + // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); - return (from + ' ' + to).trim() -} + // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false - } + // Time since last uuid creation (in msecs) + var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } + // Per 4.2.1.2, Bump clockseq on clock regression + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; } - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } + // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; } - return false -} -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } + // Per 4.2.1.2 Throw error if too many uuids are requested + if (nsecs >= 10000) { + throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); } - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } + // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + msecs += 12219292800000; - // Version has a -pre, but it's not one of the ones we like. 
- return false - } + // `time_low` + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; - return true -} + // `time_mid` + var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} + // `time_high_and_version` + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + b[i++] = tmh >>> 16 & 0xff; -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } - } - }) - return max -} + // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + b[i++] = clockseq >>> 8 | 0x80; -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null + // `clock_seq_low` + b[i++] = clockseq & 0xff; + + // `node` + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min + + return buf ? buf : bytesToUuid(b); } -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) +module.exports = v1; - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver + +/***/ }), + +/***/ 8373: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var rng = __nccwpck_require__(4069); +var bytesToUuid = __nccwpck_require__(5694); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; } + options = options || {}; - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } } - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. 
- var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) - } - - if (minver && range.test(minver)) { - return minver - } - - return null -} - -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} - -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} - -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) -} - -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) - - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } - - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } - - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. - - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - var high = null - var low = null - - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false - } - } - return true -} - -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null -} - -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) -} - -exports.coerce = coerce -function coerce (version, options) { - if (version instanceof SemVer) { - return version - } - - if (typeof version === 'number') { - version = String(version) - } - - if (typeof version !== 'string') { - return null - } - - options = options || {} - - var match = null - if (!options.rtl) { - match = version.match(safeRe[t.COERCE]) - } else { - // Find the right-most coercible string that does not share - // a terminus with a more left-ward coercible string. - // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' - // - // Walk through the string checking with a /g regexp - // Manually set the index so as to pick up overlapping matches. - // Stop when we get a match that ends at the string end, since no - // coercible string can be more right-ward without the same terminus. - var next - while ((next = safeRe[t.COERCERTL].exec(version)) && - (!match || match.index + match[0].length !== version.length) - ) { - if (!match || - next.index + next[0].length !== match.index + match[0].length) { - match = next - } - safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length - } - // leave it in a clean state - safeRe[t.COERCERTL].lastIndex = -1 - } - - if (match === null) { - return null - } - - return parse(match[2] + - '.' + (match[3] || '0') + - '.' + (match[4] || '0'), options) -} - - -/***/ }), - -/***/ 4138: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var v1 = __nccwpck_require__(1610); -var v4 = __nccwpck_require__(8373); - -var uuid = v4; -uuid.v1 = v1; -uuid.v4 = v4; - -module.exports = uuid; - - -/***/ }), - -/***/ 5694: -/***/ ((module) => { - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -var byteToHex = []; -for (var i = 0; i < 256; ++i) { - byteToHex[i] = (i + 0x100).toString(16).substr(1); -} - -function bytesToUuid(buf, offset) { - var i = offset || 0; - var bth = byteToHex; - // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 - return ([ - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]] - ]).join(''); -} - -module.exports = bytesToUuid; - - -/***/ }), - -/***/ 4069: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Unique ID creation requires a high quality random # generator. In node.js -// this is pretty straight-forward - we use the crypto API. 
- -var crypto = __nccwpck_require__(6113); - -module.exports = function nodeRNG() { - return crypto.randomBytes(16); -}; - - -/***/ }), - -/***/ 1610: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var rng = __nccwpck_require__(4069); -var bytesToUuid = __nccwpck_require__(5694); - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html - -var _nodeId; -var _clockseq; - -// Previous uuid creation time -var _lastMSecs = 0; -var _lastNSecs = 0; - -// See https://github.com/uuidjs/uuid for API details -function v1(options, buf, offset) { - var i = buf && offset || 0; - var b = buf || []; - - options = options || {}; - var node = options.node || _nodeId; - var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; - - // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 - if (node == null || clockseq == null) { - var seedBytes = rng(); - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [ - seedBytes[0] | 0x01, - seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] - ]; - } - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } - - // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); - - // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; - - // Time since last uuid creation (in msecs) - var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; - - // Per 4.2.1.2, Bump clockseq on clock regression - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } - - // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } - - // Per 4.2.1.2 Throw error if too many uuids are requested - if (nsecs >= 10000) { - throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; - - // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - msecs += 12219292800000; - - // `time_low` - var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; - - // `time_mid` - var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; - - // `time_high_and_version` - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - b[i++] = tmh >>> 16 & 0xff; - - // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - b[i++] = clockseq >>> 8 | 0x80; - - // `clock_seq_low` - b[i++] = clockseq & 0xff; - - // `node` - for (var n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf ? 
buf : bytesToUuid(b); -} - -module.exports = v1; - - -/***/ }), - -/***/ 8373: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var rng = __nccwpck_require__(4069); -var bytesToUuid = __nccwpck_require__(5694); - -function v4(options, buf, offset) { - var i = buf && offset || 0; - - if (typeof(options) == 'string') { - buf = options === 'binary' ? new Array(16) : null; - options = null; - } - options = options || {}; - - var rnds = options.random || (options.rng || rng)(); - - // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - rnds[6] = (rnds[6] & 0x0f) | 0x40; - rnds[8] = (rnds[8] & 0x3f) | 0x80; - - // Copy bytes to buffer, if provided - if (buf) { - for (var ii = 0; ii < 16; ++ii) { - buf[i + ii] = rnds[ii]; - } - } - - return buf || bytesToUuid(rnds); -} + return buf || bytesToUuid(rnds); +} module.exports = v4; @@ -5737,7 +5073,7 @@ exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(__nccwpck_require__(7147)); const os = __importStar(__nccwpck_require__(2037)); -const uuid_1 = __nccwpck_require__(5840); +const uuid_1 = __nccwpck_require__(8974); const utils_1 = __nccwpck_require__(5278); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; @@ -5787,8 +5123,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(1404); -const auth_1 = __nccwpck_require__(6758); +const http_client_1 = __nccwpck_require__(6255); +const auth_1 = __nccwpck_require__(5526); const core_1 = __nccwpck_require__(2186); class OidcClient { static createHttpClient(allowRetry = true, maxRetry = 10) { @@ -5821,7 +5157,7 @@ class OidcClient { .catch(error => { throw new Error(`Failed to get ID Token. \n Error Code : ${error.statusCode}\n - Error Message: ${error.result.message}`); + Error Message: ${error.message}`); }); const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; if (!id_token) { @@ -6257,797 +5593,1461 @@ exports.toCommandProperties = toCommandProperties; /***/ }), -/***/ 6758: -/***/ (function(__unused_webpack_module, exports) { +/***/ 8974: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(1595)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(6993)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(1472)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(6217)); + +var _nil = _interopRequireDefault(__nccwpck_require__(2381)); + +var _version = _interopRequireDefault(__nccwpck_require__(427)); + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +var _parse = _interopRequireDefault(__nccwpck_require__(6385)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 5842: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); } -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Bearer ${this.token}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 2381: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 6385: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; } -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 6230: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 9784: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); } -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; -//# sourceMappingURL=auth.js.map /***/ }), -/***/ 1404: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 8844: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* eslint-disable @typescript-eslint/no-explicit-any */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; + +Object.defineProperty(exports, "__esModule", ({ + value: true })); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(3685)); -const https = __importStar(__nccwpck_require__(5687)); -const pm = __importStar(__nccwpck_require__(2843)); -const tunnel = __importStar(__nccwpck_require__(4294)); -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = 
exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 1458: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + /** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ -function getProxyUrl(serverUrl) { - const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); } -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. 
If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; } -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 1595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; } - readBody() { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - })); - }); + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); } -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - const parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5920)); + +var _md = _interopRequireDefault(__nccwpck_require__(5842)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 5920: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +var _parse = _interopRequireDefault(__nccwpck_require__(6385)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 1472: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 6217: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5920)); + +var _sha = _interopRequireDefault(__nccwpck_require__(8844)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 2609: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(6230)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 427: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 1514: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getExecOutput = exports.exec = void 0; +const string_decoder_1 = __nccwpck_require__(1576); +const tr = __importStar(__nccwpck_require__(8159)); +/** + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code + */ +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); +} +exports.exec = exec; +/** + * Exec a command and get the output. 
+ * Output will be streamed to the live console. + * Returns promise with the exit code and collected stdout and stderr + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code, stdout, and stderr + */ +function getExecOutput(commandLine, args, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + let stdout = ''; + let stderr = ''; + //Using string decoder covers the case where a mult-byte character is split + const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); + const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); + const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; + const stdErrListener = (data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + }; + const stdOutListener = (data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + }; + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + //flush any remaining characters + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; + }); +} +exports.getExecOutput = getExecOutput; +//# sourceMappingURL=exec.js.map + +/***/ }), + +/***/ 8159: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.argStringToArray = exports.ToolRunner = void 0; +const os = __importStar(__nccwpck_require__(2037)); +const events = __importStar(__nccwpck_require__(2361)); +const child = __importStar(__nccwpck_require__(2081)); +const path = __importStar(__nccwpck_require__(1017)); +const io = __importStar(__nccwpck_require__(7436)); +const ioUtil = __importStar(__nccwpck_require__(1962)); +const timers_1 = __nccwpck_require__(9512); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. + */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. 
+ cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; } } + return cmd; } - options(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - }); - } - get(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - }); - } - del(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - }); - } - post(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - }); - } - patch(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - }); - } - put(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - }); - } - head(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - }); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream, additionalHeaders); - }); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise - */ - getJson(requestUrl, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - const res = yield this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - postJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - putJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - patchJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = 
this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - request(verb, requestUrl, data, headers) { - return __awaiter(this, void 0, void 0, function* () { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - const parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - do { - response = yield this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (const handler of this.handlers) { - if (handler.canHandleAuthentication(response)) { - authenticationHandler = handler; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (response.message.statusCode && - HttpRedirectCodes.includes(response.message.statusCode) && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - const parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol === 'https:' && - parsedUrl.protocol !== parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - yield response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (const header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = yield this.requestRaw(info, data); - redirectsRemaining--; - } - if (!response.message.statusCode || - !HttpResponseRetryCodes.includes(response.message.statusCode)) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - yield response.readBody(); - yield this._performExponentialBackoff(numTries); - } - } while (numTries < maxTries); - return response; - }); - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; - } - /** - * Raw request. 
- * @param info - * @param data - */ - requestRaw(info, data) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => { - function callbackForResult(err, res) { - if (err) { - reject(err); - } - else if (!res) { - // If `err` is not passed, then `res` must be passed. - reject(new Error('Unknown error')); - } - else { - resolve(res); - } - } - this.requestRawWithCallback(info, data, callbackForResult); - }); - }); - } - /** - * Raw request with callback. - * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - if (typeof data === 'string') { - if (!info.options.headers) { - info.options.headers = {}; + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); } - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + return s; } - let callbackCalled = false; - function handleResult(err, res) { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + return ''; } - const req = info.httpModule.request(info.options, (msg) => { - const res = new HttpClientResponse(msg); - handleResult(undefined, res); - }); - let socket; - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; } - handleResult(new Error(`Request timeout: ${info.options.path}`)); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); } + return this.toolPath; } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - const parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? 
parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - for (const handler of this.handlers) { - handler.prepareRequest(info.options); + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; } } - return info; + return this.args; } - _mergeHeaders(headers) { - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); - } - return lowercaseKeys(headers || {}); + _endsWith(str, end) { + return str.endsWith(end); } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); } - _getAgent(parsedUrl) { - let agent; - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); } - if (this._keepAlive && !useProxy) { - agent = this._agent; + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. + // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; } - // if agent is already assigned use that agent. - if (agent) { - return agent; + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; } - // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. 
- if (proxyUrl && proxyUrl.hostname) { - const agentOptions = { - maxSockets, - keepAlive: this._keepAlive, - proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - })), { host: proxyUrl.hostname, port: proxyUrl.port }) - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote } else { - tunnelAgent = overHttps ? 
tunnel.httpOverHttps : tunnel.httpOverHttp; + quoteHit = false; } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? https.globalAgent : http.globalAgent; + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. 
+ return `"${arg}"`; } - return agent; + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); } - _performExponentialBackoff(retryNumber) { - return __awaiter(this, void 0, void 0, function* () { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - }); + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; } - _processResponse(res, options) { + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - const statusCode = res.message.statusCode || 0; - const response = { - statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode === HttpCodes.NotFound) { - resolve(response); + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); } - // get the result from the body - function dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - const a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); } - let obj; - let contents; - try { - contents = yield res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, dateTimeDeserializer); + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + let stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); } - else { - obj = JSON.parse(contents); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); } - response.result = obj; - } - response.headers = res.message.headers; + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); } - catch (err) { - // Invalid resource (contents not json); leaving result obj null + let errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); } else { - msg = `Failed request: (${statusCode})`; + resolve(exitCode); } - const err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); + } + cp.stdin.end(this.options.input); } })); }); } } -exports.HttpClient = HttpClient; -const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 2843: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.checkBypass = exports.getProxyUrl = void 0; -function getProxyUrl(reqUrl) { - const usingSsl = reqUrl.protocol === 'https:'; - if (checkBypass(reqUrl)) { - return undefined; +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. 
+ if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; } - const proxyVar = (() => { - if (usingSsl) { - return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; } - else { - return process.env['http_proxy'] || process.env['HTTP_PROXY']; + if (c === '\\' && escaped) { + append(c); + continue; } - })(); - if (proxyVar) { - return new URL(proxyVar); + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); } - else { - return undefined; + if (arg.length > 0) { + args.push(arg.trim()); } + return args; } -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } } - const reqHost = reqUrl.hostname; - if (isLoopbackAddress(reqHost)) { - return true; + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); + } } - const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; + _debug(message) { + this.emit('debug', message); } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. 
This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; +} +//# sourceMappingURL=toolrunner.js.map + +/***/ }), + +/***/ 4087: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Context = void 0; +const fs_1 = __nccwpck_require__(7147); +const os_1 = __nccwpck_require__(2037); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + var _a, _b, _c; + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = + (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; } - // Format the request hostname and hostname with port - const upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); } - // Compare request host against noproxy - for (const upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperNoProxyItem === '*' || - upperReqHosts.some(x => x === upperNoProxyItem || - x.endsWith(`.${upperNoProxyItem}`) || - (upperNoProxyItem.startsWith('.') && - x.endsWith(`${upperNoProxyItem}`)))) { - return true; + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; + } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); } - return false; -} -exports.checkBypass = checkBypass; -function isLoopbackAddress(host) { - const hostLower = host.toLowerCase(); - return (hostLower === 'localhost' || - hostLower.startsWith('127.') || - hostLower.startsWith('[::1]') || - hostLower.startsWith('[0:0:0:0:0:0:0:1]')); } -//# sourceMappingURL=proxy.js.map +exports.Context = Context; +//# sourceMappingURL=context.js.map /***/ }), -/***/ 1514: +/***/ 5438: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -7060,97 +7060,289 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getExecOutput = exports.exec = void 0; -const string_decoder_1 = __nccwpck_require__(1576); -const tr = __importStar(__nccwpck_require__(8159)); +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(4087)); +const utils_1 = __nccwpck_require__(3030); +exports.context = new Context.Context(); /** - * Exec a command. - * Output will be streamed to the live console. - * Returns promise with return code + * Returns a hydrated octokit ready to use for GitHub Actions * - * @param commandLine command to execute (can include additional args). Must be correctly escaped. - * @param args optional arguments for tool. Escaping is handled by the lib. - * @param options optional exec options. 
See ExecOptions - * @returns Promise exit code + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set */ -function exec(commandLine, args, options) { - return __awaiter(this, void 0, void 0, function* () { - const commandArgs = tr.argStringToArray(commandLine); - if (commandArgs.length === 0) { - throw new Error(`Parameter 'commandLine' cannot be null or empty.`); - } - // Path to tool to execute should be first arg - const toolPath = commandArgs[0]; - args = commandArgs.slice(1).concat(args || []); - const runner = new tr.ToolRunner(toolPath, args, options); - return runner.exec(); +function getOctokit(token, options, ...additionalPlugins) { + const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); + return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); +} +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map + +/***/ }), + +/***/ 7914: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__nccwpck_require__(6255)); +const undici_1 = __nccwpck_require__(1773); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); + } + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); + } + return typeof options.auth === 'string' ? 
options.auth : `token ${token}`; } -exports.exec = exec; +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); +} +exports.getProxyAgentDispatcher = getProxyAgentDispatcher; +function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }); + return proxyFetch; +} +exports.getProxyFetch = getProxyFetch; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +} +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 3030: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(4087)); +const Utils = __importStar(__nccwpck_require__(7914)); +// octokit + plugins +const core_1 = __nccwpck_require__(6762); +const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); +const plugin_paginate_rest_1 = __nccwpck_require__(4193); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +exports.defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); /** - * Exec a command and get the output. - * Output will be streamed to the live console. - * Returns promise with the exit code and collected stdout and stderr + * Convience function to correctly format Octokit Options to pass into the constructor. * - * @param commandLine command to execute (can include additional args). Must be correctly escaped. - * @param args optional arguments for tool. Escaping is handled by the lib. - * @param options optional exec options. 
See ExecOptions - * @returns Promise exit code, stdout, and stderr + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set */ -function getExecOutput(commandLine, args, options) { - var _a, _b; +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; +} +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 8090: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hashFiles = exports.create = void 0; +const internal_globber_1 = __nccwpck_require__(8298); +const internal_hash_files_1 = __nccwpck_require__(2448); +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { - let stdout = ''; - let stderr = ''; - //Using string decoder covers the case where a mult-byte character is split - const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); - const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); - const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; - const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; - const stdErrListener = (data) => { - stderr += stderrDecoder.write(data); - if (originalStdErrListener) { - originalStdErrListener(data); - } - }; - const stdOutListener = (data) => { - stdout += stdoutDecoder.write(data); - if (originalStdoutListener) { - originalStdoutListener(data); - } - }; - const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? 
void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); - const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); - //flush any remaining characters - stdout += stdoutDecoder.end(); - stderr += stderrDecoder.end(); - return { - exitCode, - stdout, - stderr - }; + return yield internal_globber_1.DefaultGlobber.create(patterns, options); }); } -exports.getExecOutput = getExecOutput; -//# sourceMappingURL=exec.js.map +exports.create = create; +/** + * Computes the sha256 hash of a glob + * + * @param patterns Patterns separated by newlines + * @param currentWorkspace Workspace used when matching files + * @param options Glob options + * @param verbose Enables verbose logging + */ +function hashFiles(patterns, currentWorkspace = '', options, verbose = false) { + return __awaiter(this, void 0, void 0, function* () { + let followSymbolicLinks = true; + if (options && typeof options.followSymbolicLinks === 'boolean') { + followSymbolicLinks = options.followSymbolicLinks; + } + const globber = yield create(patterns, { followSymbolicLinks }); + return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose); + }); +} +exports.hashFiles = hashFiles; +//# sourceMappingURL=glob.js.map /***/ }), -/***/ 8159: +/***/ 1026: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOptions = void 0; +const core = __importStar(__nccwpck_require__(2186)); +/** + * Returns a copy with defaults filled in. 
+ */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + matchDirectories: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.matchDirectories === 'boolean') { + result.matchDirectories = copy.matchDirectories; + core.debug(`matchDirectories '${result.matchDirectories}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + } + return result; +} +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map + +/***/ }), + +/***/ 8298: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -7183,661 +7375,217 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.argStringToArray = exports.ToolRunner = void 0; -const os = __importStar(__nccwpck_require__(2037)); -const events = __importStar(__nccwpck_require__(2361)); -const child = __importStar(__nccwpck_require__(2081)); +exports.DefaultGlobber = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const fs = __importStar(__nccwpck_require__(7147)); +const globOptionsHelper = __importStar(__nccwpck_require__(1026)); const path = __importStar(__nccwpck_require__(1017)); -const io = __importStar(__nccwpck_require__(7436)); -const ioUtil = __importStar(__nccwpck_require__(1962)); -const timers_1 = __nccwpck_require__(9512); -/* eslint-disable @typescript-eslint/unbound-method */ +const patternHelper = __importStar(__nccwpck_require__(9005)); +const internal_match_kind_1 = __nccwpck_require__(1063); +const internal_pattern_1 = __nccwpck_require__(4536); +const internal_search_state_1 = __nccwpck_require__(9117); const IS_WINDOWS = process.platform === 'win32'; -/* - * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. - */ -class ToolRunner extends events.EventEmitter { - constructor(toolPath, args, options) { - super(); - if (!toolPath) { - throw new Error("Parameter 'toolPath' cannot be null or empty."); - } - this.toolPath = toolPath; - this.args = args || []; - this.options = options || {}; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); } - _debug(message) { - if (this.options.listeners && this.options.listeners.debug) { - this.options.listeners.debug(message); - } + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); } - _getCommandString(options, noPrefix) { - const toolPath = this._getSpawnFileName(); - const args = this._getSpawnArgs(options); - let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool - if (IS_WINDOWS) { - // Windows + cmd file - if (this._isCmdFile()) { - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); } } - // Windows + verbatim - else if (options.windowsVerbatimArguments) { - cmd += `"${toolPath}"`; - for (const a of args) { - cmd += ` ${a}`; + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); } + finally { if (e_1) throw e_1.error; } } - // Windows (regular) - else { - cmd += this._windowsQuoteCmdArg(toolPath); - for (const a of args) { - cmd += ` ${this._windowsQuoteCmdArg(a)}`; + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? 
+ const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); } } - } - else { - // OSX/Linux - this can likely be improved with some form of quoting. - // creating processes on Unix is fundamentally different than Windows. - // on Unix, execvp() takes an arg array. - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); } - } - return cmd; - } - _processLineBuffer(data, strBuffer, onLine) { - try { - let s = strBuffer + data.toString(); - let n = s.indexOf(os.EOL); - while (n > -1) { - const line = s.substring(0, n); - onLine(line); - // the rest of the string ... - s = s.substring(n + os.EOL.length); - n = s.indexOf(os.EOL); + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { + yield yield __await(item.path); + } + // Descend? + else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } } - return s; - } - catch (err) { - // streaming lines to console is best effort. Don't fail a build. - this._debug(`error processing line. Failed with error ${err}`); - return ''; - } + }); } - _getSpawnFileName() { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - return process.env['COMSPEC'] || 'cmd.exe'; + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); } - } - return this.toolPath; - } - _getSpawnArgs(options) { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; - for (const a of this.args) { - argline += ' '; - argline += options.windowsVerbatimArguments - ? 
a - : this._windowsQuoteCmdArg(a); + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); } - argline += '"'; - return [argline]; } - } - return this.args; - } - _endsWith(str, end) { - return str.endsWith(end); - } - _isCmdFile() { - const upperToolPath = this.toolPath.toUpperCase(); - return (this._endsWith(upperToolPath, '.CMD') || - this._endsWith(upperToolPath, '.BAT')); - } - _windowsQuoteCmdArg(arg) { - // for .exe, apply the normal quoting rules that libuv applies - if (!this._isCmdFile()) { - return this._uvQuoteCmdArg(arg); - } - // otherwise apply quoting rules specific to the cmd.exe command line parser. - // the libuv rules are generic and are not designed specifically for cmd.exe - // command line parser. - // - // for a detailed description of the cmd.exe command line parser, refer to - // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 - // need quotes for empty arg - if (!arg) { - return '""'; - } - // determine whether the arg needs to be quoted - const cmdSpecialChars = [ - ' ', - '\t', - '&', - '(', - ')', - '[', - ']', - '{', - '}', - '^', - '=', - ';', - '!', - "'", - '+', - ',', - '`', - '~', - '|', - '<', - '>', - '"' - ]; - let needsQuotes = false; - for (const char of arg) { - if (cmdSpecialChars.some(x => x === char)) { - needsQuotes = true; - break; - } - } - // short-circuit if quotes not needed - if (!needsQuotes) { - return arg; - } - // the following quoting rules are very similar to the rules that by libuv applies. - // - // 1) wrap the string in quotes - // - // 2) double-up quotes - i.e. " => "" - // - // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately - // doesn't work well with a cmd.exe command line. - // - // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. - // for example, the command line: - // foo.exe "myarg:""my val""" - // is parsed by a .NET console app into an arg array: - // [ "myarg:\"my val\"" ] - // which is the same end result when applying libuv quoting rules. although the actual - // command line from libuv quoting rules would look like: - // foo.exe "myarg:\"my val\"" - // - // 3) double-up slashes that precede a quote, - // e.g. hello \world => "hello \world" - // hello\"world => "hello\\""world" - // hello\\"world => "hello\\\\""world" - // hello world\ => "hello world\\" - // - // technically this is not required for a cmd.exe command line, or the batch argument parser. - // the reasons for including this as a .cmd quoting rule are: - // - // a) this is optimized for the scenario where the argument is passed from the .cmd file to an - // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. - // - // b) it's what we've been doing previously (by deferring to node default behavior) and we - // haven't heard any complaints about that aspect. - // - // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be - // escaped when used on the command line directly - even though within a .cmd file % can be escaped - // by using %%. - // - // the saving grace is, on the command line, %var% is left as-is if var is not defined. 
this contrasts - // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. - // - // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would - // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the - // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args - // to an external program. - // - // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. - // % can be escaped within a .cmd file. - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; // double the slash - } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '"'; // double the quote - } - else { - quoteHit = false; - } - } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _uvQuoteCmdArg(arg) { - // Tool runner wraps child_process.spawn() and needs to apply the same quoting as - // Node in certain cases where the undocumented spawn option windowsVerbatimArguments - // is used. - // - // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, - // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), - // pasting copyright notice from Node within this function: - // - // Copyright Joyent, Inc. and other Node contributors. All rights reserved. - // - // Permission is hereby granted, free of charge, to any person obtaining a copy - // of this software and associated documentation files (the "Software"), to - // deal in the Software without restriction, including without limitation the - // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - // sell copies of the Software, and to permit persons to whom the Software is - // furnished to do so, subject to the following conditions: - // - // The above copyright notice and this permission notice shall be included in - // all copies or substantial portions of the Software. - // - // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - // IN THE SOFTWARE. - if (!arg) { - // Need double quotation for empty argument - return '""'; - } - if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { - // No quotation needed - return arg; - } - if (!arg.includes('"') && !arg.includes('\\')) { - // No embedded double quotes or backslashes, so I can just wrap - // quote marks around the whole thing. 
- return `"${arg}"`; - } - // Expected input/output: - // input : hello"world - // output: "hello\"world" - // input : hello""world - // output: "hello\"\"world" - // input : hello\world - // output: hello\world - // input : hello\\world - // output: hello\\world - // input : hello\"world - // output: "hello\\\"world" - // input : hello\\"world - // output: "hello\\\\\"world" - // input : hello world\ - // output: "hello world\\" - note the comment in libuv actually reads "hello world\" - // but it appears the comment is wrong, it should be "hello world\\" - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; - } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '\\'; - } - else { - quoteHit = false; - } - } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _cloneExecOptions(options) { - options = options || {}; - const result = { - cwd: options.cwd || process.cwd(), - env: options.env || process.env, - silent: options.silent || false, - windowsVerbatimArguments: options.windowsVerbatimArguments || false, - failOnStdErr: options.failOnStdErr || false, - ignoreReturnCode: options.ignoreReturnCode || false, - delay: options.delay || 10000 - }; - result.outStream = options.outStream || process.stdout; - result.errStream = options.errStream || process.stderr; - return result; - } - _getSpawnOptions(options, toolPath) { - options = options || {}; - const result = {}; - result.cwd = options.cwd; - result.env = options.env; - result['windowsVerbatimArguments'] = - options.windowsVerbatimArguments || this._isCmdFile(); - if (options.windowsVerbatimArguments) { - result.argv0 = `"${toolPath}"`; - } - return result; + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); } - /** - * Exec a tool. - * Output will be streamed to the live console. - * Returns promise with return code - * - * @param tool path to tool to exec - * @param options optional exec options. 
See ExecOptions - * @returns number - */ - exec() { + static stat(item, options, traversalChain) { return __awaiter(this, void 0, void 0, function* () { - // root the tool path if it is unrooted and contains relative pathing - if (!ioUtil.isRooted(this.toolPath) && - (this.toolPath.includes('/') || - (IS_WINDOWS && this.toolPath.includes('\\')))) { - // prefer options.cwd if it is specified, however options.cwd may also need to be rooted - this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); - } - // if the tool is only a file name, then resolve it from the PATH - // otherwise verify it exists (add extension on Windows if necessary) - this.toolPath = yield io.which(this.toolPath, true); - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - this._debug(`exec tool: ${this.toolPath}`); - this._debug('arguments:'); - for (const arg of this.args) { - this._debug(` ${arg}`); - } - const optionsNonNull = this._cloneExecOptions(this.options); - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); - } - const state = new ExecState(optionsNonNull, this.toolPath); - state.on('debug', (message) => { - this._debug(message); - }); - if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { - return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); - } - const fileName = this._getSpawnFileName(); - const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); - let stdbuffer = ''; - if (cp.stdout) { - cp.stdout.on('data', (data) => { - if (this.options.listeners && this.options.listeners.stdout) { - this.options.listeners.stdout(data); - } - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(data); - } - stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { - if (this.options.listeners && this.options.listeners.stdline) { - this.options.listeners.stdline(line); - } - }); - }); + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); } - let errbuffer = ''; - if (cp.stderr) { - cp.stderr.on('data', (data) => { - state.processStderr = true; - if (this.options.listeners && this.options.listeners.stderr) { - this.options.listeners.stderr(data); - } - if (!optionsNonNull.silent && - optionsNonNull.errStream && - optionsNonNull.outStream) { - const s = optionsNonNull.failOnStdErr - ? 
optionsNonNull.errStream - : optionsNonNull.outStream; - s.write(data); + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; } - errbuffer = this._processLineBuffer(data, errbuffer, (line) => { - if (this.options.listeners && this.options.listeners.errline) { - this.options.listeners.errline(line); - } - }); - }); - } - cp.on('error', (err) => { - state.processError = err.message; - state.processExited = true; - state.processClosed = true; - state.CheckComplete(); - }); - cp.on('exit', (code) => { - state.processExitCode = code; - state.processExited = true; - this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); - state.CheckComplete(); - }); - cp.on('close', (code) => { - state.processExitCode = code; - state.processExited = true; - state.processClosed = true; - this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); - state.CheckComplete(); - }); - state.on('done', (error, exitCode) => { - if (stdbuffer.length > 0) { - this.emit('stdline', stdbuffer); - } - if (errbuffer.length > 0) { - this.emit('errline', errbuffer); - } - cp.removeAllListeners(); - if (error) { - reject(error); - } - else { - resolve(exitCode); - } - }); - if (this.options.input) { - if (!cp.stdin) { - throw new Error('child process missing stdin'); + throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); } - cp.stdin.end(this.options.input); + throw err; } - })); - }); - } -} -exports.ToolRunner = ToolRunner; -/** - * Convert an arg string to an array of args. Handles escaping - * - * @param argString string of arguments - * @returns string[] array of arguments - */ -function argStringToArray(argString) { - const args = []; - let inQuotes = false; - let escaped = false; - let arg = ''; - function append(c) { - // we only escape double quotes. 
- if (escaped && c !== '"') { - arg += '\\'; - } - arg += c; - escaped = false; - } - for (let i = 0; i < argString.length; i++) { - const c = argString.charAt(i); - if (c === '"') { - if (!escaped) { - inQuotes = !inQuotes; } else { - append(c); - } - continue; - } - if (c === '\\' && escaped) { - append(c); - continue; - } - if (c === '\\' && inQuotes) { - escaped = true; - continue; - } - if (c === ' ' && !inQuotes) { - if (arg.length > 0) { - args.push(arg); - arg = ''; - } - continue; - } - append(c); - } - if (arg.length > 0) { - args.push(arg.trim()); - } - return args; -} -exports.argStringToArray = argStringToArray; -class ExecState extends events.EventEmitter { - constructor(options, toolPath) { - super(); - this.processClosed = false; // tracks whether the process has exited and stdio is closed - this.processError = ''; - this.processExitCode = 0; - this.processExited = false; // tracks whether the process has exited - this.processStderr = false; // tracks whether stderr was written to - this.delay = 10000; // 10 seconds - this.done = false; - this.timeout = null; - if (!toolPath) { - throw new Error('toolPath must not be empty'); - } - this.options = options; - this.toolPath = toolPath; - if (options.delay) { - this.delay = options.delay; - } - } - CheckComplete() { - if (this.done) { - return; - } - if (this.processClosed) { - this._setResult(); - } - else if (this.processExited) { - this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); - } - } - _debug(message) { - this.emit('debug', message); - } - _setResult() { - // determine whether there is an error - let error; - if (this.processExited) { - if (this.processError) { - error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); - } - else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { - error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); } - else if (this.processStderr && this.options.failOnStdErr) { - error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); } - } - // clear the timeout - if (this.timeout) { - clearTimeout(this.timeout); - this.timeout = null; - } - this.done = true; - this.emit('done', error, this.processExitCode); - } - static HandleTimeout(state) { - if (state.done) { - return; - } - if (!state.processClosed && state.processExited) { - const message = `The STDIO streams did not close within ${state.delay / - 1000} seconds of the exit event from process '${state.toolPath}'. 
This may indicate a child process inherited the STDIO streams and has not yet exited.`; - state._debug(message); - } - state._setResult(); + return stats; + }); } } -//# sourceMappingURL=toolrunner.js.map +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map /***/ }), -/***/ 4087: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Context = void 0; -const fs_1 = __nccwpck_require__(7147); -const os_1 = __nccwpck_require__(2037); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - var _a, _b, _c; - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - this.job = process.env.GITHUB_JOB; - this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); - this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); - this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; - this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; - this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; - } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; - } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); - } -} -exports.Context = Context; -//# sourceMappingURL=context.js.map - -/***/ }), - -/***/ 5438: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 2448: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -7860,27 +7608,112 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokit = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const utils_1 = __nccwpck_require__(3030); -exports.context = new Context.Context(); +exports.hashFiles = void 0; +const crypto = __importStar(__nccwpck_require__(6113)); +const core = __importStar(__nccwpck_require__(2186)); +const fs = __importStar(__nccwpck_require__(7147)); +const stream = __importStar(__nccwpck_require__(2781)); +const util = __importStar(__nccwpck_require__(3837)); +const path = __importStar(__nccwpck_require__(1017)); +function hashFiles(globber, currentWorkspace, verbose = false) { + var e_1, _a; + var _b; + return __awaiter(this, void 0, void 0, function* () { + const writeDelegate = verbose ? core.info : core.debug; + let hasMatch = false; + const githubWorkspace = currentWorkspace + ? currentWorkspace + : (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? 
_b : process.cwd(); + const result = crypto.createHash('sha256'); + let count = 0; + try { + for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { + const file = _d.value; + writeDelegate(file); + if (!file.startsWith(`${githubWorkspace}${path.sep}`)) { + writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); + continue; + } + if (fs.statSync(file).isDirectory()) { + writeDelegate(`Skip directory '${file}'.`); + continue; + } + const hash = crypto.createHash('sha256'); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(fs.createReadStream(file), hash); + result.write(hash.digest()); + count++; + if (!hasMatch) { + hasMatch = true; + } + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + } + finally { if (e_1) throw e_1.error; } + } + result.end(); + if (hasMatch) { + writeDelegate(`Found ${count} files to hash.`); + return result.digest('hex'); + } + else { + writeDelegate(`No matches found for glob`); + return ''; + } + }); +} +exports.hashFiles = hashFiles; +//# sourceMappingURL=internal-hash-files.js.map + +/***/ }), + +/***/ 1063: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MatchKind = void 0; /** - * Returns a hydrated octokit ready to use for GitHub Actions - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set + * Indicates whether a pattern matches a path */ -function getOctokit(token, options, ...additionalPlugins) { - const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); - return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options)); -} -exports.getOctokit = getOctokit; -//# sourceMappingURL=github.js.map +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map /***/ }), -/***/ 7914: +/***/ 1849: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -7904,33 +7737,188 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__nccwpck_require__(6341)); -function getAuthString(token, options) { - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); +exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; +const path = __importStar(__nccwpck_require__(1017)); +const assert_1 = __importDefault(__nccwpck_require__(9491)); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. 
+ * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +function dirname(p) { + // Normalize slashes and trim unnecessary trailing slash + p = safeTrimTrailingSeparator(p); + // Windows UNC root, e.g. \\hello or \\hello\world + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); + // Get dirname + let result = path.dirname(p); + // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); } - return typeof options.auth === 'string' ? options.auth : `token ${token}`; + return result; } -exports.getAuthString = getAuthString; -function getProxyAgent(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); +exports.dirname = dirname; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. + */ +function ensureAbsoluteRoot(root, itemPath) { + assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + // Already rooted + if (hasAbsoluteRoot(itemPath)) { + return itemPath; + } + // Windows + if (IS_WINDOWS) { + // Check for itemPath like C: or C:foo + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + // Drive letter matches cwd? Expand to cwd + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + // Drive only, e.g. C: + if (itemPath.length === 2) { + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}`; + } + // Drive + path, e.g. C:foo + else { + if (!cwd.endsWith('\\')) { + cwd += '\\'; + } + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + } + } + // Different drive + else { + return `${itemPath[0]}:\\${itemPath.substr(2)}`; + } + } + // Check for itemPath like \ or \foo + else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; + } + } + assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + // Otherwise ensure root ends with a separator + if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { + // Intentionally empty + } + else { + // Append separator + root += path.sep; + } + return root + itemPath; } -exports.getProxyAgent = getProxyAgent; -function getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +exports.ensureAbsoluteRoot = ensureAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). 
+ */ +function hasAbsoluteRoot(itemPath) { + assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \\hello\share or C:\hello + return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); } -exports.getApiBaseUrl = getApiBaseUrl; -//# sourceMappingURL=utils.js.map +exports.hasAbsoluteRoot = hasAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +function hasRoot(itemPath) { + assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \ or \hello or \\hello + // E.g. C: or C:\hello + return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasRoot = hasRoot; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +function normalizeSeparators(p) { + p = p || ''; + // Windows + if (IS_WINDOWS) { + // Convert slashes on Windows + p = p.replace(/\//g, '\\'); + // Remove redundant slashes + const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello + return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC + } + // Remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +exports.normalizeSeparators = normalizeSeparators; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +function safeTrimTrailingSeparator(p) { + // Short-circuit if empty + if (!p) { + return ''; + } + // Normalize separators + p = normalizeSeparators(p); + // No trailing slash + if (!p.endsWith(path.sep)) { + return p; + } + // Check '/' on Linux/macOS and '\' on Windows + if (p === path.sep) { + return p; + } + // On Windows check if drive root. E.g. C:\ + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; + } + // Otherwise trim trailing slash + return p.substr(0, p.length - 1); +} +exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; +//# sourceMappingURL=internal-path-helper.js.map /***/ }), -/***/ 3030: +/***/ 6836: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -7954,49 +7942,107 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const Utils = __importStar(__nccwpck_require__(7914)); -// octokit + plugins -const core_1 = __nccwpck_require__(6762); -const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); -const plugin_paginate_rest_1 = __nccwpck_require__(4193); -exports.context = new Context.Context(); -const baseUrl = Utils.getApiBaseUrl(); -exports.defaults = { - baseUrl, - request: { - agent: Utils.getProxyAgent(baseUrl) - } +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; -exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Path = void 0; +const path = __importStar(__nccwpck_require__(1017)); +const pathHelper = __importStar(__nccwpck_require__(1849)); +const assert_1 = __importDefault(__nccwpck_require__(9491)); +const IS_WINDOWS = process.platform === 'win32'; /** - * Convience function to correctly format Octokit Options to pass into the constructor. - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set + * Helper class for parsing paths into segments */ -function getOctokitOptions(token, options) { - const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller - // Auth - const auth = Utils.getAuthString(token, opts); - if (auth) { - opts.auth = auth; +class Path { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; } - return opts; } -exports.getOctokitOptions = getOctokitOptions; -//# sourceMappingURL=utils.js.map +exports.Path = Path; +//# sourceMappingURL=internal-path.js.map /***/ }), -/***/ 6341: +/***/ 9005: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; 
-/* eslint-disable @typescript-eslint/no-explicit-any */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); @@ -8016,60 +8062,535 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(3685)); -const https = __importStar(__nccwpck_require__(5687)); -const pm = __importStar(__nccwpck_require__(3466)); -const tunnel = __importStar(__nccwpck_require__(4294)); -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; +exports.partialMatch = exports.match = exports.getSearchPaths = void 0; +const pathHelper = __importStar(__nccwpck_require__(1849)); +const internal_match_kind_1 = __nccwpck_require__(1063); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. 
+ */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; + } + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } + } + return result; +} +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } + } + return result; +} +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); +} +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map + +/***/ }), + +/***/ 4536: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Pattern = void 0; +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const pathHelper = __importStar(__nccwpck_require__(1849)); +const assert_1 = __importDefault(__nccwpck_require__(9491)); +const minimatch_1 = __nccwpck_require__(3973); +const internal_match_kind_1 = __nccwpck_require__(1063); +const internal_path_1 = __nccwpck_require__(6836); +const IS_WINDOWS = process.platform === 'win32'; +class Pattern { + constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { + /** + * Indicates whether matches should be excluded from the result set + */ + this.negate = false; + // Pattern overload + let pattern; + if (typeof patternOrNegate === 'string') { + pattern = patternOrNegate.trim(); + } + // Segments overload + else { + // Convert to pattern + segments = segments || []; + assert_1.default(segments.length, `Parameter 'segments' must not empty`); + const root = Pattern.getLiteral(segments[0]); + assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } + } + // Negate + while (pattern.startsWith('!')) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + // Normalize slashes and ensures absolute root + pattern = Pattern.fixupPattern(pattern, homedir); + // Segments + this.segments = new internal_path_1.Path(pattern).segments; + // Trailing slash indicates the pattern should only match directories, not regular files + this.trailingSeparator = pathHelper + .normalizeSeparators(pattern) + .endsWith(path.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + // Search path (literal path prior to the first glob segment) + let foundGlob = false; + const searchSegments = this.segments + .map(x => Pattern.getLiteral(x)) + .filter(x => !foundGlob && !(foundGlob = x === '')); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + // Root RegExp (required when determining partial match) + this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); + this.isImplicitPattern = isImplicitPattern; + // Create minimatch + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); + } + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + // Last segment is globstar? + if (this.segments[this.segments.length - 1] === '**') { + // Normalize slashes + itemPath = pathHelper.normalizeSeparators(itemPath); + // Append a trailing slash. Otherwise Minimatch will not match the directory immediately + // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns + // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. + if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { + // Note, this is safe because the constructor ensures the pattern has an absolute root. + // For example, formats like C: and C:foo on Windows are resolved to an absolute root. 
+ itemPath = `${itemPath}${path.sep}`; + } + } + else { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + // Match + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + } + return internal_match_kind_1.MatchKind.None; + } + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // matchOne does not handle root path correctly + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); + } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + } + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS + .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment + .replace(/\?/g, '[?]') // escape '?' + .replace(/\*/g, '[*]'); // escape '*' + } + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern, homedir) { + // Empty + assert_1.default(pattern, 'pattern cannot be empty'); + // Must not contain `.` segment, unless first segment + // Must not contain `..` segment + const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); + assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r + assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + // Normalize slashes + pattern = pathHelper.normalizeSeparators(pattern); + // Replace leading `.` segment + if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { + pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + } + // Replace leading `~` segment + else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { + homedir = homedir || os.homedir(); + assert_1.default(homedir, 'Unable to determine HOME directory'); + assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); + pattern = Pattern.globEscape(homedir) + pattern.substr(1); + } + // Replace relative drive root, e.g. pattern is C: or C:foo + else if (IS_WINDOWS && + (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(2); + } + // Replace relative root, e.g. 
pattern is \ or \foo + else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); + if (!root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(1); + } + // Otherwise ensure absolute root + else { + pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + } + return pathHelper.normalizeSeparators(pattern); + } + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + static getLiteral(segment) { + let literal = ''; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + // Escape + if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; + } + // Wildcard + else if (c === '*' || c === '?') { + return ''; + } + // Character set + else if (c === '[' && i + 1 < segment.length) { + let set = ''; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + // Escape + if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { + set += segment[++i2]; + continue; + } + // Closed + else if (c2 === ']') { + closed = i2; + break; + } + // Otherwise + else { + set += c2; + } + } + // Closed? + if (closed >= 0) { + // Cannot convert + if (set.length > 1) { + return ''; + } + // Convert to literal + if (set) { + literal += set; + i = closed; + continue; + } + } + // Otherwise fall thru + } + // Append + literal += c; + } + return literal; + } + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=internal-pattern.js.map + +/***/ }), + +/***/ 9117: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SearchState = void 0; +class SearchState { + constructor(path, level) { + this.path = path; + this.level = level; + } +} +exports.SearchState = SearchState; +//# sourceMappingURL=internal-search-state.js.map + +/***/ }), + +/***/ 5526: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 6255: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +const pm = __importStar(__nccwpck_require__(9835)); +const tunnel = __importStar(__nccwpck_require__(4294)); +const undici_1 = __nccwpck_require__(1773); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); var Headers; (function 
(Headers) { Headers["Accept"] = "accept"; Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); +})(Headers || (exports.Headers = Headers = {})); var MediaTypes; (function (MediaTypes) { MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); /** * Returns the proxy URL, depending upon the supplied url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com @@ -8120,7 +8641,20 @@ class HttpClientResponse { })); }); } -} + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } +} exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { const parsedUrl = new URL(requestUrl); @@ -8425,6 +8959,15 @@ class HttpClient { const parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } _prepareRequest(method, requestUrl, headers) { const info = {}; info.parsedUrl = requestUrl; @@ -8524,6 +9067,30 @@ class HttpClient { } return agent; } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } _performExponentialBackoff(retryNumber) { return __awaiter(this, void 0, void 0, function* () { retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); @@ -8603,7 +9170,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa /***/ }), -/***/ 3466: +/***/ 9835: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -8624,7 +9191,13 @@ function getProxyUrl(reqUrl) { } })(); if (proxyVar) { - return new URL(proxyVar); + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { return undefined; @@ -8686,58 +9259,7 @@ function isLoopbackAddress(host) { /***/ }), -/***/ 8090: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.hashFiles = exports.create = void 0; -const internal_globber_1 = __nccwpck_require__(8298); -const internal_hash_files_1 = __nccwpck_require__(2448); -/** - * Constructs a globber - * - * @param patterns Patterns separated by newlines - * @param options Glob options - */ -function create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - return yield internal_globber_1.DefaultGlobber.create(patterns, options); - }); -} -exports.create = create; -/** - * Computes the sha256 hash of a glob - * - * @param patterns Patterns separated by newlines - * @param options Glob options - */ -function hashFiles(patterns, options, verbose = false) { - return __awaiter(this, void 0, void 0, function* () { - let followSymbolicLinks = true; - if (options && typeof options.followSymbolicLinks === 'boolean') { - followSymbolicLinks = options.followSymbolicLinks; - } - const globber = yield create(patterns, { followSymbolicLinks }); - return internal_hash_files_1.hashFiles(globber, verbose); - }); -} -exports.hashFiles = hashFiles; -//# sourceMappingURL=glob.js.map - -/***/ }), - -/***/ 1026: +/***/ 1962: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -8761,45 +9283,173 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOptions = void 0; -const core = __importStar(__nccwpck_require__(2186)); +exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; +const fs = __importStar(__nccwpck_require__(7147)); +const path = __importStar(__nccwpck_require__(1017)); +_a = fs.promises +// export const {open} = 'fs' +, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +// export const {open} = 'fs' +exports.IS_WINDOWS = process.platform === 'win32'; +// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 +exports.UV_FS_O_EXLOCK = 0x10000000; +exports.READONLY = fs.constants.O_RDONLY; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; /** - * Returns a copy with defaults filled in. + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). */ -function getOptions(copy) { - const result = { - followSymbolicLinks: true, - implicitDescendants: true, - matchDirectories: true, - omitBrokenSymbolicLinks: true - }; - if (copy) { - if (typeof copy.followSymbolicLinks === 'boolean') { - result.followSymbolicLinks = copy.followSymbolicLinks; - core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. 
+ */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); } - if (typeof copy.implicitDescendants === 'boolean') { - result.implicitDescendants = copy.implicitDescendants; - core.debug(`implicitDescendants '${result.implicitDescendants}'`); + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } } - if (typeof copy.matchDirectories === 'boolean') { - result.matchDirectories = copy.matchDirectories; - core.debug(`matchDirectories '${result.matchDirectories}'`); + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } } - if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { - result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); } - return result; + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); } -exports.getOptions = getOptions; -//# sourceMappingURL=internal-glob-options-helper.js.map +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +// Get the path of cmd.exe in windows +function getCmdPath() { + var _a; + return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? 
_a : `cmd.exe`; +} +exports.getCmdPath = getCmdPath; +//# sourceMappingURL=io-util.js.map /***/ }), -/***/ 8298: +/***/ 7436: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -8832,217 +9482,281 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } -var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultGlobber = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const fs = __importStar(__nccwpck_require__(7147)); -const globOptionsHelper = __importStar(__nccwpck_require__(1026)); +exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; +const assert_1 = __nccwpck_require__(9491); const path = __importStar(__nccwpck_require__(1017)); -const patternHelper = __importStar(__nccwpck_require__(9005)); -const internal_match_kind_1 = __nccwpck_require__(1063); -const internal_pattern_1 = __nccwpck_require__(4536); -const internal_search_state_1 = __nccwpck_require__(9117); -const IS_WINDOWS = process.platform === 'win32'; -class DefaultGlobber { - constructor(options) { - this.patterns = []; - this.searchPaths = []; - this.options = globOptionsHelper.getOptions(options); - } - getSearchPaths() { - // Return a copy - return this.searchPaths.slice(); - } - glob() { - var e_1, _a; - return __awaiter(this, void 0, void 0, function* () { - const result = []; - try { - for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { - const itemPath = _c.value; - result.push(itemPath); - } +const ioUtil = __importStar(__nccwpck_require__(1962)); +/** + * Copies a file or folder. 
+ * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive, copySourceDirectory } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() && copySourceDirectory + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); - } - finally { if (e_1) throw e_1.error; } + else { + yield cpDirRecursive(source, newDest, 0, force); } - return result; - }); - } - globGenerator() { - return __asyncGenerator(this, arguments, function* globGenerator_1() { - // Fill in defaults options - const options = globOptionsHelper.getOptions(this.options); - // Implicit descendants? - const patterns = []; - for (const pattern of this.patterns) { - patterns.push(pattern); - if (options.implicitDescendants && - (pattern.trailingSeparator || - pattern.segments[pattern.segments.length - 1] !== '**')) { - patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); - } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); } - // Push the search paths - const stack = []; - for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core.debug(`Search path '${searchPath}'`); - // Exists? - try { - // Intentionally using lstat. Detection for broken symlink - // will be performed later (if following symlinks). - yield __await(fs.promises.lstat(searchPath)); - } - catch (err) { - if (err.code === 'ENOENT') { - continue; - } - throw err; - } - stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + yield copyFile(source, newDest, force); + } + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. + */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); } - // Search - const traversalChain = []; // used to detect cycles - while (stack.length) { - // Pop - const item = stack.pop(); - // Match? 
- const match = patternHelper.match(patterns, item.path); - const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); - if (!match && !partialMatch) { - continue; - } - // Stat - const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) - // Broken symlink, or symlink cycle detected, or no longer exists - ); - // Broken symlink, or symlink cycle detected, or no longer exists - if (!stats) { - continue; - } - // Directory - if (stats.isDirectory()) { - // Matched - if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { - yield yield __await(item.path); - } - // Descend? - else if (!partialMatch) { - continue; - } - // Push the child items in reverse - const childLevel = item.level + 1; - const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); - stack.push(...childItems.reverse()); + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); } - // File - else if (match & internal_match_kind_1.MatchKind.File) { - yield yield __await(item.path); + else { + throw new Error('Destination already exists'); } } - }); - } - /** - * Constructs a DefaultGlobber - */ - static create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - const result = new DefaultGlobber(options); - if (IS_WINDOWS) { - patterns = patterns.replace(/\r\n/g, '\n'); - patterns = patterns.replace(/\r/g, '\n'); + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Check for invalid characters + // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); } - const lines = patterns.split('\n').map(x => x.trim()); - for (const line of lines) { - // Empty or comment - if (!line || line.startsWith('#')) { - continue; + } + try { + // note if path does not exist, error is silent + yield ioUtil.rm(inputPath, { + force: true, + maxRetries: 3, + recursive: true, + retryDelay: 300 + }); + } + catch (err) { + throw new Error(`File was unable to be removed ${err}`); + } + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + yield ioUtil.mkdir(fsPath, { recursive: true }); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. 
Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); } - // Pattern else { - result.patterns.push(new internal_pattern_1.Pattern(line)); + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); } } - result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); return result; - }); - } - static stat(item, options, traversalChain) { - return __awaiter(this, void 0, void 0, function* () { - // Note: - // `stat` returns info about the target of a symlink (or symlink chain) - // `lstat` returns info about a symlink itself - let stats; - if (options.followSymbolicLinks) { - try { - // Use `stat` (following symlinks) - stats = yield fs.promises.stat(item.path); + } + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ''; + }); +} +exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. + * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); } - catch (err) { - if (err.code === 'ENOENT') { - if (options.omitBrokenSymbolicLinks) { - core.debug(`Broken symlink '${item.path}'`); - return undefined; - } - throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); - } - throw err; + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; + } + return []; + } + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); } } + } + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; + }); +} +exports.findInPath = findInPath; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + const copySourceDirectory = options.copySourceDirectory == null + ? 
true + : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } else { - // Use `lstat` (not following symlinks) - stats = yield fs.promises.lstat(item.path); + yield copyFile(srcFile, destFile, force); } - // Note, isDirectory() returns false for the lstat of a symlink - if (stats.isDirectory() && options.followSymbolicLinks) { - // Get the realpath - const realPath = yield fs.promises.realpath(item.path); - // Fixup the traversal chain to match the item level - while (traversalChain.length >= item.level) { - traversalChain.pop(); - } - // Test for a cycle - if (traversalChain.some((x) => x === realPath)) { - core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); - return undefined; + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); } - // Update the traversal chain - traversalChain.push(realPath); + // other errors = it doesn't exist, no work to do } - return stats; - }); - } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); } -exports.DefaultGlobber = DefaultGlobber; -//# sourceMappingURL=internal-globber.js.map +//# sourceMappingURL=io.js.map /***/ }), -/***/ 2448: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 2473: +/***/ (function(module, exports, __nccwpck_require__) { "use strict"; @@ -9074,101 +9788,109 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.hashFiles = void 0; -const crypto = __importStar(__nccwpck_require__(6113)); -const core = __importStar(__nccwpck_require__(2186)); -const fs = __importStar(__nccwpck_require__(7147)); -const stream = __importStar(__nccwpck_require__(2781)); -const util = __importStar(__nccwpck_require__(3837)); -const path = __importStar(__nccwpck_require__(1017)); -function hashFiles(globber, verbose = false) { - var e_1, _a; - var _b; +exports._readLinuxVersionFile = exports._getOsVersion = exports._findMatch = void 0; +const semver = __importStar(__nccwpck_require__(562)); +const core_1 = __nccwpck_require__(2186); +// needs to be require for core node modules to be mocked +/* eslint @typescript-eslint/no-require-imports: 0 */ +const os = __nccwpck_require__(2037); +const cp = __nccwpck_require__(2081); +const fs = __nccwpck_require__(7147); +function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core.info : core.debug; - let hasMatch = false; - const githubWorkspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); - const result = crypto.createHash('sha256'); - let count = 0; - try { - for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { - const file = _d.value; - writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path.sep}`)) { - writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); - continue; - } - if (fs.statSync(file).isDirectory()) { - writeDelegate(`Skip directory '${file}'.`); - continue; - } - const hash = crypto.createHash('sha256'); - const pipeline = util.promisify(stream.pipeline); - yield pipeline(fs.createReadStream(file), hash); - result.write(hash.digest()); - count++; - if (!hasMatch) { - hasMatch = true; + const platFilter = os.platform(); + let result; + let match; + let file; + for (const candidate of candidates) { + const version = candidate.version; + core_1.debug(`check ${version} satisfies ${versionSpec}`); + if (semver.satisfies(version, versionSpec) && + (!stable || candidate.stable === stable)) { + file = candidate.files.find(item => { + core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`); + let chk = item.arch === archFilter && item.platform === platFilter; + if (chk && item.platform_version) { + const osVersion = module.exports._getOsVersion(); + if (osVersion === item.platform_version) { + chk = true; + } + else { + chk = semver.satisfies(osVersion, item.platform_version); + } + } + return chk; + }); + if (file) { + core_1.debug(`matched ${candidate.version}`); + match = candidate; + break; } } } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); - } - finally { if (e_1) throw e_1.error; } - } - result.end(); - if (hasMatch) { - writeDelegate(`Found ${count} files to hash.`); - return result.digest('hex'); - } - else { - writeDelegate(`No matches found 
for glob`); - return ''; + if (match && file) { + // clone since we're mutating the file list to be only the file that matches + result = Object.assign({}, match); + result.files = [file]; } + return result; }); } -exports.hashFiles = hashFiles; -//# sourceMappingURL=internal-hash-files.js.map - -/***/ }), - -/***/ 1063: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MatchKind = void 0; -/** - * Indicates whether a pattern matches a path - */ -var MatchKind; -(function (MatchKind) { - /** Not matched */ - MatchKind[MatchKind["None"] = 0] = "None"; - /** Matched if the path is a directory */ - MatchKind[MatchKind["Directory"] = 1] = "Directory"; - /** Matched if the path is a regular file */ - MatchKind[MatchKind["File"] = 2] = "File"; - /** Matched */ - MatchKind[MatchKind["All"] = 3] = "All"; -})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); -//# sourceMappingURL=internal-match-kind.js.map +exports._findMatch = _findMatch; +function _getOsVersion() { + // TODO: add windows and other linux, arm variants + // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python) + const plat = os.platform(); + let version = ''; + if (plat === 'darwin') { + version = cp.execSync('sw_vers -productVersion').toString(); + } + else if (plat === 'linux') { + // lsb_release process not in some containers, readfile + // Run cat /etc/lsb-release + // DISTRIB_ID=Ubuntu + // DISTRIB_RELEASE=18.04 + // DISTRIB_CODENAME=bionic + // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS" + const lsbContents = module.exports._readLinuxVersionFile(); + if (lsbContents) { + const lines = lsbContents.split('\n'); + for (const line of lines) { + const parts = line.split('='); + if (parts.length === 2 && + (parts[0].trim() === 'VERSION_ID' || + parts[0].trim() === 'DISTRIB_RELEASE')) { + version = parts[1] + .trim() + .replace(/^"/, '') + .replace(/"$/, ''); + break; + } + } + } + } + return version; +} +exports._getOsVersion = _getOsVersion; +function _readLinuxVersionFile() { + const lsbReleaseFile = '/etc/lsb-release'; + const osReleaseFile = '/etc/os-release'; + let contents = ''; + if (fs.existsSync(lsbReleaseFile)) { + contents = fs.readFileSync(lsbReleaseFile).toString(); + } + else if (fs.existsSync(osReleaseFile)) { + contents = fs.readFileSync(osReleaseFile).toString(); + } + return contents; +} +exports._readLinuxVersionFile = _readLinuxVersionFile; +//# sourceMappingURL=manifest.js.map /***/ }), -/***/ 1849: +/***/ 8279: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -9192,188 +9914,73 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; -const path = __importStar(__nccwpck_require__(1017)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. - * - * For example, on Linux/macOS: - * - `/ => /` - * - `/hello => /` - * - * For example, on Windows: - * - `C:\ => C:\` - * - `C:\hello => C:\` - * - `C: => C:` - * - `C:hello => C:` - * - `\ => \` - * - `\hello => \` - * - `\\hello => \\hello` - * - `\\hello\world => \\hello\world` - */ -function dirname(p) { - // Normalize slashes and trim unnecessary trailing slash - p = safeTrimTrailingSeparator(p); - // Windows UNC root, e.g. \\hello or \\hello\world - if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { - return p; - } - // Get dirname - let result = path.dirname(p); - // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ - if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { - result = safeTrimTrailingSeparator(result); - } - return result; -} -exports.dirname = dirname; +exports.RetryHelper = void 0; +const core = __importStar(__nccwpck_require__(2186)); /** - * Roots the path if not already rooted. On Windows, relative roots like `\` - * or `C:` are expanded based on the current working directory. + * Internal class for retries */ -function ensureAbsoluteRoot(root, itemPath) { - assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); - assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); - // Already rooted - if (hasAbsoluteRoot(itemPath)) { - return itemPath; +class RetryHelper { + constructor(maxAttempts, minSeconds, maxSeconds) { + if (maxAttempts < 1) { + throw new Error('max attempts should be greater than or equal to 1'); + } + this.maxAttempts = maxAttempts; + this.minSeconds = Math.floor(minSeconds); + this.maxSeconds = Math.floor(maxSeconds); + if (this.minSeconds > this.maxSeconds) { + throw new Error('min seconds should be less than or equal to max seconds'); + } } - // Windows - if (IS_WINDOWS) { - // Check for itemPath like C: or C:foo - if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { - let cwd = process.cwd(); - assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - // Drive letter matches cwd? Expand to cwd - if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { - // Drive only, e.g. C: - if (itemPath.length === 2) { - // Preserve specified drive letter case (upper or lower) - return `${itemPath[0]}:\\${cwd.substr(3)}`; + execute(action, isRetryable) { + return __awaiter(this, void 0, void 0, function* () { + let attempt = 1; + while (attempt < this.maxAttempts) { + // Try + try { + return yield action(); } - // Drive + path, e.g. 
C:foo - else { - if (!cwd.endsWith('\\')) { - cwd += '\\'; + catch (err) { + if (isRetryable && !isRetryable(err)) { + throw err; } - // Preserve specified drive letter case (upper or lower) - return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + core.info(err.message); } + // Sleep + const seconds = this.getSleepAmount(); + core.info(`Waiting ${seconds} seconds before trying again`); + yield this.sleep(seconds); + attempt++; } - // Different drive - else { - return `${itemPath[0]}:\\${itemPath.substr(2)}`; - } - } - // Check for itemPath like \ or \foo - else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { - const cwd = process.cwd(); - assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - return `${cwd[0]}:\\${itemPath.substr(1)}`; - } - } - assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); - // Otherwise ensure root ends with a separator - if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { - // Intentionally empty - } - else { - // Append separator - root += path.sep; - } - return root + itemPath; -} -exports.ensureAbsoluteRoot = ensureAbsoluteRoot; -/** - * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: - * `\\hello\share` and `C:\hello` (and using alternate separator). - */ -function hasAbsoluteRoot(itemPath) { - assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); - // Normalize separators - itemPath = normalizeSeparators(itemPath); - // Windows - if (IS_WINDOWS) { - // E.g. \\hello\share or C:\hello - return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); - } - // E.g. /hello - return itemPath.startsWith('/'); -} -exports.hasAbsoluteRoot = hasAbsoluteRoot; -/** - * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: - * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). - */ -function hasRoot(itemPath) { - assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); - // Normalize separators - itemPath = normalizeSeparators(itemPath); - // Windows - if (IS_WINDOWS) { - // E.g. \ or \hello or \\hello - // E.g. C: or C:\hello - return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); - } - // E.g. /hello - return itemPath.startsWith('/'); -} -exports.hasRoot = hasRoot; -/** - * Removes redundant slashes and converts `/` to `\` on Windows - */ -function normalizeSeparators(p) { - p = p || ''; - // Windows - if (IS_WINDOWS) { - // Convert slashes on Windows - p = p.replace(/\//g, '\\'); - // Remove redundant slashes - const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello - return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC - } - // Remove redundant slashes - return p.replace(/\/\/+/g, '/'); -} -exports.normalizeSeparators = normalizeSeparators; -/** - * Normalizes the path separators and trims the trailing separator (when safe). 
- * For example, `/foo/ => /foo` but `/ => /` - */ -function safeTrimTrailingSeparator(p) { - // Short-circuit if empty - if (!p) { - return ''; - } - // Normalize separators - p = normalizeSeparators(p); - // No trailing slash - if (!p.endsWith(path.sep)) { - return p; + // Last attempt + return yield action(); + }); } - // Check '/' on Linux/macOS and '\' on Windows - if (p === path.sep) { - return p; + getSleepAmount() { + return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + + this.minSeconds); } - // On Windows check if drive root. E.g. C:\ - if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { - return p; + sleep(seconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, seconds * 1000)); + }); } - // Otherwise trim trailing slash - return p.substr(0, p.length - 1); } -exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; -//# sourceMappingURL=internal-path-helper.js.map +exports.RetryHelper = RetryHelper; +//# sourceMappingURL=retry-helper.js.map /***/ }), -/***/ 6836: +/***/ 7784: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -9397,5165 +10004,3289 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Path = void 0; +exports.evaluateVersions = exports.isExplicitVersion = exports.findFromManifest = exports.getManifestFromRepo = exports.findAllVersions = exports.find = exports.cacheFile = exports.cacheDir = exports.extractZip = exports.extractXar = exports.extractTar = exports.extract7z = exports.downloadTool = exports.HTTPError = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const io = __importStar(__nccwpck_require__(7436)); +const fs = __importStar(__nccwpck_require__(7147)); +const mm = __importStar(__nccwpck_require__(2473)); +const os = __importStar(__nccwpck_require__(2037)); const path = __importStar(__nccwpck_require__(1017)); -const pathHelper = __importStar(__nccwpck_require__(1849)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); +const httpm = __importStar(__nccwpck_require__(6255)); +const semver = __importStar(__nccwpck_require__(562)); +const stream = __importStar(__nccwpck_require__(2781)); +const util = __importStar(__nccwpck_require__(3837)); +const assert_1 = __nccwpck_require__(9491); +const v4_1 = __importDefault(__nccwpck_require__(7468)); +const exec_1 = __nccwpck_require__(1514); +const retry_helper_1 = __nccwpck_require__(8279); +class HTTPError extends Error { + constructor(httpStatusCode) { + super(`Unexpected HTTP response: ${httpStatusCode}`); + this.httpStatusCode = httpStatusCode; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.HTTPError = HTTPError; const IS_WINDOWS = process.platform === 'win32'; +const IS_MAC = process.platform === 'darwin'; +const userAgent = 'actions/tool-cache'; /** - * Helper class for parsing paths into segments + * Download a tool from an url and stream it into a file + * + * @param url url of tool to download + * @param dest path to download tool + * @param auth authorization header + * @param headers other headers + * @returns path to downloaded tool */ -class Path { - /** - * Constructs a Path - * @param itemPath Path or array of segments - */ - constructor(itemPath) { - this.segments = []; - // String - if (typeof itemPath === 'string') { - assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // Not rooted - if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path.sep); - } - // Rooted - else { - // Add all segments, while not at the root - let remaining = itemPath; - let dir = pathHelper.dirname(remaining); - while (dir !== remaining) { - // Add the segment - const basename = path.basename(remaining); - this.segments.unshift(basename); - // Truncate the last segment - remaining = dir; - dir = pathHelper.dirname(remaining); +function downloadTool(url, dest, auth, headers) { + return __awaiter(this, void 0, void 0, function* () { + dest = dest || path.join(_getTempDirectory(), v4_1.default()); + yield io.mkdirP(path.dirname(dest)); + core.debug(`Downloading ${url}`); + core.debug(`Destination ${dest}`); + const maxAttempts = 3; + const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10); + const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20); + const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds); + return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () { + return yield downloadToolAttempt(url, dest || '', auth, headers); 
+ }), (err) => { + if (err instanceof HTTPError && err.httpStatusCode) { + // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests + if (err.httpStatusCode < 500 && + err.httpStatusCode !== 408 && + err.httpStatusCode !== 429) { + return false; } - // Remainder is the root - this.segments.unshift(remaining); } + // Otherwise retry + return true; + }); + }); +} +exports.downloadTool = downloadTool; +function downloadToolAttempt(url, dest, auth, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(dest)) { + throw new Error(`Destination file path ${dest} already exists`); } - // Array - else { - // Must not be empty - assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); - // Each segment - for (let i = 0; i < itemPath.length; i++) { - let segment = itemPath[i]; - // Must not be empty - assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); - // Normalize slashes - segment = pathHelper.normalizeSeparators(itemPath[i]); - // Root segment - if (i === 0 && pathHelper.hasRoot(segment)) { - segment = pathHelper.safeTrimTrailingSeparator(segment); - assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); - this.segments.push(segment); - } - // All other segments - else { - // Must not contain slash - assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); - this.segments.push(segment); - } + // Get the response headers + const http = new httpm.HttpClient(userAgent, [], { + allowRetries: false + }); + if (auth) { + core.debug('set auth'); + if (headers === undefined) { + headers = {}; } + headers.authorization = auth; } - } - /** - * Converts the path to it's string representation - */ - toString() { - // First segment - let result = this.segments[0]; - // All others - let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); - for (let i = 1; i < this.segments.length; i++) { - if (skipSlash) { - skipSlash = false; - } - else { - result += path.sep; + const response = yield http.get(url, headers); + if (response.message.statusCode !== 200) { + const err = new HTTPError(response.message.statusCode); + core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + throw err; + } + // Download the response body + const pipeline = util.promisify(stream.pipeline); + const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message); + const readStream = responseMessageFactory(); + let succeeded = false; + try { + yield pipeline(readStream, fs.createWriteStream(dest)); + core.debug('download complete'); + succeeded = true; + return dest; + } + finally { + // Error, delete dest before retry + if (!succeeded) { + core.debug('download failed'); + try { + yield io.rmRF(dest); + } + catch (err) { + core.debug(`Failed to delete '${dest}'. ${err.message}`); + } } - result += this.segments[i]; } - return result; - } + }); } -exports.Path = Path; -//# sourceMappingURL=internal-path.js.map - -/***/ }), - -/***/ 9005: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.partialMatch = exports.match = exports.getSearchPaths = void 0; -const pathHelper = __importStar(__nccwpck_require__(1849)); -const internal_match_kind_1 = __nccwpck_require__(1063); -const IS_WINDOWS = process.platform === 'win32'; /** - * Given an array of patterns, returns an array of paths to search. - * Duplicates and paths under other included paths are filtered out. + * Extract a .7z file + * + * @param file path to the .7z file + * @param dest destination directory. Optional. + * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this + * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will + * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is + * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line + * interface, it is smaller than the full command line interface, and it does support long paths. At the + * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website. + * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path + * to 7zr.exe can be pass to this function. + * @returns path to the destination directory */ -function getSearchPaths(patterns) { - // Ignore negate patterns - patterns = patterns.filter(x => !x.negate); - // Create a map of all search paths - const searchPathMap = {}; - for (const pattern of patterns) { - const key = IS_WINDOWS - ? pattern.searchPath.toUpperCase() - : pattern.searchPath; - searchPathMap[key] = 'candidate'; - } - const result = []; - for (const pattern of patterns) { - // Check if already included - const key = IS_WINDOWS - ? pattern.searchPath.toUpperCase() - : pattern.searchPath; - if (searchPathMap[key] === 'included') { - continue; - } - // Check for an ancestor search path - let foundAncestor = false; - let tempKey = key; - let parent = pathHelper.dirname(tempKey); - while (parent !== tempKey) { - if (searchPathMap[parent]) { - foundAncestor = true; - break; +function extract7z(file, dest, _7zPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS'); + assert_1.ok(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + const originalCwd = process.cwd(); + process.chdir(dest); + if (_7zPath) { + try { + const logLevel = core.isDebug() ? 
'-bb1' : '-bb0'; + const args = [ + 'x', + logLevel, + '-bd', + '-sccUTF-8', + file + ]; + const options = { + silent: true + }; + yield exec_1.exec(`"${_7zPath}"`, args, options); + } + finally { + process.chdir(originalCwd); } - tempKey = parent; - parent = pathHelper.dirname(tempKey); } - // Include the search pattern in the result - if (!foundAncestor) { - result.push(pattern.searchPath); - searchPathMap[key] = 'included'; + else { + const escapedScript = path + .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1') + .replace(/'/g, "''") + .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; + const args = [ + '-NoLogo', + '-Sta', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + command + ]; + const options = { + silent: true + }; + try { + const powershellPath = yield io.which('powershell', true); + yield exec_1.exec(`"${powershellPath}"`, args, options); + } + finally { + process.chdir(originalCwd); + } } - } - return result; + return dest; + }); } -exports.getSearchPaths = getSearchPaths; +exports.extract7z = extract7z; /** - * Matches the patterns against the path + * Extract a compressed tar archive + * + * @param file path to the tar + * @param dest destination directory. Optional. + * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional. + * @returns path to the destination directory */ -function match(patterns, itemPath) { - let result = internal_match_kind_1.MatchKind.None; - for (const pattern of patterns) { - if (pattern.negate) { - result &= ~pattern.match(itemPath); +function extractTar(file, dest, flags = 'xz') { + return __awaiter(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); + } + // Create dest + dest = yield _createExtractFolder(dest); + // Determine whether GNU tar + core.debug('Checking tar --version'); + let versionOutput = ''; + yield exec_1.exec('tar --version', [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + core.debug(versionOutput.trim()); + const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR'); + // Initialize args + let args; + if (flags instanceof Array) { + args = flags; } else { - result |= pattern.match(itemPath); + args = [flags]; } - } - return result; + if (core.isDebug() && !flags.includes('v')) { + args.push('-v'); + } + let destArg = dest; + let fileArg = file; + if (IS_WINDOWS && isGnuTar) { + args.push('--force-local'); + destArg = dest.replace(/\\/g, '/'); + // Technically only the dest needs to have `/` but for aesthetic consistency + // convert slashes in the file arg too. 
+ fileArg = file.replace(/\\/g, '/'); + } + if (isGnuTar) { + // Suppress warnings when using GNU tar to extract archives created by BSD tar + args.push('--warning=no-unknown-keyword'); + args.push('--overwrite'); + } + args.push('-C', destArg, '-f', fileArg); + yield exec_1.exec(`tar`, args); + return dest; + }); } -exports.match = match; +exports.extractTar = extractTar; /** - * Checks whether to descend further into the directory + * Extract a xar compatible archive + * + * @param file path to the archive + * @param dest destination directory. Optional. + * @param flags flags for the xar. Optional. + * @returns path to the destination directory */ -function partialMatch(patterns, itemPath) { - return patterns.some(x => !x.negate && x.partialMatch(itemPath)); -} -exports.partialMatch = partialMatch; -//# sourceMappingURL=internal-pattern-helper.js.map - -/***/ }), - -/***/ 4536: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Pattern = void 0; -const os = __importStar(__nccwpck_require__(2037)); -const path = __importStar(__nccwpck_require__(1017)); -const pathHelper = __importStar(__nccwpck_require__(1849)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); -const minimatch_1 = __nccwpck_require__(3973); -const internal_match_kind_1 = __nccwpck_require__(1063); -const internal_path_1 = __nccwpck_require__(6836); -const IS_WINDOWS = process.platform === 'win32'; -class Pattern { - constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { - /** - * Indicates whether matches should be excluded from the result set - */ - this.negate = false; - // Pattern overload - let pattern; - if (typeof patternOrNegate === 'string') { - pattern = patternOrNegate.trim(); +function extractXar(file, dest, flags = []) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(IS_MAC, 'extractXar() not supported on current OS'); + assert_1.ok(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + let args; + if (flags instanceof Array) { + args = flags; } - // Segments overload else { - // Convert to pattern - segments = segments || []; - assert_1.default(segments.length, `Parameter 'segments' must not empty`); - const root = Pattern.getLiteral(segments[0]); - assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); - pattern = new internal_path_1.Path(segments).toString().trim(); - if (patternOrNegate) { - pattern = `!${pattern}`; - } - } - // Negate - while (pattern.startsWith('!')) { - this.negate = !this.negate; - pattern = pattern.substr(1).trim(); + args = [flags]; } - // Normalize slashes and ensures absolute root - pattern = Pattern.fixupPattern(pattern, homedir); - // Segments - this.segments = new internal_path_1.Path(pattern).segments; - // Trailing slash indicates the pattern should only match directories, not regular files - this.trailingSeparator = pathHelper - .normalizeSeparators(pattern) - .endsWith(path.sep); - pattern = pathHelper.safeTrimTrailingSeparator(pattern); - // Search path (literal path prior to the first glob segment) - let foundGlob = false; - const searchSegments = this.segments - .map(x => Pattern.getLiteral(x)) - .filter(x => !foundGlob && !(foundGlob = x === '')); - this.searchPath = new internal_path_1.Path(searchSegments).toString(); - // Root RegExp (required when determining partial match) - this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); - this.isImplicitPattern = isImplicitPattern; - // Create minimatch - const minimatchOptions = { - dot: true, - nobrace: true, - nocase: IS_WINDOWS, - nocomment: true, - noext: true, - nonegate: true - }; - pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; - this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); - } - /** - * Matches the pattern against the specified path - */ - match(itemPath) { - // Last segment is globstar? - if (this.segments[this.segments.length - 1] === '**') { - // Normalize slashes - itemPath = pathHelper.normalizeSeparators(itemPath); - // Append a trailing slash. Otherwise Minimatch will not match the directory immediately - // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns - // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. 
- if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { - // Note, this is safe because the constructor ensures the pattern has an absolute root. - // For example, formats like C: and C:foo on Windows are resolved to an absolute root. - itemPath = `${itemPath}${path.sep}`; - } + args.push('-x', '-C', dest, '-f', file); + if (core.isDebug()) { + args.push('-v'); } - else { - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + const xarPath = yield io.which('xar', true); + yield exec_1.exec(`"${xarPath}"`, _unique(args)); + return dest; + }); +} +exports.extractXar = extractXar; +/** + * Extract a zip + * + * @param file path to the zip + * @param dest destination directory. Optional. + * @returns path to the destination directory + */ +function extractZip(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); } - // Match - if (this.minimatch.match(itemPath)) { - return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + dest = yield _createExtractFolder(dest); + if (IS_WINDOWS) { + yield extractZipWin(file, dest); } - return internal_match_kind_1.MatchKind.None; - } - /** - * Indicates whether the pattern may match descendants of the specified path - */ - partialMatch(itemPath) { - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // matchOne does not handle root path correctly - if (pathHelper.dirname(itemPath) === itemPath) { - return this.rootRegExp.test(itemPath); + else { + yield extractZipNix(file, dest); } - return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); - } - /** - * Escapes glob patterns within a path - */ - static globEscape(s) { - return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS - .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment - .replace(/\?/g, '[?]') // escape '?' - .replace(/\*/g, '[*]'); // escape '*' - } - /** - * Normalizes slashes and ensures absolute root - */ - static fixupPattern(pattern, homedir) { - // Empty - assert_1.default(pattern, 'pattern cannot be empty'); - // Must not contain `.` segment, unless first segment - // Must not contain `..` segment - const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); - assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); - // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r - assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); - // Normalize slashes - pattern = pathHelper.normalizeSeparators(pattern); - // Replace leading `.` segment - if (pattern === '.' 
|| pattern.startsWith(`.${path.sep}`)) { - pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + return dest; + }); +} +exports.extractZip = extractZip; +function extractZipWin(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + // build the powershell command + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines + const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const pwshPath = yield io.which('pwsh', false); + //To match the file overwrite behavior on nix systems, we use the overwrite = true flag for ExtractToDirectory + //and the -Force flag for Expand-Archive as a fallback + if (pwshPath) { + //attempt to use pwsh with ExtractToDirectory, if this fails attempt Expand-Archive + const pwshCommand = [ + `$ErrorActionPreference = 'Stop' ;`, + `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`, + `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`, + `catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;` + ].join(' '); + const args = [ + '-NoLogo', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + pwshCommand + ]; + core.debug(`Using pwsh at path: ${pwshPath}`); + yield exec_1.exec(`"${pwshPath}"`, args); } - // Replace leading `~` segment - else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { - homedir = homedir || os.homedir(); - assert_1.default(homedir, 'Unable to determine HOME directory'); - assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); - pattern = Pattern.globEscape(homedir) + pattern.substr(1); + else { + const powershellCommand = [ + `$ErrorActionPreference = 'Stop' ;`, + `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`, + `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`, + `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }` + ].join(' '); + const args = [ + '-NoLogo', + '-Sta', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + powershellCommand + ]; + const powershellPath = yield io.which('powershell', true); + core.debug(`Using powershell at path: ${powershellPath}`); + yield exec_1.exec(`"${powershellPath}"`, args); } - // Replace relative drive root, e.g. pattern is C: or C:foo - else if (IS_WINDOWS && - (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { - let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); - if (pattern.length > 2 && !root.endsWith('\\')) { - root += '\\'; - } - pattern = Pattern.globEscape(root) + pattern.substr(2); + }); +} +function extractZipNix(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + const unzipPath = yield io.which('unzip', true); + const args = [file]; + if (!core.isDebug()) { + args.unshift('-q'); } - // Replace relative root, e.g. 
pattern is \ or \foo - else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { - let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); - if (!root.endsWith('\\')) { - root += '\\'; - } - pattern = Pattern.globEscape(root) + pattern.substr(1); + args.unshift('-o'); //overwrite with -o, otherwise a prompt is shown which freezes the run + yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest }); + }); +} +/** + * Caches a directory and installs it into the tool cacheDir + * + * @param sourceDir the directory to cache into tools + * @param tool tool name + * @param version version of the tool. semver format + * @param arch architecture of the tool. Optional. Defaults to machine architecture + */ +function cacheDir(sourceDir, tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { + version = semver.clean(version) || version; + arch = arch || os.arch(); + core.debug(`Caching tool ${tool} ${version} ${arch}`); + core.debug(`source dir: ${sourceDir}`); + if (!fs.statSync(sourceDir).isDirectory()) { + throw new Error('sourceDir is not a directory'); } - // Otherwise ensure absolute root - else { - pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + // Create the tool dir + const destPath = yield _createToolPath(tool, version, arch); + // copy each child item. do not move. move can fail on Windows + // due to anti-virus software having an open handle on a file. + for (const itemName of fs.readdirSync(sourceDir)) { + const s = path.join(sourceDir, itemName); + yield io.cp(s, destPath, { recursive: true }); } - return pathHelper.normalizeSeparators(pattern); - } - /** - * Attempts to unescape a pattern segment to create a literal path segment. - * Otherwise returns empty string. - */ - static getLiteral(segment) { - let literal = ''; - for (let i = 0; i < segment.length; i++) { - const c = segment[i]; - // Escape - if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { - literal += segment[++i]; - continue; - } - // Wildcard - else if (c === '*' || c === '?') { - return ''; - } - // Character set - else if (c === '[' && i + 1 < segment.length) { - let set = ''; - let closed = -1; - for (let i2 = i + 1; i2 < segment.length; i2++) { - const c2 = segment[i2]; - // Escape - if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { - set += segment[++i2]; - continue; - } - // Closed - else if (c2 === ']') { - closed = i2; - break; - } - // Otherwise - else { - set += c2; - } - } - // Closed? 
- if (closed >= 0) { - // Cannot convert - if (set.length > 1) { - return ''; - } - // Convert to literal - if (set) { - literal += set; - i = closed; - continue; - } - } - // Otherwise fall thru - } - // Append - literal += c; - } - return literal; - } - /** - * Escapes regexp special characters - * https://javascript.info/regexp-escaping - */ - static regExpEscape(s) { - return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); - } -} -exports.Pattern = Pattern; -//# sourceMappingURL=internal-pattern.js.map - -/***/ }), - -/***/ 9117: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SearchState = void 0; -class SearchState { - constructor(path, level) { - this.path = path; - this.level = level; - } + // write .complete + _completeToolPath(tool, version, arch); + return destPath; + }); } -exports.SearchState = SearchState; -//# sourceMappingURL=internal-search-state.js.map - -/***/ }), - -/***/ 9925: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const http = __nccwpck_require__(3685); -const https = __nccwpck_require__(5687); -const pm = __nccwpck_require__(6443); -let tunnel; -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +exports.cacheDir = cacheDir; /** - * Returns the proxy URL, depending upon the supplied 
url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + * Caches a downloaded file (GUID) and installs it + * into the tool cache with a given targetName + * + * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid. + * @param targetFile the name of the file name in the tools directory + * @param tool tool name + * @param version version of the tool. semver format + * @param arch architecture of the tool. Optional. Defaults to machine architecture */ -function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; +function cacheFile(sourceFile, targetFile, tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { + version = semver.clean(version) || version; + arch = arch || os.arch(); + core.debug(`Caching tool ${tool} ${version} ${arch}`); + core.debug(`source file: ${sourceFile}`); + if (!fs.statSync(sourceFile).isFile()) { + throw new Error('sourceFile is not a file'); + } + // create the tool dir + const destFolder = yield _createToolPath(tool, version, arch); + // copy instead of move. move can fail on Windows due to + // anti-virus software having an open handle on a file. + const destPath = path.join(destFolder, targetFile); + core.debug(`destination file ${destPath}`); + yield io.cp(sourceFile, destPath); + // write .complete + _completeToolPath(tool, version, arch); + return destFolder; + }); } -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); +exports.cacheFile = cacheFile; +/** + * Finds the path to a tool version in the local installed tool cache + * + * @param toolName name of the tool + * @param versionSpec version of the tool + * @param arch optional arch. 
defaults to arch of computer + */ +function find(toolName, versionSpec, arch) { + if (!toolName) { + throw new Error('toolName parameter is required'); } -} -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; + if (!versionSpec) { + throw new Error('versionSpec parameter is required'); } - readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - }); + arch = arch || os.arch(); + // attempt to resolve an explicit version + if (!isExplicitVersion(versionSpec)) { + const localVersions = findAllVersions(toolName, arch); + const match = evaluateVersions(localVersions, versionSpec); + versionSpec = match; } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - let parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; - } + // check for the explicit version in the cache + let toolPath = ''; + if (versionSpec) { + versionSpec = semver.clean(versionSpec) || ''; + const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch); + core.debug(`checking cache: ${cachePath}`); + if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) { + core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + toolPath = cachePath; + } + else { + core.debug('not found'); } } - options(requestUrl, additionalHeaders) { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - } - get(requestUrl, additionalHeaders) { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - } - del(requestUrl, additionalHeaders) { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - } - post(requestUrl, data, additionalHeaders) { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - } - patch(requestUrl, data, additionalHeaders) { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - } - put(requestUrl, data, additionalHeaders) { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - } - head(requestUrl, 
additionalHeaders) { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return this.request(verb, requestUrl, stream, additionalHeaders); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise - */ - async getJson(requestUrl, additionalHeaders = {}) { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - let res = await this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async postJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async putJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async patchJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - async request(verb, requestUrl, data, headers) { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - let parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - while (numTries < maxTries) { - response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (let i = 0; i < this.handlers.length; i++) { - if (this.handlers[i].canHandleAuthentication(response)) { - authenticationHandler = this.handlers[i]; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. 
- return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - let parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } + return toolPath; +} +exports.find = find; +/** + * Finds the paths to all versions of a tool that are installed in the local tool cache + * + * @param toolName name of the tool + * @param arch optional arch. defaults to arch of computer + */ +function findAllVersions(toolName, arch) { + const versions = []; + arch = arch || os.arch(); + const toolPath = path.join(_getCacheDirectory(), toolName); + if (fs.existsSync(toolPath)) { + const children = fs.readdirSync(toolPath); + for (const child of children) { + if (isExplicitVersion(child)) { + const fullPath = path.join(toolPath, child, arch || ''); + if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) { + versions.push(child); } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = await this.requestRaw(info, data); - redirectsRemaining--; - } - if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - await response.readBody(); - await this._performExponentialBackoff(numTries); } } - return response; } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); + return versions; +} +exports.findAllVersions = findAllVersions; +function getManifestFromRepo(owner, repo, auth, branch = 'master') { + return __awaiter(this, void 0, void 0, function* () { + let releases = []; + const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`; + const http = new httpm.HttpClient('tool-cache'); + const headers = {}; + if (auth) { + core.debug('set auth'); + headers.authorization = auth; } - this._disposed = true; - } - /** - * Raw request. - * @param info - * @param data - */ - requestRaw(info, data) { - return new Promise((resolve, reject) => { - let callbackForResult = function (err, res) { - if (err) { - reject(err); - } - resolve(res); - }; - this.requestRawWithCallback(info, data, callbackForResult); - }); - } - /** - * Raw request with callback. 
- * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - let socket; - if (typeof data === 'string') { - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + const response = yield http.getJson(treeUrl, headers); + if (!response.result) { + return releases; } - let callbackCalled = false; - let handleResult = (err, res) => { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); + let manifestUrl = ''; + for (const item of response.result.tree) { + if (item.path === 'versions-manifest.json') { + manifestUrl = item.url; + break; } - }; - let req = info.httpModule.request(info.options, (msg) => { - let res = new HttpClientResponse(msg); - handleResult(null, res); - }); - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); + } + headers['accept'] = 'application/vnd.github.VERSION.raw'; + let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody(); + if (versionsRaw) { + // shouldn't be needed but protects against invalid json saved with BOM + versionsRaw = versionsRaw.replace(/^\uFEFF/, ''); + try { + releases = JSON.parse(versionsRaw); + } + catch (_a) { + core.debug('Invalid json'); } - handleResult(new Error('Request timeout: ' + info.options.path), null); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err, null); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); + return releases; + }); +} +exports.getManifestFromRepo = getManifestFromRepo; +function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) { + return __awaiter(this, void 0, void 0, function* () { + // wrap the internal impl + const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); + return match; + }); +} +exports.findFromManifest = findFromManifest; +function _createExtractFolder(dest) { + return __awaiter(this, void 0, void 0, function* () { + if (!dest) { + // create a temp dir + dest = path.join(_getTempDirectory(), v4_1.default()); } - else { - req.end(); + yield io.mkdirP(dest); + return dest; + }); +} +function _createToolPath(tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { + const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); + core.debug(`destination ${folderPath}`); + const markerPath = `${folderPath}.complete`; + yield io.rmRF(folderPath); + yield io.rmRF(markerPath); + yield io.mkdirP(folderPath); + return folderPath; + }); +} +function _completeToolPath(tool, version, arch) { + const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); + const markerPath = `${folderPath}.complete`; + fs.writeFileSync(markerPath, ''); + core.debug('finished caching tool'); +} +/** + * Check if version string is explicit + * + * @param versionSpec version string to check + */ +function isExplicitVersion(versionSpec) { + const c = semver.clean(versionSpec) || ''; + core.debug(`isExplicit: ${c}`); + const valid = semver.valid(c) != null; + core.debug(`explicit? 
${valid}`); + return valid; +} +exports.isExplicitVersion = isExplicitVersion; +/** + * Get the highest satisfiying semantic version in `versions` which satisfies `versionSpec` + * + * @param versions array of versions to evaluate + * @param versionSpec semantic version spec to satisfy + */ +function evaluateVersions(versions, versionSpec) { + let version = ''; + core.debug(`evaluating ${versions.length} versions`); + versions = versions.sort((a, b) => { + if (semver.gt(a, b)) { + return 1; + } + return -1; + }); + for (let i = versions.length - 1; i >= 0; i--) { + const potential = versions[i]; + const satisfied = semver.satisfies(potential, versionSpec); + if (satisfied) { + version = potential; + break; } } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - let parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); + if (version) { + core.debug(`matched: ${version}`); } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - this.handlers.forEach(handler => { - handler.prepareRequest(info.options); - }); - } - return info; - } - _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); - } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; - } - _getAgent(parsedUrl) { - let agent; - let proxyUrl = pm.getProxyUrl(parsedUrl); - let useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. 
- if (!!agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (!!this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - if (useProxy) { - // If using proxy, need tunnel - if (!tunnel) { - tunnel = __nccwpck_require__(4294); - } - const agentOptions = { - maxSockets: maxSockets, - keepAlive: this._keepAlive, - proxy: { - ...((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - }), - host: proxyUrl.hostname, - port: proxyUrl.port - } - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = 'Failed request: (' + statusCode + ')'; - } - let err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - }); + else { + core.debug('match not found'); } + return version; } -exports.HttpClient = HttpClient; - +exports.evaluateVersions = evaluateVersions; +/** + * Gets RUNNER_TOOL_CACHE + */ +function _getCacheDirectory() { + const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || ''; + assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined'); + return cacheDirectory; +} +/** + * Gets RUNNER_TEMP + */ +function _getTempDirectory() { + const tempDirectory = process.env['RUNNER_TEMP'] || ''; + assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); + return tempDirectory; +} +/** + * Gets a global variable + */ +function _getGlobal(key, defaultValue) { + /* eslint-disable @typescript-eslint/no-explicit-any */ + const value = global[key]; + /* eslint-enable @typescript-eslint/no-explicit-any */ + return value !== undefined ? value : defaultValue; +} +/** + * Returns an array of unique values. + * @param values Values to make unique. + */ +function _unique(values) { + return Array.from(new Set(values)); +} +//# sourceMappingURL=tool-cache.js.map /***/ }), -/***/ 6443: -/***/ ((__unused_webpack_module, exports) => { +/***/ 562: +/***/ ((module, exports) => { -"use strict"; +exports = module.exports = SemVer -Object.defineProperty(exports, "__esModule", ({ value: true })); -function getProxyUrl(reqUrl) { - let usingSsl = reqUrl.protocol === 'https:'; - let proxyUrl; - if (checkBypass(reqUrl)) { - return proxyUrl; - } - let proxyVar; - if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - if (proxyVar) { - proxyUrl = new URL(proxyVar); - } - return proxyUrl; -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - let upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (let upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; - } - } - return false; +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} } -exports.checkBypass = checkBypass; +// Note: this is the semver.org 
version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' -/***/ }), +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 -/***/ 1962: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 -"use strict"; +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var _a; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; -const fs = __importStar(__nccwpck_require__(7147)); -const path = __importStar(__nccwpck_require__(1017)); -_a = fs.promises -// export const {open} = 'fs' -, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; -// export const {open} = 'fs' -exports.IS_WINDOWS = process.platform === 'win32'; -// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 -exports.UV_FS_O_EXLOCK = 0x10000000; -exports.READONLY = fs.constants.O_RDONLY; -function exists(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield exports.stat(fsPath); - } - catch (err) { - if (err.code === 'ENOENT') { - return false; - } - throw err; - } - return true; - }); -} -exports.exists = exists; -function isDirectory(fsPath, useStat = false) { - return __awaiter(this, void 0, void 0, function* () { - const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); - return stats.isDirectory(); - }); -} -exports.isDirectory = isDirectory; -/** - * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: - * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). - */ -function isRooted(p) { - p = normalizeSeparators(p); - if (!p) { - throw new Error('isRooted() parameter "p" cannot be empty'); - } - if (exports.IS_WINDOWS) { - return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello - ); // e.g. C: or C:\hello - } - return p.startsWith('/'); -} -exports.isRooted = isRooted; -/** - * Best effort attempt to determine whether a file exists and is executable. - * @param filePath file path to check - * @param extensions additional file extensions to try - * @return if file exists and is executable, returns the file path. otherwise empty string. 
- */ -function tryGetExecutablePath(filePath, extensions) { - return __awaiter(this, void 0, void 0, function* () { - let stats = undefined; - try { - // test file exists - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // on Windows, test for valid extension - const upperExt = path.extname(filePath).toUpperCase(); - if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { - return filePath; - } - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - // try each extension - const originalFilePath = filePath; - for (const extension of extensions) { - filePath = originalFilePath + extension; - stats = undefined; - try { - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // preserve the case of the actual file (since an extension was appended) - try { - const directory = path.dirname(filePath); - const upperName = path.basename(filePath).toUpperCase(); - for (const actualName of yield exports.readdir(directory)) { - if (upperName === actualName.toUpperCase()) { - filePath = path.join(directory, actualName); - break; - } - } - } - catch (err) { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); - } - return filePath; - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - } - return ''; - }); -} -exports.tryGetExecutablePath = tryGetExecutablePath; -function normalizeSeparators(p) { - p = p || ''; - if (exports.IS_WINDOWS) { - // convert slashes on Windows - p = p.replace(/\//g, '\\'); - // remove redundant slashes - return p.replace(/\\\\+/g, '\\'); - } - // remove redundant slashes - return p.replace(/\/\/+/g, '/'); -} -// on Mac/Linux, test the execute bit -// R W X R W X R W X -// 256 128 64 32 16 8 4 2 1 -function isUnixExecutable(stats) { - return ((stats.mode & 1) > 0 || - ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || - ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +// The actual regexps go on exports.re +var re = exports.re = [] +var safeRe = exports.safeRe = [] +var src = exports.src = [] +var t = exports.tokens = {} +var R = 0 + +function tok (n) { + t[n] = R++ } -// Get the path of cmd.exe in windows -function getCmdPath() { - var _a; - return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; + +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. 
+var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value } -exports.getCmdPath = getCmdPath; -//# sourceMappingURL=io-util.js.map -/***/ }), +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. -/***/ 7436: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. -"use strict"; +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; -const assert_1 = __nccwpck_require__(9491); -const path = __importStar(__nccwpck_require__(1017)); -const ioUtil = __importStar(__nccwpck_require__(1962)); -/** - * Copies a file or folder. - * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js - * - * @param source source path - * @param dest destination path - * @param options optional. See CopyOptions. - */ -function cp(source, dest, options = {}) { - return __awaiter(this, void 0, void 0, function* () { - const { force, recursive, copySourceDirectory } = readCopyOptions(options); - const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; - // Dest is an existing file, but not forcing - if (destStat && destStat.isFile() && !force) { - return; - } - // If dest is an existing directory, should copy inside. 
- const newDest = destStat && destStat.isDirectory() && copySourceDirectory - ? path.join(dest, path.basename(source)) - : dest; - if (!(yield ioUtil.exists(source))) { - throw new Error(`no such file or directory: ${source}`); - } - const sourceStat = yield ioUtil.stat(source); - if (sourceStat.isDirectory()) { - if (!recursive) { - throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); - } - else { - yield cpDirRecursive(source, newDest, 0, force); - } - } - else { - if (path.relative(source, newDest) === '') { - // a file cannot be copied to itself - throw new Error(`'${newDest}' and '${source}' are the same file`); - } - yield copyFile(source, newDest, force); - } - }); -} -exports.cp = cp; -/** - * Moves a path. - * - * @param source source path - * @param dest destination path - * @param options optional. See MoveOptions. - */ -function mv(source, dest, options = {}) { - return __awaiter(this, void 0, void 0, function* () { - if (yield ioUtil.exists(dest)) { - let destExists = true; - if (yield ioUtil.isDirectory(dest)) { - // If dest is directory copy src into dest - dest = path.join(dest, path.basename(source)); - destExists = yield ioUtil.exists(dest); - } - if (destExists) { - if (options.force == null || options.force) { - yield rmRF(dest); - } - else { - throw new Error('Destination already exists'); - } - } - } - yield mkdirP(path.dirname(dest)); - yield ioUtil.rename(source, dest); - }); -} -exports.mv = mv; -/** - * Remove a path recursively with force - * - * @param inputPath path to remove - */ -function rmRF(inputPath) { - return __awaiter(this, void 0, void 0, function* () { - if (ioUtil.IS_WINDOWS) { - // Check for invalid characters - // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file - if (/[*"<>|]/.test(inputPath)) { - throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); - } - } - try { - // note if path does not exist, error is silent - yield ioUtil.rm(inputPath, { - force: true, - maxRetries: 3, - recursive: true, - retryDelay: 300 - }); - } - catch (err) { - throw new Error(`File was unable to be removed ${err}`); - } - }); -} -exports.rmRF = rmRF; -/** - * Make a directory. Creates the full path with folders in between - * Will throw if it fails - * - * @param fsPath path to create - * @returns Promise - */ -function mkdirP(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(fsPath, 'a path argument must be provided'); - yield ioUtil.mkdir(fsPath, { recursive: true }); - }); -} -exports.mkdirP = mkdirP; -/** - * Returns path of a tool had the tool actually been invoked. Resolves via paths. - * If you check and the tool does not exist, it will throw. - * - * @param tool name of the tool - * @param check whether to check if tool exists - * @returns Promise path to tool - */ -function which(tool, check) { - return __awaiter(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); - } - // recursive when check=true - if (check) { - const result = yield which(tool, false); - if (!result) { - if (ioUtil.IS_WINDOWS) { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); - } - else { - throw new Error(`Unable to locate executable file: ${tool}. 
Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); - } - } - return result; - } - const matches = yield findInPath(tool); - if (matches && matches.length > 0) { - return matches[0]; - } - return ''; - }); -} -exports.which = which; -/** - * Returns a list of all occurrences of the given tool on the system path. - * - * @returns Promise the paths of the tool - */ -function findInPath(tool) { - return __awaiter(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); - } - // build the list of extensions to try - const extensions = []; - if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { - for (const extension of process.env['PATHEXT'].split(path.delimiter)) { - if (extension) { - extensions.push(extension); - } - } - } - // if it's rooted, return it if exists. otherwise return empty. - if (ioUtil.isRooted(tool)) { - const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); - if (filePath) { - return [filePath]; - } - return []; - } - // if any path separators, return empty - if (tool.includes(path.sep)) { - return []; - } - // build the list of directories - // - // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, - // it feels like we should not do this. Checking the current directory seems like more of a use - // case of a shell, and the which() function exposed by the toolkit should strive for consistency - // across platforms. - const directories = []; - if (process.env.PATH) { - for (const p of process.env.PATH.split(path.delimiter)) { - if (p) { - directories.push(p); - } - } - } - // find all matches - const matches = []; - for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); - if (filePath) { - matches.push(filePath); - } - } - return matches; - }); -} -exports.findInPath = findInPath; -function readCopyOptions(options) { - const force = options.force == null ? true : options.force; - const recursive = Boolean(options.recursive); - const copySourceDirectory = options.copySourceDirectory == null - ? 
true - : Boolean(options.copySourceDirectory); - return { force, recursive, copySourceDirectory }; -} -function cpDirRecursive(sourceDir, destDir, currentDepth, force) { - return __awaiter(this, void 0, void 0, function* () { - // Ensure there is not a run away recursive copy - if (currentDepth >= 255) - return; - currentDepth++; - yield mkdirP(destDir); - const files = yield ioUtil.readdir(sourceDir); - for (const fileName of files) { - const srcFile = `${sourceDir}/${fileName}`; - const destFile = `${destDir}/${fileName}`; - const srcFileStat = yield ioUtil.lstat(srcFile); - if (srcFileStat.isDirectory()) { - // Recurse - yield cpDirRecursive(srcFile, destFile, currentDepth, force); - } - else { - yield copyFile(srcFile, destFile, force); - } - } - // Change the mode for the newly created directory - yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); - }); -} -// Buffered file copy -function copyFile(srcFile, destFile, force) { - return __awaiter(this, void 0, void 0, function* () { - if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { - // unlink/re-link it - try { - yield ioUtil.lstat(destFile); - yield ioUtil.unlink(destFile); - } - catch (e) { - // Try to override file permission - if (e.code === 'EPERM') { - yield ioUtil.chmod(destFile, '0666'); - yield ioUtil.unlink(destFile); - } - // other errors = it doesn't exist, no work to do - } - // Copy over symlink - const symlinkFull = yield ioUtil.readlink(srcFile); - yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); - } - else if (!(yield ioUtil.exists(destFile)) || force) { - yield ioUtil.copyFile(srcFile, destFile); - } - }); -} -//# sourceMappingURL=io.js.map +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. -/***/ }), +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' -/***/ 2473: -/***/ (function(module, exports, __nccwpck_require__) { +// ## Main Version +// Three dot-separated numeric identifiers. -"use strict"; +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports._readLinuxVersionFile = exports._getOsVersion = exports._findMatch = void 0; -const semver = __importStar(__nccwpck_require__(562)); -const core_1 = __nccwpck_require__(2186); -// needs to be require for core node modules to be mocked -/* eslint @typescript-eslint/no-require-imports: 0 */ -const os = __nccwpck_require__(2037); -const cp = __nccwpck_require__(2081); -const fs = __nccwpck_require__(7147); -function _findMatch(versionSpec, stable, candidates, archFilter) { - return __awaiter(this, void 0, void 0, function* () { - const platFilter = os.platform(); - let result; - let match; - let file; - for (const candidate of candidates) { - const version = candidate.version; - core_1.debug(`check ${version} satisfies ${versionSpec}`); - if (semver.satisfies(version, versionSpec) && - (!stable || candidate.stable === stable)) { - file = candidate.files.find(item => { - core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`); - let chk = item.arch === archFilter && item.platform === platFilter; - if (chk && item.platform_version) { - const osVersion = module.exports._getOsVersion(); - if (osVersion === item.platform_version) { - chk = true; - } - else { - chk = semver.satisfies(osVersion, item.platform_version); - } - } - return chk; - }); - if (file) { - core_1.debug(`matched ${candidate.version}`); - match = candidate; - break; - } - } - } - if (match && file) { - // clone since we're mutating the file list to be only the file that matches - result = Object.assign({}, match); - result.files = [file]; - } - return result; - }); -} -exports._findMatch = _findMatch; -function _getOsVersion() { - // TODO: add windows and other linux, arm variants - // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python) - const plat = os.platform(); - let version = ''; - if (plat === 'darwin') { - version = cp.execSync('sw_vers -productVersion').toString(); - } - else if (plat === 'linux') { - // lsb_release process not in some containers, readfile - // Run cat /etc/lsb-release - // DISTRIB_ID=Ubuntu - // DISTRIB_RELEASE=18.04 - // DISTRIB_CODENAME=bionic - // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS" - const lsbContents = module.exports._readLinuxVersionFile(); - if (lsbContents) { - const lines = lsbContents.split('\n'); - for (const line of lines) { - const parts = line.split('='); - if (parts.length === 2 && - (parts[0].trim() === 'VERSION_ID' || - parts[0].trim() === 'DISTRIB_RELEASE')) { - version = parts[1] - .trim() - .replace(/^"/, '') - .replace(/"$/, ''); - break; - } - } - } - } - return version; -} -exports._getOsVersion = _getOsVersion; -function _readLinuxVersionFile() { - const lsbReleaseFile = '/etc/lsb-release'; - const osReleaseFile = '/etc/os-release'; - let contents = ''; - if (fs.existsSync(lsbReleaseFile)) { - contents = fs.readFileSync(lsbReleaseFile).toString(); - } - else if (fs.existsSync(osReleaseFile)) { - contents = 
fs.readFileSync(osReleaseFile).toString(); - } - return contents; -} -exports._readLinuxVersionFile = _readLinuxVersionFile; -//# sourceMappingURL=manifest.js.map +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' -/***/ }), +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. -/***/ 8279: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' -"use strict"; +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.RetryHelper = void 0; -const core = __importStar(__nccwpck_require__(2186)); -/** - * Internal class for retries - */ -class RetryHelper { - constructor(maxAttempts, minSeconds, maxSeconds) { - if (maxAttempts < 1) { - throw new Error('max attempts should be greater than or equal to 1'); - } - this.maxAttempts = maxAttempts; - this.minSeconds = Math.floor(minSeconds); - this.maxSeconds = Math.floor(maxSeconds); - if (this.minSeconds > this.maxSeconds) { - throw new Error('min seconds should be less than or equal to max seconds'); - } - } - execute(action, isRetryable) { - return __awaiter(this, void 0, void 0, function* () { - let attempt = 1; - while (attempt < this.maxAttempts) { - // Try - try { - return yield action(); - } - catch (err) { - if (isRetryable && !isRetryable(err)) { - throw err; - } - core.info(err.message); - } - // Sleep - const seconds = this.getSleepAmount(); - core.info(`Waiting ${seconds} seconds before trying again`); - yield this.sleep(seconds); - attempt++; - } - // Last attempt - return yield action(); - }); - } - getSleepAmount() { - return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + - this.minSeconds); - } - sleep(seconds) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise(resolve => setTimeout(resolve, seconds * 1000)); - }); - } -} -exports.RetryHelper = RetryHelper; -//# sourceMappingURL=retry-helper.js.map +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. -/***/ }), +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' -/***/ 7784: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' -"use strict"; +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.evaluateVersions = exports.isExplicitVersion = exports.findFromManifest = exports.getManifestFromRepo = exports.findAllVersions = exports.find = exports.cacheFile = exports.cacheDir = exports.extractZip = exports.extractXar = exports.extractTar = exports.extract7z = exports.downloadTool = exports.HTTPError = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const io = __importStar(__nccwpck_require__(7436)); -const fs = __importStar(__nccwpck_require__(7147)); -const mm = __importStar(__nccwpck_require__(2473)); -const os = __importStar(__nccwpck_require__(2037)); -const path = __importStar(__nccwpck_require__(1017)); -const httpm = __importStar(__nccwpck_require__(9925)); -const semver = __importStar(__nccwpck_require__(562)); -const stream = __importStar(__nccwpck_require__(2781)); -const util = __importStar(__nccwpck_require__(3837)); -const v4_1 = __importDefault(__nccwpck_require__(7468)); -const exec_1 = __nccwpck_require__(1514); -const assert_1 = __nccwpck_require__(9491); -const retry_helper_1 = __nccwpck_require__(8279); -class HTTPError extends Error { - constructor(httpStatusCode) { - super(`Unexpected HTTP response: ${httpStatusCode}`); - this.httpStatusCode = httpStatusCode; - Object.setPrototypeOf(this, new.target.prototype); +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' + +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' + +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' + +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. 
+tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' + +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' + +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') +var tildeTrimReplace = '$1~' + +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' + +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') +var caretTrimReplace = '$1^' + +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. 
+tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAIN] + ')' + + '\\s*$' + +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + safeRe[i] = new RegExp(makeSafeRe(src[i])) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } } -exports.HTTPError = HTTPError; -const IS_WINDOWS = process.platform === 'win32'; -const IS_MAC = process.platform === 'darwin'; -const userAgent = 'actions/tool-cache'; -/** - * Download a tool from an url and stream it into a file - * - * @param url url of tool to download - * @param dest path to download tool - * @param auth authorization header - * @param headers other headers - * @returns path to downloaded tool - */ -function downloadTool(url, dest, auth, headers) { - return __awaiter(this, void 0, void 0, function* () { - dest = dest || path.join(_getTempDirectory(), v4_1.default()); - yield io.mkdirP(path.dirname(dest)); - core.debug(`Downloading ${url}`); - core.debug(`Destination ${dest}`); - const maxAttempts = 3; - const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10); - const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20); - const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds); - return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () { - return yield downloadToolAttempt(url, dest || '', auth, headers); - }), (err) => { - if (err instanceof HTTPError && err.httpStatusCode) { - // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests - if (err.httpStatusCode < 500 && - err.httpStatusCode !== 408 && - err.httpStatusCode !== 429) { - return false; - } - } - // Otherwise retry - return true; - }); - }); + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? 
v.version : null } -exports.downloadTool = downloadTool; -function downloadToolAttempt(url, dest, auth, headers) { - return __awaiter(this, void 0, void 0, function* () { - if (fs.existsSync(dest)) { - throw new Error(`Destination file path ${dest} already exists`); - } - // Get the response headers - const http = new httpm.HttpClient(userAgent, [], { - allowRetries: false - }); - if (auth) { - core.debug('set auth'); - if (headers === undefined) { - headers = {}; - } - headers.authorization = auth; - } - const response = yield http.get(url, headers); - if (response.message.statusCode !== 200) { - const err = new HTTPError(response.message.statusCode); - core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); - throw err; - } - // Download the response body - const pipeline = util.promisify(stream.pipeline); - const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message); - const readStream = responseMessageFactory(); - let succeeded = false; - try { - yield pipeline(readStream, fs.createWriteStream(dest)); - core.debug('download complete'); - succeeded = true; - return dest; - } - finally { - // Error, delete dest before retry - if (!succeeded) { - core.debug('download failed'); - try { - yield io.rmRF(dest); - } - catch (err) { - core.debug(`Failed to delete '${dest}'. ${err.message}`); - } - } - } - }); + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null } -/** - * Extract a .7z file - * - * @param file path to the .7z file - * @param dest destination directory. Optional. - * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this - * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will - * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is - * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line - * interface, it is smaller than the full command line interface, and it does support long paths. At the - * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website. - * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path - * to 7zr.exe can be pass to this function. - * @returns path to the destination directory - */ -function extract7z(file, dest, _7zPath) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS'); - assert_1.ok(file, 'parameter "file" is required'); - dest = yield _createExtractFolder(dest); - const originalCwd = process.cwd(); - process.chdir(dest); - if (_7zPath) { - try { - const logLevel = core.isDebug() ? 
'-bb1' : '-bb0'; - const args = [ - 'x', - logLevel, - '-bd', - '-sccUTF-8', - file - ]; - const options = { - silent: true - }; - yield exec_1.exec(`"${_7zPath}"`, args, options); - } - finally { - process.chdir(originalCwd); - } - } - else { - const escapedScript = path - .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1') - .replace(/'/g, "''") - .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines - const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); - const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); - const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; - const args = [ - '-NoLogo', - '-Sta', - '-NoProfile', - '-NonInteractive', - '-ExecutionPolicy', - 'Unrestricted', - '-Command', - command - ]; - const options = { - silent: true - }; - try { - const powershellPath = yield io.which('powershell', true); - yield exec_1.exec(`"${powershellPath}"`, args, options); - } - finally { - process.chdir(originalCwd); - } + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num } - return dest; - }); + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() } -exports.extract7z = extract7z; -/** - * Extract a compressed tar archive - * - * @param file path to the tar - * @param dest destination directory. Optional. - * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional. 
- * @returns path to the destination directory - */ -function extractTar(file, dest, flags = 'xz') { - return __awaiter(this, void 0, void 0, function* () { - if (!file) { - throw new Error("parameter 'file' is required"); - } - // Create dest - dest = yield _createExtractFolder(dest); - // Determine whether GNU tar - core.debug('Checking tar --version'); - let versionOutput = ''; - yield exec_1.exec('tar --version', [], { - ignoreReturnCode: true, - silent: true, - listeners: { - stdout: (data) => (versionOutput += data.toString()), - stderr: (data) => (versionOutput += data.toString()) - } - }); - core.debug(versionOutput.trim()); - const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR'); - // Initialize args - let args; - if (flags instanceof Array) { - args = flags; - } - else { - args = [flags]; - } - if (core.isDebug() && !flags.includes('v')) { - args.push('-v'); - } - let destArg = dest; - let fileArg = file; - if (IS_WINDOWS && isGnuTar) { - args.push('--force-local'); - destArg = dest.replace(/\\/g, '/'); - // Technically only the dest needs to have `/` but for aesthetic consistency - // convert slashes in the file arg too. - fileArg = file.replace(/\\/g, '/'); - } - if (isGnuTar) { - // Suppress warnings when using GNU tar to extract archives created by BSD tar - args.push('--warning=no-unknown-keyword'); - args.push('--overwrite'); - } - args.push('-C', destArg, '-f', fileArg); - yield exec_1.exec(`tar`, args); - return dest; - }); + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' + this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version } -exports.extractTar = extractTar; -/** - * Extract a xar compatible archive - * - * @param file path to the archive - * @param dest destination directory. Optional. - * @param flags flags for the xar. Optional. - * @returns path to the destination directory - */ -function extractXar(file, dest, flags = []) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(IS_MAC, 'extractXar() not supported on current OS'); - assert_1.ok(file, 'parameter "file" is required'); - dest = yield _createExtractFolder(dest); - let args; - if (flags instanceof Array) { - args = flags; - } - else { - args = [flags]; - } - args.push('-x', '-C', dest, '-f', file); - if (core.isDebug()) { - args.push('-v'); - } - const xarPath = yield io.which('xar', true); - yield exec_1.exec(`"${xarPath}"`, _unique(args)); - return dest; - }); + +SemVer.prototype.toString = function () { + return this.version } -exports.extractXar = extractXar; -/** - * Extract a zip - * - * @param file path to the zip - * @param dest destination directory. Optional. 
- * @returns path to the destination directory - */ -function extractZip(file, dest) { - return __awaiter(this, void 0, void 0, function* () { - if (!file) { - throw new Error("parameter 'file' is required"); - } - dest = yield _createExtractFolder(dest); - if (IS_WINDOWS) { - yield extractZipWin(file, dest); - } - else { - yield extractZipNix(file, dest); - } - return dest; - }); + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) } -exports.extractZip = extractZip; -function extractZipWin(file, dest) { - return __awaiter(this, void 0, void 0, function* () { - // build the powershell command - const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines - const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); - const pwshPath = yield io.which('pwsh', false); - //To match the file overwrite behavior on nix systems, we use the overwrite = true flag for ExtractToDirectory - //and the -Force flag for Expand-Archive as a fallback - if (pwshPath) { - //attempt to use pwsh with ExtractToDirectory, if this fails attempt Expand-Archive - const pwshCommand = [ - `$ErrorActionPreference = 'Stop' ;`, - `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`, - `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`, - `catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;` - ].join(' '); - const args = [ - '-NoLogo', - '-NoProfile', - '-NonInteractive', - '-ExecutionPolicy', - 'Unrestricted', - '-Command', - pwshCommand - ]; - core.debug(`Using pwsh at path: ${pwshPath}`); - yield exec_1.exec(`"${pwshPath}"`, args); - } - else { - const powershellCommand = [ - `$ErrorActionPreference = 'Stop' ;`, - `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`, - `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`, - `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }` - ].join(' '); - const args = [ - '-NoLogo', - '-Sta', - '-NoProfile', - '-NonInteractive', - '-ExecutionPolicy', - 'Unrestricted', - '-Command', - powershellCommand - ]; - const powershellPath = yield io.which('powershell', true); - core.debug(`Using powershell at path: ${powershellPath}`); - yield exec_1.exec(`"${powershellPath}"`, args); - } - }); + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) } -function extractZipNix(file, dest) { - return __awaiter(this, void 0, void 0, function* () { - const unzipPath = yield io.which('unzip', true); - const args = [file]; - if (!core.isDebug()) { - args.unshift('-q'); - } - args.unshift('-o'); //overwrite with -o, otherwise a prompt is shown which freezes the run - 
yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest }); - }); + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) } -/** - * Caches a directory and installs it into the tool cacheDir - * - * @param sourceDir the directory to cache into tools - * @param tool tool name - * @param version version of the tool. semver format - * @param arch architecture of the tool. Optional. Defaults to machine architecture - */ -function cacheDir(sourceDir, tool, version, arch) { - return __awaiter(this, void 0, void 0, function* () { - version = semver.clean(version) || version; - arch = arch || os.arch(); - core.debug(`Caching tool ${tool} ${version} ${arch}`); - core.debug(`source dir: ${sourceDir}`); - if (!fs.statSync(sourceDir).isDirectory()) { - throw new Error('sourceDir is not a directory'); + +SemVer.prototype.compareBuild = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + var i = 0 + do { + var a = this.build[i] + var b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. 
+ // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } } - // Create the tool dir - const destPath = yield _createToolPath(tool, version, arch); - // copy each child item. do not move. move can fail on Windows - // due to anti-virus software having an open handle on a file. - for (const itemName of fs.readdirSync(sourceDir)) { - const s = path.join(sourceDir, itemName); - yield io.cp(s, destPath, { recursive: true }); + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) } - // write .complete - _completeToolPath(tool, version, arch); - return destPath; - }); -} -exports.cacheDir = cacheDir; -/** - * Caches a downloaded file (GUID) and installs it - * into the tool cache with a given targetName - * - * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid. - * @param targetFile the name of the file name in the tools directory - * @param tool tool name - * @param version version of the tool. semver format - * @param arch architecture of the tool. Optional. Defaults to machine architecture - */ -function cacheFile(sourceFile, targetFile, tool, version, arch) { - return __awaiter(this, void 0, void 0, function* () { - version = semver.clean(version) || version; - arch = arch || os.arch(); - core.debug(`Caching tool ${tool} ${version} ${arch}`); - core.debug(`source file: ${sourceFile}`); - if (!fs.statSync(sourceFile).isFile()) { - throw new Error('sourceFile is not a file'); + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] } - // create the tool dir - const destFolder = yield _createToolPath(tool, version, arch); - // copy instead of move. move can fail on Windows due to - // anti-virus software having an open handle on a file. - const destPath = path.join(destFolder, targetFile); - core.debug(`destination file ${destPath}`); - yield io.cp(sourceFile, destPath); - // write .complete - _completeToolPath(tool, version, arch); - return destFolder; - }); + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this } -exports.cacheFile = cacheFile; -/** - * Finds the path to a tool version in the local installed tool cache - * - * @param toolName name of the tool - * @param versionSpec version of the tool - * @param arch optional arch. 
defaults to arch of computer - */ -function find(toolName, versionSpec, arch) { - if (!toolName) { - throw new Error('toolName parameter is required'); - } - if (!versionSpec) { - throw new Error('versionSpec parameter is required'); - } - arch = arch || os.arch(); - // attempt to resolve an explicit version - if (!isExplicitVersion(versionSpec)) { - const localVersions = findAllVersions(toolName, arch); - const match = evaluateVersions(localVersions, versionSpec); - versionSpec = match; + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' } - // check for the explicit version in the cache - let toolPath = ''; - if (versionSpec) { - versionSpec = semver.clean(versionSpec) || ''; - const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch); - core.debug(`checking cache: ${cachePath}`); - if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) { - core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); - toolPath = cachePath; - } - else { - core.debug('not found'); + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key } + } } - return toolPath; + return defaultResult // may be undefined + } } -exports.find = find; -/** - * Finds the paths to all versions of a tool that are installed in the local tool cache - * - * @param toolName name of the tool - * @param arch optional arch. defaults to arch of computer - */ -function findAllVersions(toolName, arch) { - const versions = []; - arch = arch || os.arch(); - const toolPath = path.join(_getCacheDirectory(), toolName); - if (fs.existsSync(toolPath)) { - const children = fs.readdirSync(toolPath); - for (const child of children) { - if (isExplicitVersion(child)) { - const fullPath = path.join(toolPath, child, arch || ''); - if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) { - versions.push(child); - } - } - } - } - return versions; + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 } -exports.findAllVersions = findAllVersions; -function getManifestFromRepo(owner, repo, auth, branch = 'master') { - return __awaiter(this, void 0, void 0, function* () { - let releases = []; - const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`; - const http = new httpm.HttpClient('tool-cache'); - const headers = {}; - if (auth) { - core.debug('set auth'); - headers.authorization = auth; - } - const response = yield http.getJson(treeUrl, headers); - if (!response.result) { - return releases; - } - let manifestUrl = ''; - for (const item of response.result.tree) { - if (item.path === 'versions-manifest.json') { - manifestUrl = item.url; - break; - } - } - headers['accept'] = 'application/vnd.github.VERSION.raw'; - let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody(); - if (versionsRaw) { - // shouldn't be needed but protects against invalid json saved with BOM - versionsRaw = versionsRaw.replace(/^\uFEFF/, ''); - try { - releases = JSON.parse(versionsRaw); - } - catch (_a) { - core.debug('Invalid json'); - } - } - return releases; - }); + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) } -exports.getManifestFromRepo = getManifestFromRepo; -function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) { - return __awaiter(this, void 0, void 0, function* () { - // wrap the internal impl - const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); - return match; - }); + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major } -exports.findFromManifest = findFromManifest; -function _createExtractFolder(dest) { - return __awaiter(this, void 0, void 0, function* () { - if (!dest) { - // create a temp dir - dest = path.join(_getTempDirectory(), v4_1.default()); - } - yield io.mkdirP(dest); - return dest; - }); + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor } -function _createToolPath(tool, version, arch) { - return __awaiter(this, void 0, void 0, function* () { - const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); - core.debug(`destination ${folderPath}`); - const markerPath = `${folderPath}.complete`; - yield io.rmRF(folderPath); - yield io.rmRF(markerPath); - yield io.mkdirP(folderPath); - return folderPath; - }); + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch } -function _completeToolPath(tool, version, arch) { - const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); - const markerPath = `${folderPath}.complete`; - fs.writeFileSync(markerPath, ''); - core.debug('finished caching tool'); + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) } -/** - * Check if version string is explicit - * - * @param versionSpec version string to check - */ -function isExplicitVersion(versionSpec) { - const c = semver.clean(versionSpec) || ''; - core.debug(`isExplicit: ${c}`); - const valid = semver.valid(c) != null; - core.debug(`explicit? 
${valid}`); - return valid; + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) } -exports.isExplicitVersion = isExplicitVersion; -/** - * Get the highest satisfiying semantic version in `versions` which satisfies `versionSpec` - * - * @param versions array of versions to evaluate - * @param versionSpec semantic version spec to satisfy - */ -function evaluateVersions(versions, versionSpec) { - let version = ''; - core.debug(`evaluating ${versions.length} versions`); - versions = versions.sort((a, b) => { - if (semver.gt(a, b)) { - return 1; - } - return -1; - }); - for (let i = versions.length - 1; i >= 0; i--) { - const potential = versions[i]; - const satisfied = semver.satisfies(potential, versionSpec); - if (satisfied) { - version = potential; - break; - } - } - if (version) { - core.debug(`matched: ${version}`); - } - else { - core.debug('match not found'); - } - return version; + +exports.compareBuild = compareBuild +function compareBuild (a, b, loose) { + var versionA = new SemVer(a, loose) + var versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) } -exports.evaluateVersions = evaluateVersions; -/** - * Gets RUNNER_TOOL_CACHE - */ -function _getCacheDirectory() { - const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || ''; - assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined'); - return cacheDirectory; + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) } -/** - * Gets RUNNER_TEMP - */ -function _getTempDirectory() { - const tempDirectory = process.env['RUNNER_TEMP'] || ''; - assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); - return tempDirectory; + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(a, b, loose) + }) } -/** - * Gets a global variable - */ -function _getGlobal(key, defaultValue) { - /* eslint-disable @typescript-eslint/no-explicit-any */ - const value = global[key]; - /* eslint-enable @typescript-eslint/no-explicit-any */ - return value !== undefined ? value : defaultValue; + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(b, a, loose) + }) } -/** - * Returns an array of unique values. - * @param values Values to make unique. - */ -function _unique(values) { - return Array.from(new Set(values)); + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 } -//# sourceMappingURL=tool-cache.js.map -/***/ }), +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} -/***/ 562: -/***/ ((module, exports) => { +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} -exports = module.exports = SemVer +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) - } -} else { - debug = function () {} +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 } -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. 
-exports.SEMVER_SPEC_VERSION = '2.0.0' +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b -// Max safe segment length for coercion. -var MAX_SAFE_COMPONENT_LENGTH = 16 + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b -var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + case '': + case '=': + case '==': + return eq(a, b, loose) -// The actual regexps go on exports.re -var re = exports.re = [] -var safeRe = exports.safeRe = [] -var src = exports.src = [] -var t = exports.tokens = {} -var R = 0 + case '!=': + return neq(a, b, loose) -function tok (n) { - t[n] = R++ -} + case '>': + return gt(a, b, loose) -var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + case '>=': + return gte(a, b, loose) -// Replace some greedy regex tokens to prevent regex dos issues. These regex are -// used internally via the safeRe object since all inputs in this library get -// normalized first to trim and collapse all extra whitespace. The original -// regexes are exported for userland consumption and lower level usage. A -// future breaking change could export the safer regex only with a note that -// all input should have extra whitespace removed. -var safeRegexReplacements = [ - ['\\s', 1], - ['\\d', MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], -] + case '<': + return lt(a, b, loose) -function makeSafeRe (value) { - for (var i = 0; i < safeRegexReplacements.length; i++) { - var token = safeRegexReplacements[i][0] - var max = safeRegexReplacements[i][1] - value = value - .split(token + '*').join(token + '{0,' + max + '}') - .split(token + '+').join(token + '{1,' + max + '}') + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) } - return value } -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } -tok('NUMERICIDENTIFIER') -src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' -tok('NUMERICIDENTIFIERLOOSE') -src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } -tok('NONNUMERICIDENTIFIER') -src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) -// ## Main Version -// Three dot-separated numeric identifiers. + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } -tok('MAINVERSION') -src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' 
+ - '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIER] + ')' + debug('comp', this) +} -tok('MAINVERSIONLOOSE') -src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var m = comp.match(r) -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } -tok('PRERELEASEIDENTIFIER') -src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } -tok('PRERELEASEIDENTIFIERLOOSE') -src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' + // if it literally is just '>' or '' then allow anything. + if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. +Comparator.prototype.toString = function () { + return this.value +} -tok('PRERELEASE') -src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) -tok('PRERELEASELOOSE') -src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' + if (this.semver === ANY || version === ANY) { + return true + } -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } -tok('BUILDIDENTIFIER') -src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' + return cmp(version, this.operator, this.semver, this.options) +} -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } -tok('BUILD') -src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + - '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. + var rangeTmp -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. + if (this.operator === '') { + if (this.value === '') { + return true + } + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } -tok('FULL') -tok('FULLPLAIN') -src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + - src[t.PRERELEASE] + '?' + - src[t.BUILD] + '?' 
+ var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) -src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -tok('LOOSEPLAIN') -src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + - src[t.PRERELEASELOOSE] + '?' + - src[t.BUILD] + '?' +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } -tok('LOOSE') -src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } -tok('GTLT') -src[t.GTLT] = '((?:<|>)?=?)' + if (range instanceof Comparator) { + return new Range(range.value, options) + } -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -tok('XRANGEIDENTIFIERLOOSE') -src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -tok('XRANGEIDENTIFIER') -src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' + if (!(this instanceof Range)) { + return new Range(range, options) + } -tok('XRANGEPLAIN') -src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:' + src[t.PRERELEASE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease -tok('XRANGEPLAINLOOSE') -src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[t.PRERELEASELOOSE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. 
+ this.raw = range + .trim() + .split(/\s+/) + .join(' ') -tok('XRANGE') -src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' -tok('XRANGELOOSE') -src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -tok('COERCE') -src[t.COERCE] = '(^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' -tok('COERCERTL') -re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') -safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + this.raw) + } -// Tilde ranges. -// Meaning is "reasonably at or greater than" -tok('LONETILDE') -src[t.LONETILDE] = '(?:~>?)' + this.format() +} -tok('TILDETRIM') -src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' -re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') -safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') -var tildeTrimReplace = '$1~' - -tok('TILDE') -src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' -tok('TILDELOOSE') -src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' - -// Caret ranges. -// Meaning is "at least and backwards compatible with" -tok('LONECARET') -src[t.LONECARET] = '(?:\\^)' - -tok('CARETTRIM') -src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' -re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') -safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') -var caretTrimReplace = '$1^' +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} -tok('CARET') -src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' -tok('CARETLOOSE') -src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' +Range.prototype.toString = function () { + return this.range +} -// A simple gt/lt/eq thing, or just "" to indicate "any version" -tok('COMPARATORLOOSE') -src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' -tok('COMPARATOR') -src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? 
safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -tok('COMPARATORTRIM') -src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + - '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' + // `~ 1.2.3` => `~1.2.3` + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) -// this one has to use the /g flag -re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') -safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') -var comparatorTrimReplace = '$1$2$3' + // `^ 1.2.3` => `^1.2.3` + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -tok('HYPHENRANGE') -src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAIN] + ')' + - '\\s*$' + // normalize spaces + range = range.split(/\s+/).join(' ') -tok('HYPHENRANGELOOSE') -src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s*$' + // At this point, the range is completely trimmed and + // ready to be split into comparators. -// Star ranges basically just allow anything at all. -tok('STAR') -src[t.STAR] = '(<|>)?=?\\s*\\*' + var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) + return set +} - // Replace all greedy whitespace to prevent regex dos issues. These regex are - // used internally via the safeRe object since all inputs in this library get - // normalized first to trim and collapse all extra whitespace. The original - // regexes are exported for userland consumption and lower level usage. A - // future breaking change could export the safer regex only with a note that - // all input should have extra whitespace removed. 
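// --- Illustrative sketch (not part of the patch): how parseRange above desugars a
// range string into comparator sets after the hyphen/tilde/caret/whitespace rewrites.
// Assumes the standalone `semver` package (the same library vendored into this bundle)
// is installed.
const semver = require('semver')

// Hyphen ranges and x-ranges are rewritten into plain comparators, e.g.:
console.log(semver.toComparators('1.2.3 - 2.x || ~3.4'))
// [ [ '>=1.2.3', '<3.0.0' ], [ '>=3.4.0', '<3.5.0' ] ]

// Whitespace between an operator and its version is trimmed before parsing:
console.log(semver.validRange('>= 1.2.3   <  2.0.0')) // '>=1.2.3 <2.0.0'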
- safeRe[i] = new RegExp(makeSafeRe(src[i])) +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') } + + return this.set.some(function (thisComparators) { + return ( + isSatisfiable(thisComparators, options) && + range.set.some(function (rangeComparators) { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every(function (thisComparator) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) } -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +// take a set of comparators and determine whether there +// exists a version which can satisfy it +function isSatisfiable (comparators, options) { + var result = true + var remainingComparators = comparators.slice() + var testComparator = remainingComparators.pop() - if (version instanceof SemVer) { - return version - } + while (result && remainingComparators.length) { + result = remainingComparators.every(function (otherComparator) { + return testComparator.intersects(otherComparator, options) + }) - if (typeof version !== 'string') { - return null + testComparator = remainingComparators.pop() } - if (version.length > MAX_LENGTH) { - return null - } + return result +} - var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] - if (!r.test(version)) { - return null - } +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} - try { - return new SemVer(version, options) - } catch (er) { - return null - } +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. +function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp } -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' } -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') } -exports.SemVer = SemVer +function replaceTilde (comp, options) { + var r = options.loose ? 
safeRe[t.TILDELOOSE] : safeRe[t.TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' } else { - version = version.version + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } + debug('tilde return', ret) + return ret + }) +} - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret - var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } + debug('caret return', ret) + return ret + }) +} - this.raw = version +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? 
safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } + if (gtlt === '=' && anyX) { + gtlt = '' + } - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 } } - return id - }) - } - this.build = m[5] ? m[5].split('.') : [] - this.format() -} + ret = gtlt + M + '.' + m + '.' + p + pr + } else if (xm) { + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr + } else if (xp) { + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr + } -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' + this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version + debug('xRange return', ret) + + return ret + }) } -SemVer.prototype.toString = function () { - return this.version +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(safeRe[t.STAR], '') } -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' 
+ fm + '.0' + } else { + from = '>=' + from } - return this.compareMain(other) || this.comparePre(other) -} - -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to } - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) + return (from + ' ' + to).trim() } -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false } - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } } - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true } - } while (++i) -} - -SemVer.prototype.compareBuild = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) } + return false +} - var i = 0 - do { - var a = this.build[i] - var b = other.build[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false } - } while (++i) -} + } -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. 
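// --- Illustrative sketch (not part of the patch): the desugaring rules implemented by
// replaceTilde/replaceCaret above, shown via the published `semver` package (assumed
// installed).
const semver = require('semver')

console.log(semver.validRange('~1.2.3')) // '>=1.2.3 <1.3.0'  (patch-level changes only)
console.log(semver.validRange('^1.2.3')) // '>=1.2.3 <2.0.0'  (minor and patch changes)
console.log(semver.validRange('^0.2.3')) // '>=0.2.3 <0.3.0'  (major 0: minor bumps are breaking)
console.log(semver.validRange('^0.0.3')) // '>=0.0.3 <0.0.4'  (major and minor 0: patch bumps are breaking)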
- case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue } - this.inc('pre', identifier) - break - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true } } - break + } - default: - throw new Error('invalid increment argument: ' + release) + // Version has a -pre, but it's not one of the ones we like. 
+ return false } - this.format() - this.raw = this.version - return this + + return true } -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false } + return range.test(version) +} +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null try { - return new SemVer(version, loose).inc(release, identifier).version + var rangeObj = new Range(range, options) } catch (er) { return null } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max } -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) } } - return defaultResult // may be undefined - } + }) + return min } -exports.compareIdentifiers = compareIdentifiers +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } - if (anum && bnum) { - a = +a - b = +b + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver } - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 -} + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. 
+ var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major -} + if (minver && range.test(minver)) { + return minver + } -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor + return null } -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } } -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) } -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) } -exports.compareBuild = compareBuild -function compareBuild (a, b, loose) { - var versionA = new SemVer(a, loose) - var versionB = new SemVer(b, loose) - return versionA.compare(versionB) || versionA.compareBuild(versionB) -} +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(a, b, loose) - }) -} + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(b, a, loose) - }) -} + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. 
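// --- Illustrative sketch (not part of the patch): the prerelease gating in testSet plus
// the satisfies/maxSatisfying/minVersion helpers defined above, shown via the published
// `semver` package (assumed installed).
const semver = require('semver')

// A prerelease only matches when some comparator opts into that exact major.minor.patch tuple:
console.log(semver.satisfies('1.2.3-pr.2', '>=1.2.3-pr.1 <2.0.0'))                   // true
console.log(semver.satisfies('1.2.4-alpha', '^1.2.3'))                               // false
console.log(semver.satisfies('1.2.4-alpha', '^1.2.3', { includePrerelease: true }))  // true

console.log(semver.maxSatisfying(['1.2.3', '1.2.9', '2.0.0'], '~1.2.0')) // '1.2.9'
console.log(semver.minVersion('^1.2.3').version)                          // '1.2.3'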
-exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} + var high = null + var low = null -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true } -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null } -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b - - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b - - case '': - case '=': - case '==': - return eq(a, b, loose) - - case '!=': - return neq(a, b, loose) - - case '>': - return gt(a, b, loose) - - case '>=': - return gte(a, b, loose) - - case '<': - return lt(a, b, loose) +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} - case '<=': - return lte(a, b, loose) +exports.coerce = coerce +function coerce (version, options) { + if (version instanceof SemVer) { + return version + } - default: - throw new TypeError('Invalid operator: ' + op) + if (typeof version === 'number') { + version = String(version) } -} -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } + if (typeof version !== 'string') { + return null } - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value + options = options || {} + + var match = null + if (!options.rtl) { + match = version.match(safeRe[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. 
+ // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + var next + while ((next = safeRe[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length } + // leave it in a clean state + safeRe[t.COERCERTL].lastIndex = -1 } - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) + if (match === null) { + return null } - comp = comp.trim().split(/\s+/).join(' ') - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) - - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } - - debug('comp', this) + return parse(match[2] + + '.' + (match[3] || '0') + + '.' + (match[4] || '0'), options) } -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var m = comp.match(r) - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) - } +/***/ }), - this.operator = m[1] !== undefined ? m[1] : '' - if (this.operator === '=') { - this.operator = '' - } +/***/ 7701: +/***/ ((module) => { - // if it literally is just '>' or '' then allow anything. - if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) - } +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); } -Comparator.prototype.toString = function () { - return this.value +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); } -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY || version === ANY) { - return true - } +module.exports = bytesToUuid; - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } - } - return cmp(version, this.operator, this.semver, this.options) -} +/***/ }), -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } +/***/ 7269: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. 
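// --- Illustrative sketch (not part of the patch): the left-to-right vs right-to-left
// coercion walk implemented in coerce() above, shown via the published `semver` package
// (assumed installed).
const semver = require('semver')

console.log(semver.coerce('v2 is the latest').version)        // '2.0.0'
console.log(semver.coerce('1.2.3.4').version)                 // '1.2.3' (left-most coercible match)
console.log(semver.coerce('1.2.3.4', { rtl: true }).version)  // '2.3.4' (right-most coercible match)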
- var rangeTmp +var crypto = __nccwpck_require__(6113); - if (this.operator === '') { - if (this.value === '') { - return true - } - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - if (comp.value === '') { - return true - } - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan -} +/***/ }), -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +/***/ 7468: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) - } - } +var rng = __nccwpck_require__(7269); +var bytesToUuid = __nccwpck_require__(7701); - if (range instanceof Comparator) { - return new Range(range.value, options) - } +function v4(options, buf, offset) { + var i = buf && offset || 0; - if (!(this instanceof Range)) { - return new Range(range, options) + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; } + options = options || {}; - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease - - // First reduce all whitespace as much as possible so we do not have to rely - // on potentially slow regexes like \s*. This is then stored and used for - // future error messages as well. 
- this.raw = range - .trim() - .split(/\s+/) - .join(' ') + var rnds = options.random || (options.rng || rng)(); - // First, split based on boolean or || - this.set = this.raw.split('||').map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + this.raw) + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } } - this.format() + return buf || bytesToUuid(rnds); } -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} +module.exports = v4; -Range.prototype.toString = function () { - return this.range -} -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) +/***/ }), - // `~ 1.2.3` => `~1.2.3` - range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) +/***/ 2557: +/***/ ((__unused_webpack_module, exports) => { - // `^ 1.2.3` => `^1.2.3` - range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) +"use strict"; - // normalize spaces - range = range.split(/\s+/).join(' ') - // At this point, the range is completely trimmed and - // ready to be split into comparators. +Object.defineProperty(exports, "__esModule", ({ value: true })); - var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +class AbortSignal { + constructor() { + /** + * onabort event listener. + */ + this.onabort = null; + listenersMap.set(this, []); + abortedMap.set(this, false); + } + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. 
+ * + * @readonly + */ + static get none() { + return new AbortSignal(); + } + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + listeners.push(listener); + } + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); + if (index > -1) { + listeners.splice(index, 1); + } + } + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event) { + throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + } +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. + * + * @internal + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +function abortSignal(signal) { + if (signal.aborted) { + return; + } + if (signal.onabort) { + signal.onabort.call(signal); + } + const listeners = listenersMap.get(signal); + if (listeners) { + // Create a copy of listeners so mutations to the array + // (e.g. via removeListener calls) don't affect the listeners + // we invoke. + listeners.slice().forEach((listener) => { + listener.call(signal, { type: "abort" }); + }); + } + abortedMap.set(signal, true); +} - return set +// Copyright (c) Microsoft Corporation. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. 
+ * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +class AbortController { + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + constructor(parentSignals) { + this._signal = new AbortSignal(); + if (!parentSignals) { + return; + } + // coerce parentSignals into an array + if (!Array.isArray(parentSignals)) { + // eslint-disable-next-line prefer-rest-params + parentSignals = arguments; + } + for (const parentSignal of parentSignals) { + // if the parent signal has already had abort() called, + // then call abort on this signal as well. + if (parentSignal.aborted) { + this.abort(); + } + else { + // when the parent signal aborts, this signal should as well. + parentSignal.addEventListener("abort", () => { + this.abort(); + }); + } + } + } + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal() { + return this._signal; + } + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort() { + abortSignal(this._signal); + } + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); + // Prevent the active Timer from keeping the Node.js event loop active. 
+ if (typeof timer.unref === "function") { + timer.unref(); + } + return signal; + } } -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } +exports.AbortController = AbortController; +exports.AbortError = AbortError; +exports.AbortSignal = AbortSignal; +//# sourceMappingURL=index.js.map - return this.set.some(function (thisComparators) { - return ( - isSatisfiable(thisComparators, options) && - range.set.some(function (rangeComparators) { - return ( - isSatisfiable(rangeComparators, options) && - thisComparators.every(function (thisComparator) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - ) - }) - ) - }) -} -// take a set of comparators and determine whether there -// exists a version which can satisfy it -function isSatisfiable (comparators, options) { - var result = true - var remainingComparators = comparators.slice() - var testComparator = remainingComparators.pop() +/***/ }), - while (result && remainingComparators.length) { - result = remainingComparators.every(function (otherComparator) { - return testComparator.intersects(otherComparator, options) - }) +/***/ 9645: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - testComparator = remainingComparators.pop() - } +"use strict"; - return result -} -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. -function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} - -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' -} - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -function replaceTilde (comp, options) { - var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret +var coreUtil = __nccwpck_require__(1333); - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' +// Copyright (c) Microsoft Corporation. 
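// --- Illustrative sketch (not part of the patch): basic use of the AbortController /
// AbortSignal pair defined above, via the published @azure/abort-controller package
// (assumed installed). doAsyncWork and its one-second delay are hypothetical stand-ins
// for a real cancellable operation.
const { AbortController, AbortError } = require("@azure/abort-controller");

const controller = new AbortController();
const signal = controller.signal;
signal.addEventListener("abort", () => console.log("work was cancelled"));

// Hypothetical cancellable operation: rejects with AbortError once the signal aborts.
function doAsyncWork(signal) {
  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => resolve("done"), 1000);
    signal.addEventListener("abort", () => {
      clearTimeout(timer);
      reject(new AbortError("The operation was aborted."));
    });
  });
}

doAsyncWork(signal).catch((e) => console.log(e.name)); // 'AbortError'
controller.abort();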
+// Licensed under the MIT license. +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +class AzureKeyCredential { + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; } - - debug('tilde return', ret) - return ret - }) -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') } -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' +// Copyright (c) Microsoft Corporation. +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +class AzureNamedKeyCredential { + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' + this._name = name; + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); } - } else { - ret = '>=' + M + '.' + m + '.' 
+ p + - ' <' + (+M + 1) + '.0.0' - } + this._name = newName; + this._key = newKey; } - - debug('caret return', ret) - return ret - }) } - -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +function isNamedKeyCredential(credential) { + return (coreUtil.isObjectWithProperties(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); } -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp - - if (gtlt === '=' && anyX) { - gtlt = '' +// Copyright (c) Microsoft Corporation. +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +class AzureSASCredential { + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; } - - // if we're including prereleases in the match, then we need - // to fix this to -0, the lowest possible prerelease value - pr = options.includePrerelease ? '-0' : '' - - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0-0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 + this._signature = signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); } - } - - ret = gtlt + M + '.' + m + '.' + p + pr - } else if (xm) { - ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr - } else if (xp) { - ret = '>=' + M + '.' + m + '.0' + pr + - ' <' + M + '.' + (+m + 1) + '.0' + pr + this._signature = newSignature; } - - debug('xRange return', ret) - - return ret - }) } - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! 
- return comp.trim().replace(safeRe[t.STAR], '') +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +function isSASCredential(credential) { + return (coreUtil.isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); } -// This function is passed to string.replace(re[t.HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from - } - - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to - } - - return (from + ' ' + to).trim() +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. + const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); } -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false - } +exports.AzureKeyCredential = AzureKeyCredential; +exports.AzureNamedKeyCredential = AzureNamedKeyCredential; +exports.AzureSASCredential = AzureSASCredential; +exports.isNamedKeyCredential = isNamedKeyCredential; +exports.isSASCredential = isSASCredential; +exports.isTokenCredential = isTokenCredential; +//# sourceMappingURL=index.js.map - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } - } - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } - } - return false -} +/***/ }), -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } - } +/***/ 4607: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. 
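// --- Illustrative sketch (not part of the patch): the key-rotation and type-guard
// behaviour of the credential helpers defined above, via the published @azure/core-auth
// package (assumed installed). The key strings are placeholders.
const { AzureKeyCredential, isTokenCredential } = require("@azure/core-auth");

const credential = new AzureKeyCredential("initial-key");
console.log(credential.key);      // 'initial-key'
credential.update("rotated-key"); // requests made after this call use the new key
console.log(credential.key);      // 'rotated-key'

// A plain key credential is not a TokenCredential (it has no getToken method):
console.log(isTokenCredential(credential)); // false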
- for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } +"use strict"; - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } - // Version has a -pre, but it's not one of the ones we like. - return false - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - return true -} +var uuid = __nccwpck_require__(3415); +var util = __nccwpck_require__(3837); +var tslib = __nccwpck_require__(4351); +var xml2js = __nccwpck_require__(6189); +var coreUtil = __nccwpck_require__(1333); +var logger$1 = __nccwpck_require__(3233); +var coreAuth = __nccwpck_require__(9645); +var os = __nccwpck_require__(2037); +var http = __nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var abortController = __nccwpck_require__(2557); +var tunnel = __nccwpck_require__(4294); +var stream = __nccwpck_require__(2781); +var FormData = __nccwpck_require__(6279); +var node_fetch = __nccwpck_require__(467); +var coreTracing = __nccwpck_require__(4175); -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); } - }) - return max + n["default"] = e; + return Object.freeze(n); } -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min -} - -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) - - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } - - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } - - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. 
- var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) - } - - if (minver && range.test(minver)) { - return minver - } - - return null -} - -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} - -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} - -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) -} - -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) - - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } - - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } - - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. - - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - var high = null - var low = null - - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false - } - } - return true -} - -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null -} - -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) -} - -exports.coerce = coerce -function coerce (version, options) { - if (version instanceof SemVer) { - return version - } - - if (typeof version === 'number') { - version = String(version) - } - - if (typeof version !== 'string') { - return null - } - - options = options || {} - - var match = null - if (!options.rtl) { - match = version.match(safeRe[t.COERCE]) - } else { - // Find the right-most coercible string that does not share - // a terminus with a more left-ward coercible string. - // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' - // - // Walk through the string checking with a /g regexp - // Manually set the index so as to pick up overlapping matches. - // Stop when we get a match that ends at the string end, since no - // coercible string can be more right-ward without the same terminus. - var next - while ((next = safeRe[t.COERCERTL].exec(version)) && - (!match || match.index + match[0].length !== version.length) - ) { - if (!match || - next.index + next[0].length !== match.index + match[0].length) { - match = next - } - safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length - } - // leave it in a clean state - safeRe[t.COERCERTL].lastIndex = -1 - } - - if (match === null) { - return null - } - - return parse(match[2] + - '.' + (match[3] || '0') + - '.' + (match[4] || '0'), options) -} - - -/***/ }), - -/***/ 7701: -/***/ ((module) => { +var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var http__namespace = /*#__PURE__*/_interopNamespace(http); +var https__namespace = /*#__PURE__*/_interopNamespace(https); +var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); +var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); +var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + * A collection of HttpHeaders that can be sent with a HTTP request. */ -var byteToHex = []; -for (var i = 0; i < 256; ++i) { - byteToHex[i] = (i + 0x100).toString(16).substr(1); -} - -function bytesToUuid(buf, offset) { - var i = offset || 0; - var bth = byteToHex; - // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 - return ([ - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]] - ]).join(''); +function getHeaderKey(headerName) { + return headerName.toLowerCase(); } - -module.exports = bytesToUuid; - - -/***/ }), - -/***/ 7269: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Unique ID creation requires a high quality random # generator. In node.js -// this is pretty straight-forward - we use the crypto API. 
- -var crypto = __nccwpck_require__(6113); - -module.exports = function nodeRNG() { - return crypto.randomBytes(16); -}; - - -/***/ }), - -/***/ 7468: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var rng = __nccwpck_require__(7269); -var bytesToUuid = __nccwpck_require__(7701); - -function v4(options, buf, offset) { - var i = buf && offset || 0; - - if (typeof(options) == 'string') { - buf = options === 'binary' ? new Array(16) : null; - options = null; - } - options = options || {}; - - var rnds = options.random || (options.rng || rng)(); - - // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - rnds[6] = (rnds[6] & 0x0f) | 0x40; - rnds[8] = (rnds[8] & 0x3f) | 0x80; - - // Copy bytes to buffer, if provided - if (buf) { - for (var ii = 0; ii < 16; ++ii) { - buf[i + ii] = rnds[ii]; +function isHttpHeadersLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.rawHeaders === "function" && + typeof castObject.clone === "function" && + typeof castObject.get === "function" && + typeof castObject.set === "function" && + typeof castObject.contains === "function" && + typeof castObject.remove === "function" && + typeof castObject.headersArray === "function" && + typeof castObject.headerValues === "function" && + typeof castObject.headerNames === "function" && + typeof castObject.toJson === "function") { + return true; + } } - } - - return buf || bytesToUuid(rnds); + return false; } - -module.exports = v4; - - -/***/ }), - -/***/ 2557: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/// -const listenersMap = new WeakMap(); -const abortedMap = new WeakMap(); /** - * An aborter instance implements AbortSignal interface, can abort HTTP requests. - * - * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. - * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation - * cannot or will not ever be cancelled. - * - * @example - * Abort without timeout - * ```ts - * await doAsyncWork(AbortSignal.none); - * ``` + * A collection of HTTP header key/value pairs. */ -class AbortSignal { - constructor() { - /** - * onabort event listener. - */ - this.onabort = null; - listenersMap.set(this, []); - abortedMap.set(this, false); +class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } } /** - * Status of whether aborted or not. - * - * @readonly + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. */ - get aborted() { - if (!abortedMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString().trim(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? 
undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); } - return abortedMap.get(this); + return headers; } /** - * Creates a new AbortSignal instance that will never be aborted. - * - * @readonly + * Get the header names that are contained in this collection. */ - static get none() { - return new AbortSignal(); + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; } /** - * Added new "abort" event listener, only support "abort" event. - * - * @param _type - Only support "abort" event - * @param listener - The listener to be added + * Get the header values that are contained in this collection. */ - addEventListener( - // tslint:disable-next-line:variable-name - _type, listener) { - if (!listenersMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); } - const listeners = listenersMap.get(this); - listeners.push(listener); + return headerValues; } /** - * Remove "abort" event listener, only support "abort" event. - * - * @param _type - Only support "abort" event - * @param listener - The listener to be removed + * Get the JSON object representation of this HTTP header collection. */ - removeEventListener( - // tslint:disable-next-line:variable-name - _type, listener) { - if (!listenersMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } } - const listeners = listenersMap.get(this); - const index = listeners.indexOf(listener); - if (index > -1) { - listeners.splice(index, 1); + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } } + return result; } /** - * Dispatches a synthetic event to the AbortSignal. + * Get the string representation of this HTTP header collection. */ - dispatchEvent(_event) { - throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); } } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. - * Will try to trigger abort event for all linked AbortSignal nodes. - * - * - If there is a timeout, the timer will be cancelled. - * - If aborted is true, nothing will happen. - * - * @internal + * Encodes a string in base64 format. + * @param value - The string to encode */ -// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters -function abortSignal(signal) { - if (signal.aborted) { - return; - } - if (signal.onabort) { - signal.onabort.call(signal); - } - const listeners = listenersMap.get(signal); - if (listeners) { - // Create a copy of listeners so mutations to the array - // (e.g. via removeListener calls) don't affect the listeners - // we invoke. - listeners.slice().forEach((listener) => { - listener.call(signal, { type: "abort" }); - }); - } - abortedMap.set(signal, true); +function encodeString(value) { + return Buffer.from(value).toString("base64"); } - -// Copyright (c) Microsoft Corporation. /** - * This error is thrown when an asynchronous operation has been aborted. - * Check for this error by testing the `name` that the name property of the - * error matches `"AbortError"`. - * - * @example - * ```ts - * const controller = new AbortController(); - * controller.abort(); - * try { - * doAsyncWork(controller.signal) - * } catch (e) { - * if (e.name === 'AbortError') { - * // handle abort error here. - * } - * } - * ``` + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode */ -class AbortError extends Error { - constructor(message) { - super(message); - this.name = "AbortError"; - } +function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); } /** - * An AbortController provides an AbortSignal and the associated controls to signal - * that an asynchronous operation should be aborted. - * - * @example - * Abort an operation when another event fires - * ```ts - * const controller = new AbortController(); - * const signal = controller.signal; - * doAsyncWork(signal); - * button.addEventListener('click', () => controller.abort()); - * ``` - * - * @example - * Share aborter cross multiple operations in 30s - * ```ts - * // Upload the same data to 2 different data centers at the same time, - * // abort another when any of them is finished - * const controller = AbortController.withTimeout(30 * 1000); - * doAsyncWork(controller.signal).then(controller.abort); - * doAsyncWork(controller.signal).then(controller.abort); - *``` - * - * @example - * Cascaded aborting - * ```ts - * // All operations can't take more than 30 seconds - * const aborter = Aborter.timeout(30 * 1000); - * - * // Following 2 operations can't take more than 25 seconds - * await doAsyncWork(aborter.withTimeout(25 * 1000)); - * await doAsyncWork(aborter.withTimeout(25 * 1000)); - * ``` + * Decodes a base64 string into a byte array. 
+ * @param value - The base64 string to decode */ -class AbortController { - // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types - constructor(parentSignals) { - this._signal = new AbortSignal(); - if (!parentSignals) { - return; - } - // coerce parentSignals into an array - if (!Array.isArray(parentSignals)) { - // eslint-disable-next-line prefer-rest-params - parentSignals = arguments; - } - for (const parentSignal of parentSignals) { - // if the parent signal has already had abort() called, - // then call abort on this signal as well. - if (parentSignal.aborted) { - this.abort(); - } - else { - // when the parent signal aborts, this signal should as well. - parentSignal.addEventListener("abort", () => { - this.abort(); - }); - } - } - } - /** - * The AbortSignal associated with this controller that will signal aborted - * when the abort method is called on this controller. - * - * @readonly - */ - get signal() { - return this._signal; - } - /** - * Signal that any operations passed this controller's associated abort signal - * to cancel any remaining work and throw an `AbortError`. - */ - abort() { - abortSignal(this._signal); - } - /** - * Creates a new AbortSignal instance that will abort after the provided ms. - * @param ms - Elapsed time in milliseconds to trigger an abort. - */ - static timeout(ms) { - const signal = new AbortSignal(); - const timer = setTimeout(abortSignal, ms, signal); - // Prevent the active Timer from keeping the Node.js event loop active. - if (typeof timer.unref === "function") { - timer.unref(); - } - return signal; - } +function decodeString(value) { + return Buffer.from(value, "base64"); } -exports.AbortController = AbortController; -exports.AbortError = AbortError; -exports.AbortSignal = AbortSignal; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9645: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * A static-key-based credential that supports updating - * the underlying key value. + * A set of constants used internally when processing requests. */ -class AzureKeyCredential { +const Constants = { /** - * Create an instance of an AzureKeyCredential for use - * with a service client. - * - * @param key - The initial value of the key to use in authentication + * The core-http version */ - constructor(key) { - if (!key) { - throw new Error("key must be a non-empty string"); - } - this._key = key; - } + coreHttpVersion: "3.0.4", /** - * The value of the key to be used in authentication + * Specifies HTTP. */ - get key() { - return this._key; - } + HTTP: "http:", /** - * Change the value of the key. - * - * Updates will take effect upon the next request after - * updating the key value. - * - * @param newKey - The new key value to be used + * Specifies HTTPS. */ - update(newKey) { - this._key = newKey; - } -} + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. 
+ */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + ServiceUnavailable: 503, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + */ + USER_AGENT: "User-Agent", + }, +}; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Helper TypeGuard that checks if something is defined or not. - * @param thing - Anything - * @internal + * Default key used to access the XML attributes. */ -function isDefined(thing) { - return typeof thing !== "undefined" && thing !== null; +const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +const XML_CHARKEY = "_"; + +// Copyright (c) Microsoft Corporation. +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); } /** - * Helper TypeGuard that checks if the input is an object with the specified properties. - * Note: The properties may be inherited. - * @param thing - Anything. - * @param properties - The name of the properties that should appear in the object. - * @internal + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. */ -function isObjectWithProperties(thing, properties) { - if (!isDefined(thing) || typeof thing !== "object") { - return false; - } - for (const property of properties) { - if (!objectHasProperty(thing, property)) { - return false; - } +function stripResponse(response) { + const strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +function stripRequest(request) { + const strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); } - return true; + return strippedRequest; } /** - * Helper TypeGuard that checks if the input is an object with the specified property. - * Note: The property may be inherited. - * @param thing - Any object. - * @param property - The name of the property that should appear in the object. - * @internal + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. 
*/ -function objectHasProperty(thing, property) { - return typeof thing === "object" && property in thing; +function isValidUuid(uuid) { + return validUuidRegex.test(uuid); } - -// Copyright (c) Microsoft Corporation. /** - * A static name/key-based credential that supports updating - * the underlying name and key values. + * Generated UUID + * + * @returns RFC4122 v4 UUID. */ -class AzureNamedKeyCredential { - /** - * Create an instance of an AzureNamedKeyCredential for use - * with a service client. - * - * @param name - The initial value of the name to use in authentication. - * @param key - The initial value of the key to use in authentication. - */ - constructor(name, key) { - if (!name || !key) { - throw new TypeError("name and key must be non-empty strings"); - } - this._name = name; - this._key = key; - } - /** - * The value of the key to be used in authentication. - */ - get key() { - return this._key; - } - /** - * The value of the name to be used in authentication. - */ - get name() { - return this._name; - } - /** - * Change the value of the key. - * - * Updates will take effect upon the next request after - * updating the key value. - * - * @param newName - The new name value to be used. - * @param newKey - The new key value to be used. - */ - update(newName, newKey) { - if (!newName || !newKey) { - throw new TypeError("newName and newKey must be non-empty strings"); - } - this._name = newName; - this._key = newKey; - } +function generateUuid() { + return uuid.v4(); } /** - * Tests an object to determine whether it implements NamedKeyCredential. + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! * - * @param credential - The assumed NamedKeyCredential to be tested. + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises */ -function isNamedKeyCredential(credential) { - return (isObjectWithProperties(credential, ["name", "key"]) && - typeof credential.key === "string" && - typeof credential.name === "string"); +function executePromisesSequentially(promiseFactories, kickstart) { + let result = Promise.resolve(kickstart); + promiseFactories.forEach((promiseFactory) => { + result = result.then(promiseFactory); + }); + return result; } - -// Copyright (c) Microsoft Corporation. /** - * A static-signature-based credential that supports updating - * the underlying signature value. + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody */ -class AzureSASCredential { - /** - * Create an instance of an AzureSASCredential for use - * with a service client. 
- * - * @param signature - The initial value of the shared access signature to use in authentication - */ - constructor(signature) { - if (!signature) { - throw new Error("shared access signature must be a non-empty string"); - } - this._signature = signature; +// eslint-disable-next-line @typescript-eslint/ban-types +function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); } - /** - * The value of the shared access signature to be used in authentication - */ - get signature() { - return this._signature; + // eslint-disable-next-line @typescript-eslint/ban-types + return (cb) => { + promise + .then((data) => { + // eslint-disable-next-line promise/no-callback-in-promise + return cb(undefined, data); + }) + .catch((err) => { + // eslint-disable-next-line promise/no-callback-in-promise + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. + * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); } - /** - * Change the value of the signature. - * - * Updates will take effect upon the next request after - * updating the signature value. - * - * @param newSignature - The new shared access signature value to be used - */ - update(newSignature) { - if (!newSignature) { - throw new Error("shared access signature must be a non-empty string"); - } - this._signature = newSignature; + return (cb) => { + promise + .then((data) => { + return process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }) + .catch((err) => { + process.nextTick(cb, err); + }); + }; +} +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; } + const result = { [elementName]: obj }; + result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; } /** - * Tests an object to determine whether it implements SASCredential. - * - * @param credential - The assumed SASCredential to be tested. + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. */ -function isSASCredential(credential) { - return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); +function applyMixins(targetCtorParam, sourceCtors) { + const castTargetCtorParam = targetCtorParam; + sourceCtors.forEach((sourceCtor) => { + Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { + castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; + }); + }); } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; /** - * Tests an object to determine whether it implements TokenCredential. - * - * @param credential - The assumed TokenCredential to be tested. + * Indicates whether the given string is in ISO 8601 format. 
+ * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. */ -function isTokenCredential(credential) { - // Check for an object with a 'getToken' function and possibly with - // a 'signRequest' function. We do this check to make sure that - // a ServiceClientCredentials implementor (like TokenClientCredentials - // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if - // it doesn't actually implement TokenCredential also. - const castCredential = credential; - return (castCredential && - typeof castCredential.getToken === "function" && - (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); -} - -exports.AzureKeyCredential = AzureKeyCredential; -exports.AzureNamedKeyCredential = AzureNamedKeyCredential; -exports.AzureSASCredential = AzureSASCredential; -exports.isNamedKeyCredential = isNamedKeyCredential; -exports.isSASCredential = isSASCredential; -exports.isTokenCredential = isTokenCredential; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 4607: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var uuid = __nccwpck_require__(5840); -var util = __nccwpck_require__(3837); -var tslib = __nccwpck_require__(4351); -var xml2js = __nccwpck_require__(6189); -var coreUtil = __nccwpck_require__(1333); -var logger$1 = __nccwpck_require__(3233); -var coreAuth = __nccwpck_require__(9645); -var os = __nccwpck_require__(2037); -var http = __nccwpck_require__(3685); -var https = __nccwpck_require__(5687); -var abortController = __nccwpck_require__(2557); -var tunnel = __nccwpck_require__(4294); -var stream = __nccwpck_require__(2781); -var FormData = __nccwpck_require__(6279); -var node_fetch = __nccwpck_require__(467); -var coreTracing = __nccwpck_require__(4175); - -function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } - -function _interopNamespace(e) { - if (e && e.__esModule) return e; - var n = Object.create(null); - if (e) { - Object.keys(e).forEach(function (k) { - if (k !== 'default') { - var d = Object.getOwnPropertyDescriptor(e, k); - Object.defineProperty(n, k, d.get ? d : { - enumerable: true, - get: function () { return e[k]; } - }); - } - }); - } - n["default"] = e; - return Object.freeze(n); -} - -var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); -var os__namespace = /*#__PURE__*/_interopNamespace(os); -var http__namespace = /*#__PURE__*/_interopNamespace(http); -var https__namespace = /*#__PURE__*/_interopNamespace(https); -var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); -var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); -var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * A collection of HttpHeaders that can be sent with a HTTP request. 
- */ -function getHeaderKey(headerName) { - return headerName.toLowerCase(); -} -function isHttpHeadersLike(object) { - if (object && typeof object === "object") { - const castObject = object; - if (typeof castObject.rawHeaders === "function" && - typeof castObject.clone === "function" && - typeof castObject.get === "function" && - typeof castObject.set === "function" && - typeof castObject.contains === "function" && - typeof castObject.remove === "function" && - typeof castObject.headersArray === "function" && - typeof castObject.headerValues === "function" && - typeof castObject.headerNames === "function" && - typeof castObject.toJson === "function") { - return true; - } - } - return false; -} -/** - * A collection of HTTP header key/value pairs. - */ -class HttpHeaders { - constructor(rawHeaders) { - this._headersMap = {}; - if (rawHeaders) { - for (const headerName in rawHeaders) { - this.set(headerName, rawHeaders[headerName]); - } - } - } - /** - * Set a header in this collection with the provided name and value. The name is - * case-insensitive. - * @param headerName - The name of the header to set. This value is case-insensitive. - * @param headerValue - The value of the header to set. - */ - set(headerName, headerValue) { - this._headersMap[getHeaderKey(headerName)] = { - name: headerName, - value: headerValue.toString(), - }; - } - /** - * Get the header value for the provided header name, or undefined if no header exists in this - * collection with the provided name. - * @param headerName - The name of the header. - */ - get(headerName) { - const header = this._headersMap[getHeaderKey(headerName)]; - return !header ? undefined : header.value; - } - /** - * Get whether or not this header collection contains a header entry for the provided header name. - */ - contains(headerName) { - return !!this._headersMap[getHeaderKey(headerName)]; - } - /** - * Remove the header with the provided headerName. Return whether or not the header existed and - * was removed. - * @param headerName - The name of the header to remove. - */ - remove(headerName) { - const result = this.contains(headerName); - delete this._headersMap[getHeaderKey(headerName)]; - return result; - } - /** - * Get the headers that are contained this collection as an object. - */ - rawHeaders() { - return this.toJson({ preserveCase: true }); - } - /** - * Get the headers that are contained in this collection as an array. - */ - headersArray() { - const headers = []; - for (const headerKey in this._headersMap) { - headers.push(this._headersMap[headerKey]); - } - return headers; - } - /** - * Get the header names that are contained in this collection. - */ - headerNames() { - const headerNames = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { - headerNames.push(headers[i].name); - } - return headerNames; - } - /** - * Get the header values that are contained in this collection. - */ - headerValues() { - const headerValues = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { - headerValues.push(headers[i].value); - } - return headerValues; - } - /** - * Get the JSON object representation of this HTTP header collection. 
- */ - toJson(options = {}) { - const result = {}; - if (options.preserveCase) { - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[header.name] = header.value; - } - } - else { - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[getHeaderKey(header.name)] = header.value; - } - } - return result; - } - /** - * Get the string representation of this HTTP header collection. - */ - toString() { - return JSON.stringify(this.toJson({ preserveCase: true })); - } - /** - * Create a deep clone/copy of this HttpHeaders collection. - */ - clone() { - const resultPreservingCasing = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - resultPreservingCasing[header.name] = header.value; - } - return new HttpHeaders(resultPreservingCasing); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Encodes a string in base64 format. - * @param value - The string to encode - */ -function encodeString(value) { - return Buffer.from(value).toString("base64"); -} -/** - * Encodes a byte array in base64 format. - * @param value - The Uint8Aray to encode - */ -function encodeByteArray(value) { - // Buffer.from accepts | -- the TypeScript definition is off here - // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length - const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); - return bufferValue.toString("base64"); -} -/** - * Decodes a base64 string into a byte array. - * @param value - The base64 string to decode - */ -function decodeString(value) { - return Buffer.from(value, "base64"); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * A set of constants used internally when processing requests. - */ -const Constants = { - /** - * The core-http version - */ - coreHttpVersion: "3.0.1", - /** - * Specifies HTTP. - */ - HTTP: "http:", - /** - * Specifies HTTPS. - */ - HTTPS: "https:", - /** - * Specifies HTTP Proxy. - */ - HTTP_PROXY: "HTTP_PROXY", - /** - * Specifies HTTPS Proxy. - */ - HTTPS_PROXY: "HTTPS_PROXY", - /** - * Specifies NO Proxy. - */ - NO_PROXY: "NO_PROXY", - /** - * Specifies ALL Proxy. - */ - ALL_PROXY: "ALL_PROXY", - HttpConstants: { - /** - * Http Verbs - */ - HttpVerbs: { - PUT: "PUT", - GET: "GET", - DELETE: "DELETE", - POST: "POST", - MERGE: "MERGE", - HEAD: "HEAD", - PATCH: "PATCH", - }, - StatusCodes: { - TooManyRequests: 429, - ServiceUnavailable: 503, - }, - }, - /** - * Defines constants for use with HTTP headers. - */ - HeaderConstants: { - /** - * The Authorization header. - */ - AUTHORIZATION: "authorization", - AUTHORIZATION_SCHEME: "Bearer", - /** - * The Retry-After response-header field can be used with a 503 (Service - * Unavailable) or 349 (Too Many Requests) responses to indicate how long - * the service is expected to be unavailable to the requesting client. - */ - RETRY_AFTER: "Retry-After", - /** - * The UserAgent header. - */ - USER_AGENT: "User-Agent", - }, -}; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Default key used to access the XML attributes. - */ -const XML_ATTRKEY = "$"; -/** - * Default key used to access the XML value content. - */ -const XML_CHARKEY = "_"; - -// Copyright (c) Microsoft Corporation. 
-const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; -/** - * A constant that indicates whether the environment is node.js or browser based. - */ -const isNode = typeof process !== "undefined" && - !!process.version && - !!process.versions && - !!process.versions.node; -/** - * Encodes an URI. - * - * @param uri - The URI to be encoded. - * @returns The encoded URI. - */ -function encodeUri(uri) { - return encodeURIComponent(uri) - .replace(/!/g, "%21") - .replace(/"/g, "%27") - .replace(/\(/g, "%28") - .replace(/\)/g, "%29") - .replace(/\*/g, "%2A"); -} -/** - * Returns a stripped version of the Http Response which only contains body, - * headers and the status. - * - * @param response - The Http Response - * @returns The stripped version of Http Response. - */ -function stripResponse(response) { - const strippedResponse = {}; - strippedResponse.body = response.bodyAsText; - strippedResponse.headers = response.headers; - strippedResponse.status = response.status; - return strippedResponse; -} -/** - * Returns a stripped version of the Http Request that does not contain the - * Authorization header. - * - * @param request - The Http Request object - * @returns The stripped version of Http Request. - */ -function stripRequest(request) { - const strippedRequest = request.clone(); - if (strippedRequest.headers) { - strippedRequest.headers.remove("authorization"); - } - return strippedRequest; -} -/** - * Validates the given uuid as a string - * - * @param uuid - The uuid as a string that needs to be validated - * @returns True if the uuid is valid; false otherwise. - */ -function isValidUuid(uuid) { - return validUuidRegex.test(uuid); -} -/** - * Generated UUID - * - * @returns RFC4122 v4 UUID. - */ -function generateUuid() { - return uuid.v4(); -} -/** - * Executes an array of promises sequentially. Inspiration of this method is here: - * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! - * - * @param promiseFactories - An array of promise factories(A function that return a promise) - * @param kickstart - Input to the first promise that is used to kickstart the promise chain. - * If not provided then the promise chain starts with undefined. - * @returns A chain of resolved or rejected promises - */ -function executePromisesSequentially(promiseFactories, kickstart) { - let result = Promise.resolve(kickstart); - promiseFactories.forEach((promiseFactory) => { - result = result.then(promiseFactory); - }); - return result; -} -/** - * Converts a Promise to a callback. - * @param promise - The Promise to be converted to a callback - * @returns A function that takes the callback `(cb: Function) => void` - * @deprecated generated code should instead depend on responseToBody - */ -// eslint-disable-next-line @typescript-eslint/ban-types -function promiseToCallback(promise) { - if (typeof promise.then !== "function") { - throw new Error("The provided input is not a Promise."); - } - // eslint-disable-next-line @typescript-eslint/ban-types - return (cb) => { - promise - .then((data) => { - // eslint-disable-next-line promise/no-callback-in-promise - return cb(undefined, data); - }) - .catch((err) => { - // eslint-disable-next-line promise/no-callback-in-promise - cb(err); - }); - }; -} -/** - * Converts a Promise to a service callback. 
- * @param promise - The Promise of HttpOperationResponse to be converted to a service callback - * @returns A function that takes the service callback (cb: ServiceCallback): void - */ -function promiseToServiceCallback(promise) { - if (typeof promise.then !== "function") { - throw new Error("The provided input is not a Promise."); - } - return (cb) => { - promise - .then((data) => { - return process.nextTick(cb, undefined, data.parsedBody, data.request, data); - }) - .catch((err) => { - process.nextTick(cb, err); - }); - }; -} -function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { - if (!Array.isArray(obj)) { - obj = [obj]; - } - if (!xmlNamespaceKey || !xmlNamespace) { - return { [elementName]: obj }; - } - const result = { [elementName]: obj }; - result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; - return result; -} -/** - * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor - * @param targetCtor - The target object on which the properties need to be applied. - * @param sourceCtors - An array of source objects from which the properties need to be taken. - */ -function applyMixins(targetCtorParam, sourceCtors) { - const castTargetCtorParam = targetCtorParam; - sourceCtors.forEach((sourceCtor) => { - Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { - castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; - }); - }); -} -const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; -/** - * Indicates whether the given string is in ISO 8601 format. - * @param value - The value to be validated for ISO 8601 duration format. - * @returns `true` if valid, `false` otherwise. - */ -function isDuration(value) { - return validateISODuration.test(value); +function isDuration(value) { + return validateISODuration.test(value); } /** * Replace all of the instances of searchValue in value with the provided replaceValue. 
@@ -19086,7 +17817,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) { factories.push(throttlingRetryPolicy()); } factories.push(deserializationPolicy(options.deserializationContentTypes)); - if (isNode) { + if (coreUtil.isNode) { factories.push(proxyPolicy(options.proxySettings)); } factories.push(logPolicy({ logger: logger.info })); @@ -19118,7 +17849,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); - if (isNode) { + if (coreUtil.isNode) { requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); } const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); @@ -19131,7 +17862,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { requestPolicyFactories.push(authPolicyFactory); } requestPolicyFactories.push(logPolicy(loggingOptions)); - if (isNode && pipelineOptions.decompressResponse === false) { + if (coreUtil.isNode && pipelineOptions.decompressResponse === false) { requestPolicyFactories.push(disableResponseDecompressionPolicy()); } return { @@ -19262,10 +17993,7 @@ function flattenResponse(_response, responseSpec) { } function getCredentialScopes(options, baseUri) { if (options === null || options === void 0 ? void 0 : options.credentialScopes) { - const scopes = options.credentialScopes; - return Array.isArray(scopes) - ? scopes.map((scope) => new URL(scope).toString()) - : new URL(scopes).toString(); + return options.credentialScopes; } if (baseUri) { return `${baseUri}/.default`; @@ -19498,6 +18226,10 @@ Object.defineProperty(exports, "delay", ({ enumerable: true, get: function () { return coreUtil.delay; } })); +Object.defineProperty(exports, "isNode", ({ + enumerable: true, + get: function () { return coreUtil.isNode; } +})); Object.defineProperty(exports, "isTokenCredential", ({ enumerable: true, get: function () { return coreAuth.isTokenCredential; } @@ -19537,7 +18269,6 @@ exports.generateUuid = generateUuid; exports.getDefaultProxySettings = getDefaultProxySettings; exports.getDefaultUserAgentValue = getDefaultUserAgentValue; exports.isDuration = isDuration; -exports.isNode = isNode; exports.isValidUuid = isValidUuid; exports.keepAlivePolicy = keepAlivePolicy; exports.logPolicy = logPolicy; @@ -20086,232 +18817,915 @@ module.exports = function(dst, src) { /***/ }), -/***/ 7094: +/***/ 3415: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); 
+Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); -var logger$1 = __nccwpck_require__(3233); -var abortController = __nccwpck_require__(2557); -var coreUtil = __nccwpck_require__(1333); +var _v = _interopRequireDefault(__nccwpck_require__(4757)); -// Copyright (c) Microsoft Corporation. -/** - * The `@azure/logger` configuration for this package. - * @internal - */ -const logger = logger$1.createClientLogger("core-lro"); +var _v2 = _interopRequireDefault(__nccwpck_require__(9982)); -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The default time interval to wait before sending the next polling request. - */ -const POLL_INTERVAL_IN_MS = 2000; -/** - * The closed set of terminal states. - */ -const terminalStates = ["succeeded", "canceled", "failed"]; +var _v3 = _interopRequireDefault(__nccwpck_require__(5393)); -// Copyright (c) Microsoft Corporation. -/** - * Deserializes the state - */ -function deserializeState(serializedState) { - try { - return JSON.parse(serializedState).state; - } - catch (e) { - throw new Error(`Unable to deserialize input state: ${serializedState}`); - } +var _v4 = _interopRequireDefault(__nccwpck_require__(8788)); + +var _nil = _interopRequireDefault(__nccwpck_require__(657)); + +var _version = _interopRequireDefault(__nccwpck_require__(7909)); + +var _validate = _interopRequireDefault(__nccwpck_require__(4418)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(4794)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7079)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 4153: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); } -function setStateError(inputs) { - const { state, stateProxy, isOperationError } = inputs; - return (error) => { - if (isOperationError(error)) { - stateProxy.setError(state, error); - stateProxy.setFailed(state); - } - throw error; - }; + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 657: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 7079: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(4418)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; } -function processOperationStatus(result) { - const { state, stateProxy, status, isDone, processResult, response, setErrorAsResult } = result; - switch (status) { - case "succeeded": { - stateProxy.setSucceeded(state); - break; - } - case "failed": { - stateProxy.setError(state, new Error(`The long-running operation has failed`)); - stateProxy.setFailed(state); - break; - } - case "canceled": { - stateProxy.setCanceled(state); - break; - } - } - if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || - (isDone === undefined && - ["succeeded", "canceled"].concat(setErrorAsResult ? 
[] : ["failed"]).includes(status))) { - stateProxy.setResult(state, buildResult({ - response, - state, - processResult, - })); - } + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 690: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 979: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); } -function buildResult(inputs) { - const { processResult, response, state } = inputs; - return processResult ? processResult(response, state) : response; + +/***/ }), + +/***/ 6631: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); } + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 4794: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(4418)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + /** - * Initiates the long-running operation. + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ -async function initOperation(inputs) { - const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; - const { operationLocation, resourceLocation, metadata, response } = await init(); - if (operationLocation) - withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); - const config = { - metadata, - operationLocation, - resourceLocation, - }; - logger.verbose(`LRO: Operation description:`, config); - const state = stateProxy.initState(config); - const status = getOperationStatus({ response, state, operationLocation }); - processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); - return state; -} -async function pollOperationHelper(inputs) { - const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options, } = inputs; - const response = await poll(operationLocation, options).catch(setStateError({ - state, - stateProxy, - isOperationError, - })); - const status = getOperationStatus(response, state); - logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${terminalStates.includes(status) ? "Stopped" : "Running"}`); - if (status === "succeeded") { - const resourceLocation = getResourceLocation(response, state); - if (resourceLocation !== undefined) { - return { - response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), - status, - }; - } - } - return { response, status }; -} -/** Polls the long-running operation. */ -async function pollOperation(inputs) { - const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, updateState, setDelay, isDone, setErrorAsResult, } = inputs; - const { operationLocation } = state.config; - if (operationLocation !== undefined) { - const { response, status } = await pollOperationHelper({ - poll, - getOperationStatus, - state, - stateProxy, - operationLocation, - getResourceLocation, - isOperationError, - options, - }); - processOperationStatus({ - status, - response, - state, - stateProxy, - isDone, - processResult, - setErrorAsResult, - }); - if (!terminalStates.includes(status)) { - const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); - if (intervalInMs) - setDelay(intervalInMs); - const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); - if (location !== undefined) { - const isUpdated = operationLocation !== location; - state.config.operationLocation = location; - withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); - } - else - withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); - } - updateState === null || updateState === void 0 ? void 0 : updateState(state, response); - } -} +const byteToHex = []; -// Copyright (c) Microsoft Corporation. -function getOperationLocationPollingUrl(inputs) { - const { azureAsyncOperation, operationLocation } = inputs; - return operationLocation !== null && operationLocation !== void 0 ? 
operationLocation : azureAsyncOperation; -} -function getLocationHeader(rawResponse) { - return rawResponse.headers["location"]; -} -function getOperationLocationHeader(rawResponse) { - return rawResponse.headers["operation-location"]; +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); } -function getAzureAsyncOperationHeader(rawResponse) { - return rawResponse.headers["azure-asyncoperation"]; + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; } -function findResourceLocation(inputs) { - const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; - switch (requestMethod) { - case "PUT": { - return requestPath; - } - case "DELETE": { - return undefined; - } - default: { - switch (resourceLocationConfig) { - case "azure-async-operation": { - return undefined; - } - case "original-uri": { - return requestPath; - } - case "location": - default: { - return location; - } - } - } + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 4757: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(979)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(4794)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; } -} -function inferLroMode(inputs) { - const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; - const operationLocation = getOperationLocationHeader(rawResponse); - const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); - const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); - const location = getLocationHeader(rawResponse); - const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); - if (pollingUrl !== undefined) { - return { - mode: "OperationLocation", - operationLocation: pollingUrl, - resourceLocation: findResourceLocation({ - requestMethod: normalizedRequestMethod, - location, - requestPath, - resourceLocationConfig, - }), - }; + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; } - else if (location !== undefined) { - return { - mode: "ResourceLocation", - operationLocation: location, - }; + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 9982: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(4085)); + +var _md = _interopRequireDefault(__nccwpck_require__(4153)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 4085: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(4794)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7079)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 5393: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(979)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(4794)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 8788: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(4085)); + +var _sha = _interopRequireDefault(__nccwpck_require__(6631)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 4418: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(690)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 7909: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(4418)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 7094: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var logger$1 = __nccwpck_require__(3233); +var abortController = __nccwpck_require__(2557); +var coreUtil = __nccwpck_require__(1333); + +// Copyright (c) Microsoft Corporation. +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +const logger = logger$1.createClientLogger("core-lro"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The default time interval to wait before sending the next polling request. + */ +const POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. + */ +const terminalStates = ["succeeded", "canceled", "failed"]; + +// Copyright (c) Microsoft Corporation. +/** + * Deserializes the state + */ +function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`Unable to deserialize input state: ${serializedState}`); + } +} +function setStateError(inputs) { + const { state, stateProxy, isOperationError } = inputs; + return (error) => { + if (isOperationError(error)) { + stateProxy.setError(state, error); + stateProxy.setFailed(state); + } + throw error; + }; +} +function appendReadableErrorMessage(currentMessage, innerMessage) { + let message = currentMessage; + if (message.slice(-1) !== ".") { + message = message + "."; + } + return message + " " + innerMessage; +} +function simplifyError(err) { + let message = err.message; + let code = err.code; + let curErr = err; + while (curErr.innererror) { + curErr = curErr.innererror; + code = curErr.code; + message = appendReadableErrorMessage(message, curErr.message); + } + return { + code, + message, + }; +} +function processOperationStatus(result) { + const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; + switch (status) { + case "succeeded": { + stateProxy.setSucceeded(state); + break; + } + case "failed": { + const err = getError === null || getError === void 0 ? void 0 : getError(response); + let postfix = ""; + if (err) { + const { code, message } = simplifyError(err); + postfix = `. ${code}. ${message}`; + } + const errStr = `The long-running operation has failed${postfix}`; + stateProxy.setError(state, new Error(errStr)); + stateProxy.setFailed(state); + logger.warning(errStr); + break; + } + case "canceled": { + stateProxy.setCanceled(state); + break; + } + } + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || + (isDone === undefined && + ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult, + })); + } +} +function buildResult(inputs) { + const { processResult, response, state } = inputs; + return processResult ? processResult(response, state) : response; +} +/** + * Initiates the long-running operation. 
+ */ +async function initOperation(inputs) { + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; + const { operationLocation, resourceLocation, metadata, response } = await init(); + if (operationLocation) + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); + const config = { + metadata, + operationLocation, + resourceLocation, + }; + logger.verbose(`LRO: Operation description:`, config); + const state = stateProxy.initState(config); + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); + return state; +} +async function pollOperationHelper(inputs) { + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options, } = inputs; + const response = await poll(operationLocation, options).catch(setStateError({ + state, + stateProxy, + isOperationError, + })); + const status = getOperationStatus(response, state); + logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== undefined) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), + status, + }; + } + } + return { response, status }; +} +/** Polls the long-running operation. */ +async function pollOperation(inputs) { + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult, } = inputs; + const { operationLocation } = state.config; + if (operationLocation !== undefined) { + const { response, status } = await pollOperationHelper({ + poll, + getOperationStatus, + state, + stateProxy, + operationLocation, + getResourceLocation, + isOperationError, + options, + }); + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + getError, + setErrorAsResult, + }); + if (!terminalStates.includes(status)) { + const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); + if (intervalInMs) + setDelay(intervalInMs); + const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); + if (location !== undefined) { + const isUpdated = operationLocation !== location; + state.config.operationLocation = location; + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); + } + else + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); + } + updateState === null || updateState === void 0 ? void 0 : updateState(state, response); + } +} + +// Copyright (c) Microsoft Corporation. +function getOperationLocationPollingUrl(inputs) { + const { azureAsyncOperation, operationLocation } = inputs; + return operationLocation !== null && operationLocation !== void 0 ? 
operationLocation : azureAsyncOperation; +} +function getLocationHeader(rawResponse) { + return rawResponse.headers["location"]; +} +function getOperationLocationHeader(rawResponse) { + return rawResponse.headers["operation-location"]; +} +function getAzureAsyncOperationHeader(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; +} +function findResourceLocation(inputs) { + var _a; + const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "DELETE": { + return undefined; + } + case "PATCH": { + return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath; + } + default: { + return getDefault(); + } + } + function getDefault() { + switch (resourceLocationConfig) { + case "azure-async-operation": { + return undefined; + } + case "original-uri": { + return requestPath; + } + case "location": + default: { + return location; + } + } + } +} +function inferLroMode(inputs) { + const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; + const operationLocation = getOperationLocationHeader(rawResponse); + const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); + const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); + const location = getLocationHeader(rawResponse); + const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); + if (pollingUrl !== undefined) { + return { + mode: "OperationLocation", + operationLocation: pollingUrl, + resourceLocation: findResourceLocation({ + requestMethod: normalizedRequestMethod, + location, + requestPath, + resourceLocationConfig, + }), + }; + } + else if (location !== undefined) { + return { + mode: "ResourceLocation", + operationLocation: location, + }; } else if (normalizedRequestMethod === "PUT" && requestPath) { return { @@ -20383,6 +19797,18 @@ function parseRetryAfter({ rawResponse }) { } return undefined; } +function getErrorFromResponse(response) { + const error = response.flatResponse.error; + if (!error) { + logger.warning(`The long-running operation failed but there is no error property in the response's body`); + return; + } + if (!error.code || !error.message) { + logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); + return; + } + return error; +} function calculatePollingIntervalFromDate(retryAfterDate) { const timeNow = Math.floor(new Date().getTime()); const retryAfterTime = retryAfterDate.getTime(); @@ -20490,6 +19916,7 @@ async function pollHttpOperation(inputs) { processResult: processResult ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, updateState, getPollingInterval: parseRetryAfter, getOperationLocation, @@ -20531,7 +19958,7 @@ const createStateProxy$1 = () => ({ * Returns a poller factory. 
*/ function buildCreatePoller(inputs) { - const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, resolveOnUnsuccessful, } = inputs; + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful, } = inputs; return async ({ init, poll }, options) => { const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = POLL_INTERVAL_IN_MS, restoreFrom, } = options || {}; const stateProxy = createStateProxy$1(); @@ -20636,6 +20063,7 @@ function buildCreatePoller(inputs) { getOperationStatus: getStatusFromPollResponse, getResourceLocation, processResult, + getError, updateState, options: pollOptions, setDelay: (pollIntervalInMs) => { @@ -20674,6 +20102,7 @@ async function createHttpPoller(lro, options) { getOperationLocation, getResourceLocation, getPollingInterval: parseRetryAfter, + getError: getErrorFromResponse, resolveOnUnsuccessful, })({ init: async () => { @@ -21555,20 +20984,11 @@ exports.setSpanContext = setSpanContext; "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); - var abortController = __nccwpck_require__(2557); var crypto = __nccwpck_require__(6113); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -var _a$1; -/** - * A constant that indicates whether the environment the code is running is Node.JS. - */ -const isNode = typeof process !== "undefined" && Boolean(process.version) && Boolean((_a$1 = process.versions) === null || _a$1 === void 0 ? void 0 : _a$1.node); - -// Copyright (c) Microsoft Corporation. /** * Creates an abortable promise. * @param buildPromise - A function that takes the resolve and reject functions as parameters. @@ -21609,6 +21029,7 @@ function createAbortablePromise(buildPromise, options) { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. const StandardAbortMessage = "The delay was aborted."; /** * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. @@ -21628,6 +21049,27 @@ function delay(timeInMs, options) { }); } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. + */ +async function cancelablePromiseRace(abortablePromiseBuilders, options) { + var _a, _b; + const aborter = new abortController.AbortController(); + function abortHandler() { + aborter.abort(); + } + (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler); + try { + return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); + } + finally { + aborter.abort(); + (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? void 0 : _b.removeEventListener("abort", abortHandler); + } +} + // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** @@ -21664,6 +21106,7 @@ function isObject(input) { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * Typeguard for an error object shape (has name and message) * @param e - Something caught by a catch clause. @@ -21704,6 +21147,7 @@ function getErrorMessage(e) { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
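// --- Editor's annotation (not part of the generated bundle) ---------------------
// A minimal, hedged sketch of how the cancelablePromiseRace helper introduced in the
// hunk above is meant to be used: each entry is a "builder" that receives the shared
// { abortSignal } and must start abort-aware work; once the first promise settles,
// the remaining ones are aborted. The task names and delays below are illustrative
// placeholders, not code from this repository.
const { setTimeout: sleep } = require("node:timers/promises");

// A builder: returns a promise that resolves with `name` after `ms`, or rejects with
// an AbortError if the race is decided before the timer fires.
const task = (name, ms) => ({ abortSignal }) => sleep(ms, name, { signal: abortSignal });

// Example (assuming cancelablePromiseRace from @azure/core-util is in scope):
//   const winner = await cancelablePromiseRace([task("fast", 10), task("slow", 10_000)]);
//   // winner === "fast"; the 10s timer is aborted instead of keeping the process alive.
// ---------------------------------------------------------------------------------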
/** * Generates a SHA-256 HMAC signature. * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. @@ -21759,11 +21203,50 @@ function objectHasProperty(thing, property) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -var _a; +/* + * NOTE: When moving this file, please update "react-native" section in package.json. + */ +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +function generateUUID() { + let uuid = ""; + for (let i = 0; i < 32; i++) { + // Generate a random number between 0 and 15 + const randomNumber = Math.floor(Math.random() * 16); + // Set the UUID version to 4 in the 13th position + if (i === 12) { + uuid += "4"; + } + else if (i === 16) { + // Set the UUID variant to "10" in the 17th position + uuid += (randomNumber & 0x3) | 0x8; + } + else { + // Add a random hexadecimal digit to the UUID string + uuid += randomNumber.toString(16); + } + // Add hyphens to the UUID string at the appropriate positions + if (i === 7 || i === 11 || i === 15 || i === 19) { + uuid += "-"; + } + } + return uuid; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var _a$1; // NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. -const uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" +let uuidFunction = typeof ((_a$1 = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a$1 === void 0 ? void 0 : _a$1.randomUUID) === "function" ? globalThis.crypto.randomUUID.bind(globalThis.crypto) : crypto.randomUUID; +// Not defined in earlier versions of Node.js 14 +if (!uuidFunction) { + uuidFunction = generateUUID; +} /** * Generated Universally Unique Identifier * @@ -21773,19 +21256,88 @@ function randomUUID() { return uuidFunction(); } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var _a, _b, _c, _d; +/** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ +// eslint-disable-next-line @azure/azure-sdk/ts-no-window +const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +const isWebWorker = typeof self === "object" && + typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && + (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || + ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || + ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +const isDeno = typeof Deno !== "undefined" && + typeof Deno.version !== "undefined" && + typeof Deno.version.deno !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +const isNode = typeof process !== "undefined" && + Boolean(process.version) && + Boolean((_d = process.versions) === null || _d === void 0 ? 
void 0 : _d.node) && + // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions + !isDeno; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is in React-Native. + */ +// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js +const isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +function uint8ArrayToString(bytes, format) { + return Buffer.from(bytes).toString(format); +} +/** + * The helper that transforms string to specific character encoded bytes array. + * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +function stringToUint8Array(value, format) { + return Buffer.from(value, format); +} + +exports.cancelablePromiseRace = cancelablePromiseRace; exports.computeSha256Hash = computeSha256Hash; exports.computeSha256Hmac = computeSha256Hmac; exports.createAbortablePromise = createAbortablePromise; exports.delay = delay; exports.getErrorMessage = getErrorMessage; exports.getRandomIntegerInclusive = getRandomIntegerInclusive; +exports.isBrowser = isBrowser; +exports.isBun = isBun; exports.isDefined = isDefined; +exports.isDeno = isDeno; exports.isError = isError; exports.isNode = isNode; exports.isObject = isObject; exports.isObjectWithProperties = isObjectWithProperties; +exports.isReactNative = isReactNative; +exports.isWebWorker = isWebWorker; exports.objectHasProperty = objectHasProperty; exports.randomUUID = randomUUID; +exports.stringToUint8Array = stringToUint8Array; +exports.uint8ArrayToString = uint8ArrayToString; //# sourceMappingURL=index.js.map @@ -23349,7 +22901,8 @@ const BlobPropertiesInternal = { name: "Enum", allowedValues: [ "rehydrate-pending-to-hot", - "rehydrate-pending-to-cool" + "rehydrate-pending-to-cool", + "rehydrate-pending-to-cold" ] } }, @@ -30519,7 +30072,7 @@ const timeoutInSeconds = { const version = { parameterPath: "version", mapper: { - defaultValue: "2021-12-02", + defaultValue: "2023-11-03", isConstant: true, serializedName: "x-ms-version", type: { @@ -35350,8 +34903,8 @@ const logger = logger$1.createClientLogger("storage-blob"); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-const SDK_VERSION = "12.13.0"; -const SERVICE_VERSION = "2021-12-02"; +const SDK_VERSION = "12.17.0"; +const SERVICE_VERSION = "2023-11-03"; const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB const BLOCK_BLOB_MAX_BLOCKS = 50000; @@ -35715,7 +35268,11 @@ function extractConnectionStringParts(connectionString) { else { // SAS connection string const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); - const accountName = getAccountNameFromUrl(blobEndpoint); + let accountName = getValueInConnString(connectionString, "AccountName"); + // if accountName is empty, try to read it from BlobEndpoint + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } if (!blobEndpoint) { throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); } @@ -36009,10 +35566,10 @@ function isIpEndpointStyle(parsedUrl) { } const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort()); // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. - // Case 2: localhost(:port), use broad regex to match port part. + // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. - return (/^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || + return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || (parsedUrl.getPort() !== undefined && PathStylePorts.includes(parsedUrl.getPort()))); } /** @@ -36188,9 +35745,7 @@ function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { var _a; return Object.assign(Object.assign({}, internalResponse), { segment: { blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = { - name: BlobNameToString(blobPrefixInternal.name), - }; + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); return blobPrefix; }), blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { @@ -37209,7 +36764,7 @@ class StorageSharedKeyCredential extends Credential { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ const packageName = "azure-storage-blob"; -const packageVersion = "12.13.0"; +const packageVersion = "12.17.0"; class StorageClientContext extends coreHttp__namespace.ServiceClient { /** * Initializes a new instance of the StorageClientContext class. 
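// --- Editor's annotation (not part of the generated bundle) ---------------------
// A hedged sketch of what the isIpEndpointStyle change in the hunk above buys: the
// host-matching regex (copied verbatim from the new code) now treats
// "host.docker.internal" like "localhost" or a literal IP, so emulator endpoints
// reached from inside a Docker container are parsed path-style (account name taken
// from the URL path rather than the subdomain). The sample hosts are illustrative.
const ipStyleHost =
  /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/;

console.log(ipStyleHost.test("host.docker.internal:10000"));      // true (new in this version)
console.log(ipStyleHost.test("127.0.0.1:10000"));                 // true (unchanged)
console.log(ipStyleHost.test("myaccount.blob.core.windows.net")); // false -> DNS-style endpoint
// ---------------------------------------------------------------------------------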
@@ -37235,7 +36790,7 @@ class StorageClientContext extends coreHttp__namespace.ServiceClient { // Parameter assignments this.url = url; // Assigning values to Constant parameters - this.version = options.version || "2021-12-02"; + this.version = options.version || "2023-11-03"; } } @@ -40416,6 +39971,9 @@ exports.StorageBlobAudience = void 0; */ StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; })(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); +function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; +} // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -41261,6 +40819,9 @@ class BlobClient extends StorageClient { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) // The second parameter is undefined. Use anonymous credential. url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && @@ -42564,6 +42125,9 @@ class BlockBlobClient extends BlobClient { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) // The second parameter is undefined. Use anonymous credential. url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && @@ -45167,9 +44731,7 @@ class ContainerClient extends StorageClient { const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); return blobItem; }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = { - name: BlobNameToString(blobPrefixInternal.name), - }; + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); return blobPrefix; }) }) }); return wrappedResponse; @@ -45798,17114 +45360,39686 @@ class ContainerClient extends StorageClient { throw new Error("Unable to extract containerName with provided information."); } } - /** - * Only available for ContainerClient constructed with a shared key credential. - * - * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
- */ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); - }); - } - /** - * Creates a BlobBatchClient object to conduct batch operations. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch - * - * @returns A new BlobBatchClient object for this container. - */ - getBlobBatchClient() { - return new BlobBatchClient(this.url, this.pipeline); - } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to create blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. + */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. 
+ */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; + } + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; + } + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +class AccountSASResourceTypes { + constructor() { + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; + } + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } + } + return accountSASResourceTypes; + } + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); + } + return resourceTypes.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. 
Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; + } + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } + } + return accountSASServices; + } + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? 
accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +class BlobServiceClient extends StorageClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = new Service(this.storageClientContext); + } + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" 
+ extractedCreds.accountSas, pipeline);
+ }
+ else {
+ throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+ }
+ }
+ /**
+ * Creates a {@link ContainerClient} object
+ *
+ * @param containerName - A container name
+ * @returns A new ContainerClient object for the given container name.
+ *
+ * Example usage:
+ *
+ * ```js
+ * const containerClient = blobServiceClient.getContainerClient("<container name>");
+ * ```
+ */
+ getContainerClient(containerName) {
+ return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline);
+ }
+ /**
+ * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container
+ *
+ * @param containerName - Name of the container to create.
+ * @param options - Options to configure Container Create operation.
+ * @returns Container creation response and the corresponding container client.
+ */
+ async createContainer(containerName, options = {}) {
+ const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options);
+ try {
+ const containerClient = this.getContainerClient(containerName);
+ const containerCreateResponse = await containerClient.create(updatedOptions);
+ return {
+ containerClient,
+ containerCreateResponse,
+ };
+ }
+ catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message,
+ });
+ throw e;
+ }
+ finally {
+ span.end();
+ }
+ }
+ /**
+ * Deletes a Blob container.
+ *
+ * @param containerName - Name of the container to delete.
+ * @param options - Options to configure Container Delete operation.
+ * @returns Container deletion response.
+ */
+ async deleteContainer(containerName, options = {}) {
+ const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options);
+ try {
+ const containerClient = this.getContainerClient(containerName);
+ return await containerClient.delete(updatedOptions);
+ }
+ catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message,
+ });
+ throw e;
+ }
+ finally {
+ span.end();
+ }
+ }
+ /**
+ * Restore a previously deleted Blob container.
+ * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container.
+ *
+ * @param deletedContainerName - Name of the previously deleted container.
+ * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container.
+ * @param options - Options to configure Container Restore operation.
+ * @returns Container undelete response.
+ */
+ async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) {
+ const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options);
+ try {
+ const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName);
+ // Hack to access a protected member.
+ const containerContext = new Container(containerClient["storageClientContext"]);
+ const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName,
+ deletedContainerVersion }, updatedOptions));
+ return { containerClient, containerUndeleteResponse };
+ }
+ catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message,
+ });
+ throw e;
+ }
+ finally {
+ span.end();
+ }
+ }
+ /**
+ * Rename an existing Blob Container.
+ *
+ * @param sourceContainerName - The name of the source container.
+ * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); + try { + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + return { containerClient, containerRenameResponse }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); + try { + return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + async setProperties(properties, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); + try { + return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. 
+ */ + async getStatistics(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); + try { + return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); + try { + return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); + try { + return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. 
+ * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); + try { + const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. 
+ * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param options - Options to findBlobsByTagsItems.
+ */
+ findBlobsByTagsItems(tagFilterSqlExpression, options = {}) {
+ return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() {
+ var e_1, _a;
+ let marker;
+ try {
+ for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) {
+ const segment = _c.value;
+ yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs)));
+ }
+ }
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
+ finally {
+ try {
+ if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b));
+ }
+ finally { if (e_1) throw e_1.error; }
+ }
+ });
+ }
+ /**
+ * Returns an async iterable iterator to find all blobs with specified tag
+ * under the specified account.
+ *
+ * .byPage() returns an async iterable iterator to list the blobs in pages.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * let i = 1;
+ * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'");
+ * let blobItem = await iter.next();
+ * while (!blobItem.done) {
+ * console.log(`Blob ${i++}: ${blobItem.value.name}`);
+ * blobItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) {
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 blob names
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ * // Passing next marker as continuationToken
+ * iterator = blobServiceClient
+ * .findBlobsByTags("tagkey='tagvalue'")
+ * .byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints blob names
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ * ```
+ *
+ * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param options - Options to find blobs by tags.
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + listSegments(marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + let listContainersSegmentResponse; + if (!!marker || marker === undefined) { + do { + listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; + } + const include = []; + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSystem) { + include.push("system"); + } + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time
+ */
+ async getUserDelegationKey(startsOn, expiresOn, options = {}) {
+ const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options);
+ try {
+ const response = await this.serviceContext.getUserDelegationKey({
+ startsOn: truncatedISO8061Date(startsOn, false),
+ expiresOn: truncatedISO8061Date(expiresOn, false),
+ }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)));
+ const userDelegationKey = {
+ signedObjectId: response.signedObjectId,
+ signedTenantId: response.signedTenantId,
+ signedStartsOn: new Date(response.signedStartsOn),
+ signedExpiresOn: new Date(response.signedExpiresOn),
+ signedService: response.signedService,
+ signedVersion: response.signedVersion,
+ value: response.value,
+ };
+ const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey);
+ return res;
+ }
+ catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message,
+ });
+ throw e;
+ }
+ finally {
+ span.end();
+ }
+ }
+ /**
+ * Creates a BlobBatchClient object to conduct batch operations.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
+ *
+ * @returns A new BlobBatchClient object for this service.
+ */
+ getBlobBatchClient() {
+ return new BlobBatchClient(this.url, this.pipeline);
+ }
+ /**
+ * Only available for BlobServiceClient constructed with a shared key credential.
+ *
+ * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties
+ * and parameters passed in. The SAS is signed by the shared key credential of the client.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas
+ *
+ * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Defaults to an hour later if not provided.
+ * @param permissions - Specifies the list of permissions to be associated with the SAS.
+ * @param resourceTypes - Specifies the resource types associated with the shared access signature.
+ * @param options - Optional parameters.
+ * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
+ */
+ generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) {
+ if (!(this.credential instanceof StorageSharedKeyCredential)) {
+ throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential");
+ }
+ if (expiresOn === undefined) {
+ const now = new Date();
+ expiresOn = new Date(now.getTime() + 3600 * 1000);
+ }
+ const sas = generateAccountSASQueryParameters(Object.assign({ permissions,
+ expiresOn,
+ resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString();
+ return appendToURLQuery(this.url, sas);
+ }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+/** Known values of {@link EncryptionAlgorithmType} that the service accepts.
*/ +exports.KnownEncryptionAlgorithmType = void 0; +(function (KnownEncryptionAlgorithmType) { + KnownEncryptionAlgorithmType["AES256"] = "AES256"; +})(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {})); + +Object.defineProperty(exports, "BaseRequestPolicy", ({ + enumerable: true, + get: function () { return coreHttp.BaseRequestPolicy; } +})); +Object.defineProperty(exports, "HttpHeaders", ({ + enumerable: true, + get: function () { return coreHttp.HttpHeaders; } +})); +Object.defineProperty(exports, "RequestPolicyOptions", ({ + enumerable: true, + get: function () { return coreHttp.RequestPolicyOptions; } +})); +Object.defineProperty(exports, "RestError", ({ + enumerable: true, + get: function () { return coreHttp.RestError; } +})); +Object.defineProperty(exports, "WebResource", ({ + enumerable: true, + get: function () { return coreHttp.WebResource; } +})); +Object.defineProperty(exports, "deserializationPolicy", ({ + enumerable: true, + get: function () { return coreHttp.deserializationPolicy; } +})); +exports.AccountSASPermissions = AccountSASPermissions; +exports.AccountSASResourceTypes = AccountSASResourceTypes; +exports.AccountSASServices = AccountSASServices; +exports.AnonymousCredential = AnonymousCredential; +exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; +exports.AppendBlobClient = AppendBlobClient; +exports.BlobBatch = BlobBatch; +exports.BlobBatchClient = BlobBatchClient; +exports.BlobClient = BlobClient; +exports.BlobLeaseClient = BlobLeaseClient; +exports.BlobSASPermissions = BlobSASPermissions; +exports.BlobServiceClient = BlobServiceClient; +exports.BlockBlobClient = BlockBlobClient; +exports.ContainerClient = ContainerClient; +exports.ContainerSASPermissions = ContainerSASPermissions; +exports.Credential = Credential; +exports.CredentialPolicy = CredentialPolicy; +exports.PageBlobClient = PageBlobClient; +exports.Pipeline = Pipeline; +exports.SASQueryParameters = SASQueryParameters; +exports.StorageBrowserPolicy = StorageBrowserPolicy; +exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; +exports.StorageOAuthScopes = StorageOAuthScopes; +exports.StorageRetryPolicy = StorageRetryPolicy; +exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; +exports.StorageSharedKeyCredential = StorageSharedKeyCredential; +exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; +exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; +exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; +exports.getBlobServiceAccountAudience = getBlobServiceAccountAudience; +exports.isPipelineLike = isPipelineLike; +exports.logger = logger; +exports.newPipeline = newPipeline; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 334: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS 
= (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; +} + +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} + +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 6762: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(5030); +var import_before_after_hook = __nccwpck_require__(3682); +var import_request = __nccwpck_require__(6234); +var import_graphql = __nccwpck_require__(8467); +var import_auth_token = __nccwpck_require__(334); + +// pkg/dist-src/version.js +var VERSION = "5.1.0"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const 
OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; + } + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. 
+ octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); + } + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 9440: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(5030); + +// pkg/dist-src/version.js +var VERSION = "9.0.4"; + +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; + +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/util/merge-deep.js +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} + +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } + } + return obj; +} + +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? 
{ method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); + } + return mergedOptions; +} + +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; +} + +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); + } +} + +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); +} + +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); +} + +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 8467: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(6234); +var import_universal_user_agent = __nccwpck_require__(5030); + +// pkg/dist-src/version.js +var VERSION = "7.0.2"; + +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(6234); + +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(6234); + +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + 
Error.captureStackTrace(this, this.constructor); + } + } +}; + +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } + } + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} + +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 4193: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () 
=> paginateRest, + paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "9.1.5"; + +// pkg/dist-src/normalize-paginated-list-response.js +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} + +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + }) + }; +} + +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; + } + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); +} +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; + } + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat( + mapFn ? 
mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator2, mapFn); + }); +} + +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { + iterator +}); + +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET 
/orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET 
/repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET 
/users/{username}/starred", + "GET /users/{username}/subscriptions" +]; + +// pkg/dist-src/paginating-endpoints.js +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + +// pkg/dist-src/index.js +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 3044: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "10.2.0"; + +// pkg/dist-src/generated/endpoints.js +var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + 
deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET 
/orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: 
["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: 
[ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + 
listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET 
/orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET 
/repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + copilot: { + addCopilotForBusinessSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotForBusinessSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT 
/orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + 
], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + } + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE 
/user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + } + ], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + } + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + } + ] + }, + orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], + createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH 
/orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET /orgs/{org}/properties/schema/{custom_property_name}" + ], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: ["POST 
/orgs/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST 
/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], + update: 
["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + 
addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + checkAutomatedSecurityFixes: [ + "GET /repos/{owner}/{repo}/automated-security-fixes" + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE 
/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: 
["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET 
/repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + 
setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET 
/repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST 
/user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET 
/users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; +var endpoints_default = Endpoints; + +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); + }, + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; + }, + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); + } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); + } + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + 
decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } + } + return requestWithDefaults(options2); + } + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} + +// pkg/dist-src/index.js +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 537: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(8932); +var import_once = __toESM(__nccwpck_require__(1223)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
+ ) + ); + return headers || {}; + } + }); + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 6234: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(9440); +var import_universal_user_agent = __nccwpck_require__(5030); + +// pkg/dist-src/version.js +var VERSION = "8.1.6"; + +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(537); + +// pkg/dist-src/get-buffer-response.js +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +// pkg/dist-src/fetch-wrapper.js +function fetchWrapper(requestOptions) { + var _a, _b, _c; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. 
+ ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + return `Unknown error: ${JSON.stringify(data)}`; +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; + return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); +} + +// pkg/dist-src/index.js +var 
request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } +}); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 7171: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ContextAPI = void 0; +const NoopContextManager_1 = __nccwpck_require__(4118); +const global_utils_1 = __nccwpck_require__(5135); +const diag_1 = __nccwpck_require__(1877); +const API_NAME = 'context'; +const NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +class ContextAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { } + /** Get the singleton instance of the Context API */ + static getInstance() { + if (!this._instance) { + this._instance = new ContextAPI(); + } + return this._instance; + } + /** + * Set the current context manager. + * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager) { + return (0, global_utils_1.registerGlobal)(API_NAME, contextManager, diag_1.DiagAPI.instance()); + } + /** + * Get the currently active context + */ + active() { + return this._getContextManager().active(); + } + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with(context, fn, thisArg, ...args) { + return this._getContextManager().with(context, fn, thisArg, ...args); + } + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind(context, target) { + return this._getContextManager().bind(context, target); + } + _getContextManager() { + return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_CONTEXT_MANAGER; + } + /** Disable and remove the global context manager */ + disable() { + this._getContextManager().disable(); + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + } +} +exports.ContextAPI = ContextAPI; +//# sourceMappingURL=context.js.map + +/***/ }), + +/***/ 1877: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DiagAPI = void 0; +const ComponentLogger_1 = __nccwpck_require__(7978); +const logLevelLogger_1 = __nccwpck_require__(9639); +const types_1 = __nccwpck_require__(8077); +const global_utils_1 = __nccwpck_require__(5135); +const API_NAME = 'diag'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +class DiagAPI { + /** + * Private internal constructor + * @private + */ + constructor() { + function _logProxy(funcName) { + return function (...args) { + const logger = (0, global_utils_1.getGlobal)('diag'); + // shortcut if logger not set + if (!logger) + return; + return logger[funcName](...args); + }; + } + // Using self local variable for minification purposes as 'this' cannot be minified + const self = this; + // DiagAPI specific functions + const setLogger = (logger, optionsOrLogLevel = { logLevel: types_1.DiagLogLevel.INFO }) => { + var _a, _b, _c; + if (logger === self) { + // There isn't much we can do here. + // Logging to the console might break the user application. + // Try to log to self. If a logger was previously registered it will receive the log. + const err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); + self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); + return false; + } + if (typeof optionsOrLogLevel === 'number') { + optionsOrLogLevel = { + logLevel: optionsOrLogLevel, + }; + } + const oldLogger = (0, global_utils_1.getGlobal)('diag'); + const newLogger = (0, logLevelLogger_1.createLogLevelDiagLogger)((_b = optionsOrLogLevel.logLevel) !== null && _b !== void 0 ? _b : types_1.DiagLogLevel.INFO, logger); + // There already is an logger registered. We'll let it know before overwriting it. + if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { + const stack = (_c = new Error().stack) !== null && _c !== void 0 ? 
_c : ''; + oldLogger.warn(`Current logger will be overwritten from ${stack}`); + newLogger.warn(`Current logger will overwrite one already registered from ${stack}`); + } + return (0, global_utils_1.registerGlobal)('diag', newLogger, self, true); + }; + self.setLogger = setLogger; + self.disable = () => { + (0, global_utils_1.unregisterGlobal)(API_NAME, self); + }; + self.createComponentLogger = (options) => { + return new ComponentLogger_1.DiagComponentLogger(options); + }; + self.verbose = _logProxy('verbose'); + self.debug = _logProxy('debug'); + self.info = _logProxy('info'); + self.warn = _logProxy('warn'); + self.error = _logProxy('error'); + } + /** Get the singleton instance of the DiagAPI API */ + static instance() { + if (!this._instance) { + this._instance = new DiagAPI(); + } + return this._instance; + } +} +exports.DiagAPI = DiagAPI; +//# sourceMappingURL=diag.js.map + +/***/ }), + +/***/ 7696: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MetricsAPI = void 0; +const NoopMeterProvider_1 = __nccwpck_require__(2647); +const global_utils_1 = __nccwpck_require__(5135); +const diag_1 = __nccwpck_require__(1877); +const API_NAME = 'metrics'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Metrics API + */ +class MetricsAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { } + /** Get the singleton instance of the Metrics API */ + static getInstance() { + if (!this._instance) { + this._instance = new MetricsAPI(); + } + return this._instance; + } + /** + * Set the current global meter provider. + * Returns true if the meter provider was successfully registered, else false. + */ + setGlobalMeterProvider(provider) { + return (0, global_utils_1.registerGlobal)(API_NAME, provider, diag_1.DiagAPI.instance()); + } + /** + * Returns the global meter provider. + */ + getMeterProvider() { + return (0, global_utils_1.getGlobal)(API_NAME) || NoopMeterProvider_1.NOOP_METER_PROVIDER; + } + /** + * Returns a meter from the global meter provider. + */ + getMeter(name, version, options) { + return this.getMeterProvider().getMeter(name, version, options); + } + /** Remove the global meter provider */ + disable() { + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + } +} +exports.MetricsAPI = MetricsAPI; +//# sourceMappingURL=metrics.js.map + +/***/ }), + +/***/ 9909: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PropagationAPI = void 0; +const global_utils_1 = __nccwpck_require__(5135); +const NoopTextMapPropagator_1 = __nccwpck_require__(2368); +const TextMapPropagator_1 = __nccwpck_require__(865); +const context_helpers_1 = __nccwpck_require__(7682); +const utils_1 = __nccwpck_require__(8136); +const diag_1 = __nccwpck_require__(1877); +const API_NAME = 'propagation'; +const NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +class PropagationAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { + this.createBaggage = utils_1.createBaggage; + this.getBaggage = context_helpers_1.getBaggage; + this.getActiveBaggage = context_helpers_1.getActiveBaggage; + this.setBaggage = context_helpers_1.setBaggage; + this.deleteBaggage = context_helpers_1.deleteBaggage; + } + /** Get the singleton instance of the Propagator API */ + static getInstance() { + if (!this._instance) { + this._instance = new PropagationAPI(); + } + return this._instance; + } + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator) { + return (0, global_utils_1.registerGlobal)(API_NAME, propagator, diag_1.DiagAPI.instance()); + } + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject(context, carrier, setter = TextMapPropagator_1.defaultTextMapSetter) { + return this._getGlobalPropagator().inject(context, carrier, setter); + } + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract(context, carrier, getter = TextMapPropagator_1.defaultTextMapGetter) { + return this._getGlobalPropagator().extract(context, carrier, getter); + } + /** + * Return a list of all fields which may be used by the propagator. + */ + fields() { + return this._getGlobalPropagator().fields(); + } + /** Remove the global propagator */ + disable() { + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + } + _getGlobalPropagator() { + return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + } +} +exports.PropagationAPI = PropagationAPI; +//# sourceMappingURL=propagation.js.map + +/***/ }), + +/***/ 1539: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TraceAPI = void 0; +const global_utils_1 = __nccwpck_require__(5135); +const ProxyTracerProvider_1 = __nccwpck_require__(2285); +const spancontext_utils_1 = __nccwpck_require__(9745); +const context_utils_1 = __nccwpck_require__(3326); +const diag_1 = __nccwpck_require__(1877); +const API_NAME = 'trace'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +class TraceAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { + this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + this.wrapSpanContext = spancontext_utils_1.wrapSpanContext; + this.isSpanContextValid = spancontext_utils_1.isSpanContextValid; + this.deleteSpan = context_utils_1.deleteSpan; + this.getSpan = context_utils_1.getSpan; + this.getActiveSpan = context_utils_1.getActiveSpan; + this.getSpanContext = context_utils_1.getSpanContext; + this.setSpan = context_utils_1.setSpan; + this.setSpanContext = context_utils_1.setSpanContext; + } + /** Get the singleton instance of the Trace API */ + static getInstance() { + if (!this._instance) { + this._instance = new TraceAPI(); + } + return this._instance; + } + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider) { + const success = (0, global_utils_1.registerGlobal)(API_NAME, this._proxyTracerProvider, diag_1.DiagAPI.instance()); + if (success) { + this._proxyTracerProvider.setDelegate(provider); + } + return success; + } + /** + * Returns the global tracer provider. + */ + getTracerProvider() { + return (0, global_utils_1.getGlobal)(API_NAME) || this._proxyTracerProvider; + } + /** + * Returns a tracer from the global tracer provider. + */ + getTracer(name, version) { + return this.getTracerProvider().getTracer(name, version); + } + /** Remove the global tracer provider */ + disable() { + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + } +} +exports.TraceAPI = TraceAPI; +//# sourceMappingURL=trace.js.map + +/***/ }), + +/***/ 7682: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deleteBaggage = exports.setBaggage = exports.getActiveBaggage = exports.getBaggage = void 0; +const context_1 = __nccwpck_require__(7171); +const context_2 = __nccwpck_require__(8242); +/** + * Baggage key + */ +const BAGGAGE_KEY = (0, context_2.createContextKey)('OpenTelemetry Baggage Key'); +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +exports.getBaggage = getBaggage; +/** + * Retrieve the current baggage from the active/current context + * + * @returns {Baggage} Extracted baggage from the context + */ +function getActiveBaggage() { + return getBaggage(context_1.ContextAPI.getInstance().active()); +} +exports.getActiveBaggage = getActiveBaggage; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +exports.setBaggage = setBaggage; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +function deleteBaggage(context) { + return context.deleteValue(BAGGAGE_KEY); +} +exports.deleteBaggage = deleteBaggage; +//# sourceMappingURL=context-helpers.js.map + +/***/ }), + +/***/ 4811: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaggageImpl = void 0; +class BaggageImpl { + constructor(entries) { + this._entries = entries ? 
new Map(entries) : new Map(); + } + getEntry(key) { + const entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + } + getAllEntries() { + return Array.from(this._entries.entries()).map(([k, v]) => [k, v]); + } + setEntry(key, entry) { + const newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + } + removeEntry(key) { + const newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + } + removeEntries(...keys) { + const newBaggage = new BaggageImpl(this._entries); + for (const key of keys) { + newBaggage._entries.delete(key); + } + return newBaggage; + } + clear() { + return new BaggageImpl(); + } +} +exports.BaggageImpl = BaggageImpl; +//# sourceMappingURL=baggage-impl.js.map + +/***/ }), + +/***/ 3542: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.baggageEntryMetadataSymbol = void 0; +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +exports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); +//# sourceMappingURL=symbol.js.map + +/***/ }), + +/***/ 8136: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; +const diag_1 = __nccwpck_require__(1877); +const baggage_impl_1 = __nccwpck_require__(4811); +const symbol_1 = __nccwpck_require__(3542); +const diag = diag_1.DiagAPI.instance(); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +function createBaggage(entries = {}) { + return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); +} +exports.createBaggage = createBaggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. 
+ * + */ +function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + diag.error(`Cannot create baggage metadata from unknown type: ${typeof str}`); + str = ''; + } + return { + __TYPE__: symbol_1.baggageEntryMetadataSymbol, + toString() { + return str; + }, + }; +} +exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 7393: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.context = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const context_1 = __nccwpck_require__(7171); +/** Entrypoint for context API */ +exports.context = context_1.ContextAPI.getInstance(); +//# sourceMappingURL=context-api.js.map + +/***/ }), + +/***/ 4118: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoopContextManager = void 0; +const context_1 = __nccwpck_require__(8242); +class NoopContextManager { + active() { + return context_1.ROOT_CONTEXT; + } + with(_context, fn, thisArg, ...args) { + return fn.call(thisArg, ...args); + } + bind(_context, target) { + return target; + } + enable() { + return this; + } + disable() { + return this; + } +} +exports.NoopContextManager = NoopContextManager; +//# sourceMappingURL=NoopContextManager.js.map + +/***/ }), + +/***/ 8242: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ROOT_CONTEXT = exports.createContextKey = void 0; +/** Get a key to uniquely identify a context value */ +function createContextKey(description) { + // The specification states that for the same input, multiple calls should + // return different keys. Due to the nature of the JS dependency management + // system, this creates problems where multiple versions of some package + // could hold different keys for the same property. + // + // Therefore, we use Symbol.for which returns the same key for the same input. + return Symbol.for(description); +} +exports.createContextKey = createContextKey; +class BaseContext { + /** + * Construct a new context which inherits values from an optional parent context. + * + * @param parentContext a context from which to inherit values + */ + constructor(parentContext) { + // for minification + const self = this; + self._currentContext = parentContext ? new Map(parentContext) : new Map(); + self.getValue = (key) => self._currentContext.get(key); + self.setValue = (key, value) => { + const context = new BaseContext(self._currentContext); + context._currentContext.set(key, value); + return context; + }; + self.deleteValue = (key) => { + const context = new BaseContext(self._currentContext); + context._currentContext.delete(key); + return context; + }; + } +} +/** The root context is used as the default parent context when there is no active context */ +exports.ROOT_CONTEXT = new BaseContext(); +//# sourceMappingURL=context.js.map + +/***/ }), + +/***/ 9721: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.diag = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const diag_1 = __nccwpck_require__(1877); +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +exports.diag = diag_1.DiagAPI.instance(); +//# sourceMappingURL=diag-api.js.map + +/***/ }), + +/***/ 7978: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DiagComponentLogger = void 0; +const global_utils_1 = __nccwpck_require__(5135); +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. + * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +class DiagComponentLogger { + constructor(props) { + this._namespace = props.namespace || 'DiagComponentLogger'; + } + debug(...args) { + return logProxy('debug', this._namespace, args); + } + error(...args) { + return logProxy('error', this._namespace, args); + } + info(...args) { + return logProxy('info', this._namespace, args); + } + warn(...args) { + return logProxy('warn', this._namespace, args); + } + verbose(...args) { + return logProxy('verbose', this._namespace, args); + } +} +exports.DiagComponentLogger = DiagComponentLogger; +function logProxy(funcName, namespace, args) { + const logger = (0, global_utils_1.getGlobal)('diag'); + // shortcut if logger not set + if (!logger) { + return; + } + args.unshift(namespace); + return logger[funcName](...args); +} +//# sourceMappingURL=ComponentLogger.js.map + +/***/ }), + +/***/ 3041: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DiagConsoleLogger = void 0; +const consoleMap = [ + { n: 'error', c: 'error' }, + { n: 'warn', c: 'warn' }, + { n: 'info', c: 'info' }, + { n: 'debug', c: 'debug' }, + { n: 'verbose', c: 'trace' }, +]; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. 
+ * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +class DiagConsoleLogger { + constructor() { + function _consoleFunc(funcName) { + return function (...args) { + if (console) { + // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console + let theFunc = console[funcName]; + if (typeof theFunc !== 'function') { + // Not all environments support all functions + // eslint-disable-next-line no-console + theFunc = console.log; + } + // One last final check + if (typeof theFunc === 'function') { + return theFunc.apply(console, args); + } + } + }; + } + for (let i = 0; i < consoleMap.length; i++) { + this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + } + } +} +exports.DiagConsoleLogger = DiagConsoleLogger; +//# sourceMappingURL=consoleLogger.js.map + +/***/ }), + +/***/ 9639: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createLogLevelDiagLogger = void 0; +const types_1 = __nccwpck_require__(8077); +function createLogLevelDiagLogger(maxLevel, logger) { + if (maxLevel < types_1.DiagLogLevel.NONE) { + maxLevel = types_1.DiagLogLevel.NONE; + } + else if (maxLevel > types_1.DiagLogLevel.ALL) { + maxLevel = types_1.DiagLogLevel.ALL; + } + // In case the logger is null or undefined + logger = logger || {}; + function _filterFunc(funcName, theLevel) { + const theFunc = logger[funcName]; + if (typeof theFunc === 'function' && maxLevel >= theLevel) { + return theFunc.bind(logger); + } + return function () { }; + } + return { + error: _filterFunc('error', types_1.DiagLogLevel.ERROR), + warn: _filterFunc('warn', types_1.DiagLogLevel.WARN), + info: _filterFunc('info', types_1.DiagLogLevel.INFO), + debug: _filterFunc('debug', types_1.DiagLogLevel.DEBUG), + verbose: _filterFunc('verbose', types_1.DiagLogLevel.VERBOSE), + }; +} +exports.createLogLevelDiagLogger = createLogLevelDiagLogger; +//# sourceMappingURL=logLevelLogger.js.map + +/***/ }), + +/***/ 8077: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DiagLogLevel = void 0; +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +var DiagLogLevel; +(function (DiagLogLevel) { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; + /** Identifies an error scenario */ + DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; + /** Identifies a warning scenario */ + DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; + /** General informational log message */ + DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; + /** General debug log message */ + DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; + /** Used to set the logging level to include all logging */ + DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; +})(DiagLogLevel = exports.DiagLogLevel || (exports.DiagLogLevel = {})); +//# sourceMappingURL=types.js.map + +/***/ }), + +/***/ 5163: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.trace = exports.propagation = exports.metrics = exports.diag = exports.context = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.createTraceState = exports.TraceFlags = exports.SpanStatusCode = exports.SpanKind = exports.SamplingDecision = exports.ProxyTracerProvider = exports.ProxyTracer = exports.defaultTextMapSetter = exports.defaultTextMapGetter = exports.ValueType = exports.createNoopMeter = exports.DiagLogLevel = exports.DiagConsoleLogger = exports.ROOT_CONTEXT = exports.createContextKey = exports.baggageEntryMetadataFromString = void 0; +var utils_1 = __nccwpck_require__(8136); +Object.defineProperty(exports, "baggageEntryMetadataFromString", ({ enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } })); +// Context APIs +var context_1 = __nccwpck_require__(8242); +Object.defineProperty(exports, "createContextKey", ({ enumerable: true, get: function () { return context_1.createContextKey; } })); +Object.defineProperty(exports, "ROOT_CONTEXT", ({ enumerable: true, get: function () { return context_1.ROOT_CONTEXT; } })); +// Diag APIs +var consoleLogger_1 = __nccwpck_require__(3041); +Object.defineProperty(exports, "DiagConsoleLogger", ({ enumerable: true, get: function () { return consoleLogger_1.DiagConsoleLogger; } })); +var types_1 = __nccwpck_require__(8077); +Object.defineProperty(exports, "DiagLogLevel", ({ enumerable: true, get: function () { return types_1.DiagLogLevel; } })); +// Metrics APIs +var NoopMeter_1 = __nccwpck_require__(4837); +Object.defineProperty(exports, "createNoopMeter", ({ enumerable: true, get: function () { return NoopMeter_1.createNoopMeter; } })); +var Metric_1 = __nccwpck_require__(9999); +Object.defineProperty(exports, "ValueType", ({ enumerable: true, get: function () { return Metric_1.ValueType; } })); +// Propagation APIs +var TextMapPropagator_1 = __nccwpck_require__(865); +Object.defineProperty(exports, "defaultTextMapGetter", ({ enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapGetter; } })); +Object.defineProperty(exports, "defaultTextMapSetter", ({ enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapSetter; } })); +var ProxyTracer_1 = __nccwpck_require__(3503); +Object.defineProperty(exports, "ProxyTracer", ({ enumerable: true, get: function () { return ProxyTracer_1.ProxyTracer; } })); +var ProxyTracerProvider_1 = __nccwpck_require__(2285); +Object.defineProperty(exports, "ProxyTracerProvider", ({ enumerable: true, get: function () { return ProxyTracerProvider_1.ProxyTracerProvider; } })); +var SamplingResult_1 = __nccwpck_require__(3209); +Object.defineProperty(exports, "SamplingDecision", ({ enumerable: true, get: function () { return SamplingResult_1.SamplingDecision; } })); +var span_kind_1 = __nccwpck_require__(1424); +Object.defineProperty(exports, "SpanKind", ({ enumerable: true, get: function () { return span_kind_1.SpanKind; } })); +var status_1 = __nccwpck_require__(8845); +Object.defineProperty(exports, "SpanStatusCode", ({ enumerable: true, get: function () { return status_1.SpanStatusCode; } })); +var trace_flags_1 = __nccwpck_require__(6905); +Object.defineProperty(exports, "TraceFlags", ({ enumerable: true, get: function () { return trace_flags_1.TraceFlags; } })); +var utils_2 = __nccwpck_require__(2615); +Object.defineProperty(exports, 
"createTraceState", ({ enumerable: true, get: function () { return utils_2.createTraceState; } })); +var spancontext_utils_1 = __nccwpck_require__(9745); +Object.defineProperty(exports, "isSpanContextValid", ({ enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } })); +Object.defineProperty(exports, "isValidTraceId", ({ enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } })); +Object.defineProperty(exports, "isValidSpanId", ({ enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } })); +var invalid_span_constants_1 = __nccwpck_require__(1760); +Object.defineProperty(exports, "INVALID_SPANID", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPANID; } })); +Object.defineProperty(exports, "INVALID_TRACEID", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_TRACEID; } })); +Object.defineProperty(exports, "INVALID_SPAN_CONTEXT", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPAN_CONTEXT; } })); +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const context_api_1 = __nccwpck_require__(7393); +Object.defineProperty(exports, "context", ({ enumerable: true, get: function () { return context_api_1.context; } })); +const diag_api_1 = __nccwpck_require__(9721); +Object.defineProperty(exports, "diag", ({ enumerable: true, get: function () { return diag_api_1.diag; } })); +const metrics_api_1 = __nccwpck_require__(2601); +Object.defineProperty(exports, "metrics", ({ enumerable: true, get: function () { return metrics_api_1.metrics; } })); +const propagation_api_1 = __nccwpck_require__(7591); +Object.defineProperty(exports, "propagation", ({ enumerable: true, get: function () { return propagation_api_1.propagation; } })); +const trace_api_1 = __nccwpck_require__(8989); +Object.defineProperty(exports, "trace", ({ enumerable: true, get: function () { return trace_api_1.trace; } })); +// Default export. +exports["default"] = { + context: context_api_1.context, + diag: diag_api_1.diag, + metrics: metrics_api_1.metrics, + propagation: propagation_api_1.propagation, + trace: trace_api_1.trace, +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 5135: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; +const platform_1 = __nccwpck_require__(9957); +const version_1 = __nccwpck_require__(8996); +const semver_1 = __nccwpck_require__(1522); +const major = version_1.VERSION.split('.')[0]; +const GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(`opentelemetry.js.api.${major}`); +const _global = platform_1._globalThis; +function registerGlobal(type, instance, diag, allowOverride = false) { + var _a; + const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + version: version_1.VERSION, + }); + if (!allowOverride && api[type]) { + // already registered an API of this type + const err = new Error(`@opentelemetry/api: Attempted duplicate registration of API: ${type}`); + diag.error(err.stack || err.message); + return false; + } + if (api.version !== version_1.VERSION) { + // All registered APIs must be of the same version exactly + const err = new Error(`@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${version_1.VERSION}`); + diag.error(err.stack || err.message); + return false; + } + api[type] = instance; + diag.debug(`@opentelemetry/api: Registered a global for ${type} v${version_1.VERSION}.`); + return true; +} +exports.registerGlobal = registerGlobal; +function getGlobal(type) { + var _a, _b; + const globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; + if (!globalVersion || !(0, semver_1.isCompatible)(globalVersion)) { + return; + } + return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; +} +exports.getGlobal = getGlobal; +function unregisterGlobal(type, diag) { + diag.debug(`@opentelemetry/api: Unregistering a global for ${type} v${version_1.VERSION}.`); + const api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; + if (api) { + delete api[type]; + } +} +exports.unregisterGlobal = unregisterGlobal; +//# sourceMappingURL=global-utils.js.map + +/***/ }), + +/***/ 1522: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isCompatible = exports._makeCompatibilityCheck = void 0; +const version_1 = __nccwpck_require__(8996); +const re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
+ * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +function _makeCompatibilityCheck(ownVersion) { + const acceptedVersions = new Set([ownVersion]); + const rejectedVersions = new Set(); + const myVersionMatch = ownVersion.match(re); + if (!myVersionMatch) { + // we cannot guarantee compatibility so we always return noop + return () => false; + } + const ownVersionParsed = { + major: +myVersionMatch[1], + minor: +myVersionMatch[2], + patch: +myVersionMatch[3], + prerelease: myVersionMatch[4], + }; + // if ownVersion has a prerelease tag, versions must match exactly + if (ownVersionParsed.prerelease != null) { + return function isExactmatch(globalVersion) { + return globalVersion === ownVersion; + }; + } + function _reject(v) { + rejectedVersions.add(v); + return false; + } + function _accept(v) { + acceptedVersions.add(v); + return true; + } + return function isCompatible(globalVersion) { + if (acceptedVersions.has(globalVersion)) { + return true; + } + if (rejectedVersions.has(globalVersion)) { + return false; + } + const globalVersionMatch = globalVersion.match(re); + if (!globalVersionMatch) { + // cannot parse other version + // we cannot guarantee compatibility so we always noop + return _reject(globalVersion); + } + const globalVersionParsed = { + major: +globalVersionMatch[1], + minor: +globalVersionMatch[2], + patch: +globalVersionMatch[3], + prerelease: globalVersionMatch[4], + }; + // if globalVersion has a prerelease tag, versions must match exactly + if (globalVersionParsed.prerelease != null) { + return _reject(globalVersion); + } + // major versions must match + if (ownVersionParsed.major !== globalVersionParsed.major) { + return _reject(globalVersion); + } + if (ownVersionParsed.major === 0) { + if (ownVersionParsed.minor === globalVersionParsed.minor && + ownVersionParsed.patch <= globalVersionParsed.patch) { + return _accept(globalVersion); + } + return _reject(globalVersion); + } + if (ownVersionParsed.minor <= globalVersionParsed.minor) { + return _accept(globalVersion); + } + return _reject(globalVersion); + }; +} +exports._makeCompatibilityCheck = _makeCompatibilityCheck; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +exports.isCompatible = _makeCompatibilityCheck(version_1.VERSION); +//# sourceMappingURL=semver.js.map + +/***/ }), + +/***/ 2601: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.metrics = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const metrics_1 = __nccwpck_require__(7696); +/** Entrypoint for metrics API */ +exports.metrics = metrics_1.MetricsAPI.getInstance(); +//# sourceMappingURL=metrics-api.js.map + +/***/ }), + +/***/ 9999: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ValueType = void 0; +/** The Type of value. It describes how the data is reported. */ +var ValueType; +(function (ValueType) { + ValueType[ValueType["INT"] = 0] = "INT"; + ValueType[ValueType["DOUBLE"] = 1] = "DOUBLE"; +})(ValueType = exports.ValueType || (exports.ValueType = {})); +//# sourceMappingURL=Metric.js.map + +/***/ }), + +/***/ 4837: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createNoopMeter = exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = exports.NOOP_OBSERVABLE_GAUGE_METRIC = exports.NOOP_OBSERVABLE_COUNTER_METRIC = exports.NOOP_UP_DOWN_COUNTER_METRIC = exports.NOOP_HISTOGRAM_METRIC = exports.NOOP_COUNTER_METRIC = exports.NOOP_METER = exports.NoopObservableUpDownCounterMetric = exports.NoopObservableGaugeMetric = exports.NoopObservableCounterMetric = exports.NoopObservableMetric = exports.NoopHistogramMetric = exports.NoopUpDownCounterMetric = exports.NoopCounterMetric = exports.NoopMetric = exports.NoopMeter = void 0; +/** + * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses + * constant NoopMetrics for all of its methods. + */ +class NoopMeter { + constructor() { } + /** + * @see {@link Meter.createHistogram} + */ + createHistogram(_name, _options) { + return exports.NOOP_HISTOGRAM_METRIC; + } + /** + * @see {@link Meter.createCounter} + */ + createCounter(_name, _options) { + return exports.NOOP_COUNTER_METRIC; + } + /** + * @see {@link Meter.createUpDownCounter} + */ + createUpDownCounter(_name, _options) { + return exports.NOOP_UP_DOWN_COUNTER_METRIC; + } + /** + * @see {@link Meter.createObservableGauge} + */ + createObservableGauge(_name, _options) { + return exports.NOOP_OBSERVABLE_GAUGE_METRIC; + } + /** + * @see {@link Meter.createObservableCounter} + */ + createObservableCounter(_name, _options) { + return exports.NOOP_OBSERVABLE_COUNTER_METRIC; + } + /** + * @see {@link Meter.createObservableUpDownCounter} + */ + createObservableUpDownCounter(_name, _options) { + return exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC; + } + /** + * @see {@link Meter.addBatchObservableCallback} + */ + addBatchObservableCallback(_callback, _observables) { } + /** + * @see {@link Meter.removeBatchObservableCallback} + */ + removeBatchObservableCallback(_callback) { } +} +exports.NoopMeter = NoopMeter; +class NoopMetric { +} +exports.NoopMetric = NoopMetric; +class NoopCounterMetric extends NoopMetric { + add(_value, _attributes) { } +} +exports.NoopCounterMetric = NoopCounterMetric; +class NoopUpDownCounterMetric extends NoopMetric { + add(_value, _attributes) { } +} +exports.NoopUpDownCounterMetric = NoopUpDownCounterMetric; +class NoopHistogramMetric extends NoopMetric { + record(_value, _attributes) { } +} +exports.NoopHistogramMetric = NoopHistogramMetric; +class NoopObservableMetric { + addCallback(_callback) { } + removeCallback(_callback) { } +} +exports.NoopObservableMetric = NoopObservableMetric; +class NoopObservableCounterMetric extends NoopObservableMetric { +} +exports.NoopObservableCounterMetric = NoopObservableCounterMetric; +class NoopObservableGaugeMetric extends NoopObservableMetric { +} +exports.NoopObservableGaugeMetric = NoopObservableGaugeMetric; +class NoopObservableUpDownCounterMetric extends NoopObservableMetric { +} +exports.NoopObservableUpDownCounterMetric = NoopObservableUpDownCounterMetric; +exports.NOOP_METER = new NoopMeter(); +// Synchronous instruments +exports.NOOP_COUNTER_METRIC = new NoopCounterMetric(); 
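+// Illustrative note on the no-op instruments defined here: they let metric calls run
+// unconditionally before any real MeterProvider is registered, e.g.
+//   const meter = createNoopMeter();
+//   meter.createCounter('example').add(1); // returns NOOP_COUNTER_METRIC; add() does nothing
+// (the counter name 'example' is only a placeholder for illustration)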
+exports.NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric(); +exports.NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); +// Asynchronous instruments +exports.NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric(); +exports.NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric(); +exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric(); +/** + * Create a no-op Meter + */ +function createNoopMeter() { + return exports.NOOP_METER; +} +exports.createNoopMeter = createNoopMeter; +//# sourceMappingURL=NoopMeter.js.map + +/***/ }), + +/***/ 2647: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NOOP_METER_PROVIDER = exports.NoopMeterProvider = void 0; +const NoopMeter_1 = __nccwpck_require__(4837); +/** + * An implementation of the {@link MeterProvider} which returns an impotent Meter + * for all calls to `getMeter` + */ +class NoopMeterProvider { + getMeter(_name, _version, _options) { + return NoopMeter_1.NOOP_METER; + } +} +exports.NoopMeterProvider = NoopMeterProvider; +exports.NOOP_METER_PROVIDER = new NoopMeterProvider(); +//# sourceMappingURL=NoopMeterProvider.js.map + +/***/ }), + +/***/ 9957: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__nccwpck_require__(7200), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 9406: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports._globalThis = void 0; +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins +exports._globalThis = typeof globalThis === 'object' ? globalThis : global; +//# sourceMappingURL=globalThis.js.map + +/***/ }), + +/***/ 7200: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__nccwpck_require__(9406), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7591: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.propagation = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. 
+const propagation_1 = __nccwpck_require__(9909); +/** Entrypoint for propagation API */ +exports.propagation = propagation_1.PropagationAPI.getInstance(); +//# sourceMappingURL=propagation-api.js.map + +/***/ }), + +/***/ 2368: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoopTextMapPropagator = void 0; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +class NoopTextMapPropagator { + /** Noop inject function does nothing */ + inject(_context, _carrier) { } + /** Noop extract function does nothing and returns the input context */ + extract(context, _carrier) { + return context; + } + fields() { + return []; + } +} +exports.NoopTextMapPropagator = NoopTextMapPropagator; +//# sourceMappingURL=NoopTextMapPropagator.js.map + +/***/ }), + +/***/ 865: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultTextMapSetter = exports.defaultTextMapGetter = void 0; +exports.defaultTextMapGetter = { + get(carrier, key) { + if (carrier == null) { + return undefined; + } + return carrier[key]; + }, + keys(carrier) { + if (carrier == null) { + return []; + } + return Object.keys(carrier); + }, +}; +exports.defaultTextMapSetter = { + set(carrier, key, value) { + if (carrier == null) { + return; + } + carrier[key] = value; + }, +}; +//# sourceMappingURL=TextMapPropagator.js.map + +/***/ }), + +/***/ 8989: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.trace = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const trace_1 = __nccwpck_require__(1539); +/** Entrypoint for trace API */ +exports.trace = trace_1.TraceAPI.getInstance(); +//# sourceMappingURL=trace-api.js.map + +/***/ }), + +/***/ 1462: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NonRecordingSpan = void 0; +const invalid_span_constants_1 = __nccwpck_require__(1760); +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +class NonRecordingSpan { + constructor(_spanContext = invalid_span_constants_1.INVALID_SPAN_CONTEXT) { + this._spanContext = _spanContext; + } + // Returns a SpanContext. + spanContext() { + return this._spanContext; + } + // By default does nothing + setAttribute(_key, _value) { + return this; + } + // By default does nothing + setAttributes(_attributes) { + return this; + } + // By default does nothing + addEvent(_name, _attributes) { + return this; + } + // By default does nothing + setStatus(_status) { + return this; + } + // By default does nothing + updateName(_name) { + return this; + } + // By default does nothing + end(_endTime) { } + // isRecording always returns false for NonRecordingSpan. + isRecording() { + return false; + } + // By default does nothing + recordException(_exception, _time) { } +} +exports.NonRecordingSpan = NonRecordingSpan; +//# sourceMappingURL=NonRecordingSpan.js.map + +/***/ }), + +/***/ 7606: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoopTracer = void 0; +const context_1 = __nccwpck_require__(7171); +const context_utils_1 = __nccwpck_require__(3326); +const NonRecordingSpan_1 = __nccwpck_require__(1462); +const spancontext_utils_1 = __nccwpck_require__(9745); +const contextApi = context_1.ContextAPI.getInstance(); +/** + * No-op implementations of {@link Tracer}. 
+ */ +class NoopTracer { + // startSpan starts a noop span. + startSpan(name, options, context = contextApi.active()) { + const root = Boolean(options === null || options === void 0 ? void 0 : options.root); + if (root) { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + const parentFromContext = context && (0, context_utils_1.getSpanContext)(context); + if (isSpanContext(parentFromContext) && + (0, spancontext_utils_1.isSpanContextValid)(parentFromContext)) { + return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext); + } + else { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + } + startActiveSpan(name, arg2, arg3, arg4) { + let opts; + let ctx; + let fn; + if (arguments.length < 2) { + return; + } + else if (arguments.length === 2) { + fn = arg2; + } + else if (arguments.length === 3) { + opts = arg2; + fn = arg3; + } + else { + opts = arg2; + ctx = arg3; + fn = arg4; + } + const parentContext = ctx !== null && ctx !== void 0 ? ctx : contextApi.active(); + const span = this.startSpan(name, opts, parentContext); + const contextWithSpanSet = (0, context_utils_1.setSpan)(parentContext, span); + return contextApi.with(contextWithSpanSet, fn, undefined, span); + } +} +exports.NoopTracer = NoopTracer; +function isSpanContext(spanContext) { + return (typeof spanContext === 'object' && + typeof spanContext['spanId'] === 'string' && + typeof spanContext['traceId'] === 'string' && + typeof spanContext['traceFlags'] === 'number'); +} +//# sourceMappingURL=NoopTracer.js.map + +/***/ }), + +/***/ 3259: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoopTracerProvider = void 0; +const NoopTracer_1 = __nccwpck_require__(7606); +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. + */ +class NoopTracerProvider { + getTracer(_name, _version, _options) { + return new NoopTracer_1.NoopTracer(); + } +} +exports.NoopTracerProvider = NoopTracerProvider; +//# sourceMappingURL=NoopTracerProvider.js.map + +/***/ }), + +/***/ 3503: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ProxyTracer = void 0; +const NoopTracer_1 = __nccwpck_require__(7606); +const NOOP_TRACER = new NoopTracer_1.NoopTracer(); +/** + * Proxy tracer provided by the proxy tracer provider + */ +class ProxyTracer { + constructor(_provider, name, version, options) { + this._provider = _provider; + this.name = name; + this.version = version; + this.options = options; + } + startSpan(name, options, context) { + return this._getTracer().startSpan(name, options, context); + } + startActiveSpan(_name, _options, _context, _fn) { + const tracer = this._getTracer(); + return Reflect.apply(tracer.startActiveSpan, tracer, arguments); + } + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. + */ + _getTracer() { + if (this._delegate) { + return this._delegate; + } + const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); + if (!tracer) { + return NOOP_TRACER; + } + this._delegate = tracer; + return this._delegate; + } +} +exports.ProxyTracer = ProxyTracer; +//# sourceMappingURL=ProxyTracer.js.map + +/***/ }), + +/***/ 2285: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ProxyTracerProvider = void 0; +const ProxyTracer_1 = __nccwpck_require__(3503); +const NoopTracerProvider_1 = __nccwpck_require__(3259); +const NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider(); +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +class ProxyTracerProvider { + /** + * Get a {@link ProxyTracer} + */ + getTracer(name, version, options) { + var _a; + return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer_1.ProxyTracer(this, name, version, options)); + } + getDelegate() { + var _a; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + } + /** + * Set the delegate tracer provider + */ + setDelegate(delegate) { + this._delegate = delegate; + } + getDelegateTracer(name, version, options) { + var _a; + return (_a = this._delegate) === null || _a === void 0 ? 
void 0 : _a.getTracer(name, version, options); + } +} +exports.ProxyTracerProvider = ProxyTracerProvider; +//# sourceMappingURL=ProxyTracerProvider.js.map + +/***/ }), + +/***/ 3209: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SamplingDecision = void 0; +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +var SamplingDecision; +(function (SamplingDecision) { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. + */ + SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; +})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {})); +//# sourceMappingURL=SamplingResult.js.map + +/***/ }), + +/***/ 3326: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSpanContext = exports.setSpanContext = exports.deleteSpan = exports.setSpan = exports.getActiveSpan = exports.getSpan = void 0; +const context_1 = __nccwpck_require__(8242); +const NonRecordingSpan_1 = __nccwpck_require__(1462); +const context_2 = __nccwpck_require__(7171); +/** + * span key + */ +const SPAN_KEY = (0, context_1.createContextKey)('OpenTelemetry Context Key SPAN'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +exports.getSpan = getSpan; +/** + * Gets the span from the current context, if one exists. 
+ */ +function getActiveSpan() { + return getSpan(context_2.ContextAPI.getInstance().active()); +} +exports.getActiveSpan = getActiveSpan; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +exports.setSpan = setSpan; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +function deleteSpan(context) { + return context.deleteValue(SPAN_KEY); +} +exports.deleteSpan = deleteSpan; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return setSpan(context, new NonRecordingSpan_1.NonRecordingSpan(spanContext)); +} +exports.setSpanContext = setSpanContext; +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.spanContext(); +} +exports.getSpanContext = getSpanContext; +//# sourceMappingURL=context-utils.js.map + +/***/ }), + +/***/ 2110: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TraceStateImpl = void 0; +const tracestate_validators_1 = __nccwpck_require__(4864); +const MAX_TRACE_STATE_ITEMS = 32; +const MAX_TRACE_STATE_LEN = 512; +const LIST_MEMBERS_SEPARATOR = ','; +const LIST_MEMBER_KEY_VALUE_SPLITTER = '='; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +class TraceStateImpl { + constructor(rawTraceState) { + this._internalState = new Map(); + if (rawTraceState) + this._parse(rawTraceState); + } + set(key, value) { + // TODO: Benchmark the different approaches(map vs list) and + // use the faster one. 
+ const traceState = this._clone(); + if (traceState._internalState.has(key)) { + traceState._internalState.delete(key); + } + traceState._internalState.set(key, value); + return traceState; + } + unset(key) { + const traceState = this._clone(); + traceState._internalState.delete(key); + return traceState; + } + get(key) { + return this._internalState.get(key); + } + serialize() { + return this._keys() + .reduce((agg, key) => { + agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key)); + return agg; + }, []) + .join(LIST_MEMBERS_SEPARATOR); + } + _parse(rawTraceState) { + if (rawTraceState.length > MAX_TRACE_STATE_LEN) + return; + this._internalState = rawTraceState + .split(LIST_MEMBERS_SEPARATOR) + .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning + .reduce((agg, part) => { + const listMember = part.trim(); // Optional Whitespace (OWS) handling + const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); + if (i !== -1) { + const key = listMember.slice(0, i); + const value = listMember.slice(i + 1, part.length); + if ((0, tracestate_validators_1.validateKey)(key) && (0, tracestate_validators_1.validateValue)(value)) { + agg.set(key, value); + } + else { + // TODO: Consider to add warning log + } + } + return agg; + }, new Map()); + // Because of the reverse() requirement, trunc must be done after map is created + if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { + this._internalState = new Map(Array.from(this._internalState.entries()) + .reverse() // Use reverse same as original tracestate parse chain + .slice(0, MAX_TRACE_STATE_ITEMS)); + } + } + _keys() { + return Array.from(this._internalState.keys()).reverse(); + } + _clone() { + const traceState = new TraceStateImpl(); + traceState._internalState = new Map(this._internalState); + return traceState; + } +} +exports.TraceStateImpl = TraceStateImpl; +//# sourceMappingURL=tracestate-impl.js.map + +/***/ }), + +/***/ 4864: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateValue = exports.validateKey = void 0; +const VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; +const VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`; +const VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`; +const VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`); +const VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; +const INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. 
+ * see https://www.w3.org/TR/trace-context/#key + */ +function validateKey(key) { + return VALID_KEY_REGEX.test(key); +} +exports.validateKey = validateKey; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. + */ +function validateValue(value) { + return (VALID_VALUE_BASE_REGEX.test(value) && + !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); +} +exports.validateValue = validateValue; +//# sourceMappingURL=tracestate-validators.js.map + +/***/ }), + +/***/ 2615: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createTraceState = void 0; +const tracestate_impl_1 = __nccwpck_require__(2110); +function createTraceState(rawTraceState) { + return new tracestate_impl_1.TraceStateImpl(rawTraceState); +} +exports.createTraceState = createTraceState; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 1760: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; +const trace_flags_1 = __nccwpck_require__(6905); +exports.INVALID_SPANID = '0000000000000000'; +exports.INVALID_TRACEID = '00000000000000000000000000000000'; +exports.INVALID_SPAN_CONTEXT = { + traceId: exports.INVALID_TRACEID, + spanId: exports.INVALID_SPANID, + traceFlags: trace_flags_1.TraceFlags.NONE, +}; +//# sourceMappingURL=invalid-span-constants.js.map + +/***/ }), + +/***/ 1424: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SpanKind = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind = exports.SpanKind || (exports.SpanKind = {})); +//# sourceMappingURL=span_kind.js.map + +/***/ }), + +/***/ 9745: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const invalid_span_constants_1 = __nccwpck_require__(1760); +const NonRecordingSpan_1 = __nccwpck_require__(1462); +const VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; +const VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; +function isValidTraceId(traceId) { + return VALID_TRACEID_REGEX.test(traceId) && traceId !== invalid_span_constants_1.INVALID_TRACEID; +} +exports.isValidTraceId = isValidTraceId; +function isValidSpanId(spanId) { + return VALID_SPANID_REGEX.test(spanId) && spanId !== invalid_span_constants_1.INVALID_SPANID; +} +exports.isValidSpanId = isValidSpanId; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +function isSpanContextValid(spanContext) { + return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); +} +exports.isSpanContextValid = isSpanContextValid; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +function wrapSpanContext(spanContext) { + return new NonRecordingSpan_1.NonRecordingSpan(spanContext); +} +exports.wrapSpanContext = wrapSpanContext; +//# sourceMappingURL=spancontext-utils.js.map + +/***/ }), + +/***/ 8845: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SpanStatusCode = void 0; +/** + * An enumeration of status codes. + */ +var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode = exports.SpanStatusCode || (exports.SpanStatusCode = {})); +//# sourceMappingURL=status.js.map + +/***/ }), + +/***/ 6905: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TraceFlags = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var TraceFlags; +(function (TraceFlags) { + /** Represents no flag set. */ + TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; + /** Bit to represent whether trace is sampled in trace flags. */ + TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; +})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {})); +//# sourceMappingURL=trace_flags.js.map + +/***/ }), + +/***/ 8996: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.VERSION = void 0; +// this is autogenerated file, see scripts/version-update.js +exports.VERSION = '1.7.0'; +//# sourceMappingURL=version.js.map + +/***/ }), + +/***/ 4812: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = +{ + parallel : __nccwpck_require__(8210), + serial : __nccwpck_require__(445), + serialOrdered : __nccwpck_require__(3578) +}; + + +/***/ }), + +/***/ 1700: +/***/ ((module) => { + +// API +module.exports = abort; + +/** + * Aborts leftover active jobs + * + * @param {object} state - current state object + */ +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); + + // reset leftover jobs + state.jobs = {}; +} + +/** + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort + */ +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); + } +} + + +/***/ }), + +/***/ 2794: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var defer = __nccwpck_require__(5295); + +// API +module.exports = async; + +/** + * Runs provided callback asynchronously + * even if callback itself is not + * + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback + */ +function async(callback) +{ + var isAsync = false; + + // check if async happened + defer(function() { isAsync = true; }); + + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); + } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; +} + + +/***/ }), + +/***/ 5295: +/***/ ((module) => { + +module.exports = defer; + +/** + * Runs provided function on next iteration of the event loop + * + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } +} + + +/***/ }), + +/***/ 9023: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var async = __nccwpck_require__(2794) + , abort = __nccwpck_require__(1700) + ; + +// API +module.exports = iterate; + +/** + * Iterates over each job object + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed + */ +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? 
state['keyedList'][state.index] : state.index; + + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } + + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; + } + + // return salvaged results + callback(error, state.results); + }); +} + +/** + * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else + */ +function runJob(iterator, key, item, callback) +{ + var aborter; + + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } + + return aborter; +} + + +/***/ }), + +/***/ 2474: +/***/ ((module) => { + +// API +module.exports = state; + +/** + * Creates initial state object + * for iteration over list + * + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object + */ +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length + } + ; + + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } + + return initState; +} + + +/***/ }), + +/***/ 7942: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var abort = __nccwpck_require__(1700) + , async = __nccwpck_require__(2794) + ; + +// API +module.exports = terminator; + +/** + * Terminates jobs in the attached state context + * + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination + */ +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } + + // fast forward iteration index + this.index = this.size; + + // abort jobs + abort(this); + + // send back results we have so far + async(callback)(null, this.results); +} + + +/***/ }), + +/***/ 8210: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var iterate = __nccwpck_require__(9023) + , initState = __nccwpck_require__(2474) + , terminator = __nccwpck_require__(7942) + ; + +// Public API +module.exports = parallel; + +/** + * Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; + } + + return terminator.bind(state, callback); +} + + +/***/ }), + +/***/ 445: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var serialOrdered = __nccwpck_require__(3578); + +// Public API +module.exports = serial; + +/** + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} + + +/***/ }), + +/***/ 3578: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var iterate = __nccwpck_require__(9023) + , initState = __nccwpck_require__(2474) + , terminator = __nccwpck_require__(7942) + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; + +/** + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); + + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } + + state.index++; + + // are we there yet? 
+ if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } + + // done here + callback(null, state.results); + }); + + return terminator.bind(state, callback); +} + +/* + * -- Sort methods + */ + +/** + * sort helper to sort array elements in ascending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 1 : 0; +} + +/** + * sort helper to sort array elements in descending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function descending(a, b) +{ + return -1 * ascending(a, b); +} + + +/***/ }), + +/***/ 9417: +/***/ ((module) => { + +"use strict"; + +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} + + +/***/ }), + +/***/ 3682: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var register = __nccwpck_require__(4670); +var addHook = __nccwpck_require__(5549); +var removeHook = __nccwpck_require__(6819); + +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind; +var bindable = bind.bind(bind); + +function bindApi(hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply( + null, + name ? [state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach(function (kind) { + var args = name ? [state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); +} + +function HookSingular() { + var singularHookName = "h"; + var singularHookState = { + registry: {}, + }; + var singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; +} + +function HookCollection() { + var state = { + registry: {}, + }; + + var hook = register.bind(null, state); + bindApi(hook, state); + + return hook; +} + +var collectionHookDeprecationMessageDisplayed = false; +function Hook() { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn( + '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4' + ); + collectionHookDeprecationMessageDisplayed = true; + } + return HookCollection(); +} + +Hook.Singular = HookSingular.bind(); +Hook.Collection = HookCollection.bind(); + +module.exports = Hook; +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook; +module.exports.Singular = Hook.Singular; +module.exports.Collection = Hook.Collection; + + +/***/ }), + +/***/ 5549: +/***/ ((module) => { + +module.exports = addHook; + +function addHook(state, kind, name, hook) { + var orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; + } + + if (kind === "before") { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)); + }; + } + + if (kind === "after") { + hook = function (method, options) { + var result; + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_; + return orig(result, options); + }) + .then(function () { + return result; + }); + }; + } + + if (kind === "error") { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options); + }); + }; + } + + state.registry[name].push({ + hook: hook, + orig: orig, + }); +} + + +/***/ }), + +/***/ 4670: +/***/ ((module) => { + +module.exports = register; + +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); + } + + if (!options) { + options = {}; + } + + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options); + }, method)(); + } + + return Promise.resolve().then(function () { + if (!state.registry[name]) { + return method(options); + } + + return state.registry[name].reduce(function (method, registered) { + return registered.hook.bind(null, method, options); + }, method)(); + }); +} + + +/***/ }), + +/***/ 6819: +/***/ ((module) => { + +module.exports = removeHook; + +function removeHook(state, name, method) { + if (!state.registry[name]) { + return; + } + + var index = state.registry[name] + .map(function (registered) { + return registered.orig; + }) + .indexOf(method); + + if (index === -1) { + return; + } + + state.registry[name].splice(index, 1); +} + + +/***/ }), + +/***/ 3717: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var concatMap = __nccwpck_require__(6891); +var balanced = __nccwpck_require__(9417); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + + + +/***/ }), + +/***/ 5443: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = __nccwpck_require__(3837); +var Stream = (__nccwpck_require__(2781).Stream); +var DelayedStream = __nccwpck_require__(8611); + +module.exports = CombinedStream; +function CombinedStream() { + this.writable = false; + this.readable = true; + this.dataSize = 0; + this.maxDataSize = 2 * 1024 * 1024; + this.pauseStreams = true; + + this._released = false; + this._streams = []; + this._currentStream = null; + this._insideLoop = false; + this._pendingNext = false; +} +util.inherits(CombinedStream, Stream); + +CombinedStream.create = function(options) { + var combinedStream = new this(); + + options = options || {}; + for (var option in options) { + combinedStream[option] = options[option]; + } + + return combinedStream; +}; + +CombinedStream.isStreamLike = function(stream) { + return (typeof stream !== 'function') + && (typeof stream !== 'string') + && (typeof stream !== 'boolean') + && (typeof stream !== 'number') + && (!Buffer.isBuffer(stream)); +}; + +CombinedStream.prototype.append = function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + + if (isStreamLike) { + if (!(stream instanceof DelayedStream)) { + var newStream = DelayedStream.create(stream, { + maxDataSize: Infinity, + pauseStream: this.pauseStreams, + }); + stream.on('data', this._checkDataSize.bind(this)); + stream = newStream; + } + + this._handleErrors(stream); + + if (this.pauseStreams) { + stream.pause(); + } + } + + this._streams.push(stream); + return this; +}; + +CombinedStream.prototype.pipe = function(dest, options) { + Stream.prototype.pipe.call(this, dest, options); + this.resume(); + return dest; +}; + +CombinedStream.prototype._getNext = function() { + this._currentStream = null; + + if (this._insideLoop) { + this._pendingNext = true; + return; // defer call + } + + this._insideLoop = true; + try { + do { + this._pendingNext = false; + this._realGetNext(); + } while (this._pendingNext); + } finally { + this._insideLoop = false; + } +}; + +CombinedStream.prototype._realGetNext = function() { + var stream = this._streams.shift(); + + + if (typeof stream == 'undefined') { + this.end(); + return; + } + + if (typeof stream !== 'function') { + this._pipeNext(stream); + return; + } + + var getStream = stream; + getStream(function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('data', this._checkDataSize.bind(this)); + this._handleErrors(stream); + } + + this._pipeNext(stream); + }.bind(this)); +}; + +CombinedStream.prototype._pipeNext = function(stream) { + this._currentStream = stream; + + var isStreamLike = 
CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('end', this._getNext.bind(this)); + stream.pipe(this, {end: false}); + return; + } + + var value = stream; + this.write(value); + this._getNext(); +}; + +CombinedStream.prototype._handleErrors = function(stream) { + var self = this; + stream.on('error', function(err) { + self._emitError(err); + }); +}; + +CombinedStream.prototype.write = function(data) { + this.emit('data', data); +}; + +CombinedStream.prototype.pause = function() { + if (!this.pauseStreams) { + return; + } + + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); + this.emit('pause'); +}; + +CombinedStream.prototype.resume = function() { + if (!this._released) { + this._released = true; + this.writable = true; + this._getNext(); + } + + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); + this.emit('resume'); +}; + +CombinedStream.prototype.end = function() { + this._reset(); + this.emit('end'); +}; + +CombinedStream.prototype.destroy = function() { + this._reset(); + this.emit('close'); +}; + +CombinedStream.prototype._reset = function() { + this.writable = false; + this._streams = []; + this._currentStream = null; +}; + +CombinedStream.prototype._checkDataSize = function() { + this._updateDataSize(); + if (this.dataSize <= this.maxDataSize) { + return; + } + + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; + this._emitError(new Error(message)); +}; + +CombinedStream.prototype._updateDataSize = function() { + this.dataSize = 0; + + var self = this; + this._streams.forEach(function(stream) { + if (!stream.dataSize) { + return; + } + + self.dataSize += stream.dataSize; + }); + + if (this._currentStream && this._currentStream.dataSize) { + this.dataSize += this._currentStream.dataSize; + } +}; + +CombinedStream.prototype._emitError = function(err) { + this._reset(); + this.emit('error', err); +}; + + +/***/ }), + +/***/ 6891: +/***/ ((module) => { + +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + + +/***/ }), + +/***/ 8611: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var Stream = (__nccwpck_require__(2781).Stream); +var util = __nccwpck_require__(3837); + +module.exports = DelayedStream; +function DelayedStream() { + this.source = null; + this.dataSize = 0; + this.maxDataSize = 1024 * 1024; + this.pauseStream = true; + + this._maxDataSizeExceeded = false; + this._released = false; + this._bufferedEvents = []; +} +util.inherits(DelayedStream, Stream); + +DelayedStream.create = function(source, options) { + var delayedStream = new this(); + + options = options || {}; + for (var option in options) { + delayedStream[option] = options[option]; + } + + delayedStream.source = source; + + var realEmit = source.emit; + source.emit = function() { + delayedStream._handleEmit(arguments); + return realEmit.apply(source, arguments); + }; + + source.on('error', function() {}); + if (delayedStream.pauseStream) { + source.pause(); + } + + return delayedStream; +}; + +Object.defineProperty(DelayedStream.prototype, 'readable', { + configurable: true, + enumerable: true, + get: function() { 
+ return this.source.readable; + } +}); + +DelayedStream.prototype.setEncoding = function() { + return this.source.setEncoding.apply(this.source, arguments); +}; + +DelayedStream.prototype.resume = function() { + if (!this._released) { + this.release(); + } + + this.source.resume(); +}; + +DelayedStream.prototype.pause = function() { + this.source.pause(); +}; + +DelayedStream.prototype.release = function() { + this._released = true; + + this._bufferedEvents.forEach(function(args) { + this.emit.apply(this, args); + }.bind(this)); + this._bufferedEvents = []; +}; + +DelayedStream.prototype.pipe = function() { + var r = Stream.prototype.pipe.apply(this, arguments); + this.resume(); + return r; +}; + +DelayedStream.prototype._handleEmit = function(args) { + if (this._released) { + this.emit.apply(this, args); + return; + } + + if (args[0] === 'data') { + this.dataSize += args[1].length; + this._checkIfMaxDataSizeExceeded(); + } + + this._bufferedEvents.push(args); +}; + +DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { + if (this._maxDataSizeExceeded) { + return; + } + + if (this.dataSize <= this.maxDataSize) { + return; + } + + this._maxDataSizeExceeded = true; + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' + this.emit('error', new Error(message)); +}; + + +/***/ }), + +/***/ 8932: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; + + +/***/ }), + +/***/ 7426: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2022 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. + */ + +module.exports = __nccwpck_require__(3765) + + +/***/ }), + +/***/ 3583: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + + + +/** + * Module dependencies. + * @private + */ + +var db = __nccwpck_require__(7426) +var extname = (__nccwpck_require__(1017).extname) + +/** + * Module variables. + * @private + */ + +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. 
+ * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' + path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. + * @private + */ + +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return + } + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } + + // set the extension -> mime + types[extension] = type + } + }) +} + + +/***/ }), + +/***/ 3973: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || { + sep: '/' +} +minimatch.sep = path.sep + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = __nccwpck_require__(3717) + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' 
}, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + b = b || {} + var t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = function defaults (options) { + return orig.defaults(ext(def, options)).Minimatch + } + + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) + } + + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) + } + + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) + } + + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) + } + + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + assertValidPattern(pattern) + + if (!options) options = {} + + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (!options.allowWindowsEscape && path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. 
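+  // Illustrative: minimatch('#foo', '#foo') === false, because a leading "#"
+  // marks a comment pattern (unless options.nocomment is set), while
+  // minimatch('', '') === true, since an empty pattern matches only ''.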
+ if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +var MAX_PATTERN_LENGTH = 1024 * 64 +var assertValidPattern = function (pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. 
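+// Illustrative examples of the rule above: in "a/**/b" the middle portion is
+// exactly "**" and parses to the GLOBSTAR marker, while in "a/x**/b" the
+// portion "x**" is an ordinary pattern whose run of stars behaves like a
+// single "*" and compiles to a plain regexp.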
+Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) + + var options = this.options + + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' + } + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + /* istanbul ignore next */ + case '/': { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? 
'(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. 
As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '[': case '.': case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. 
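+  // Rough usage sketch: new Minimatch('*.js').makeRe().test('index.js') is true,
+  // while .test('lib/index.js') and .test('.hidden.js') are both false (a single
+  // "*" never crosses "/", and dotfiles need options.dot).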
+ var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = function match (f, partial) { + if (typeof partial === 'undefined') partial = this.partial + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. 
+ /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. 
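+    // Illustrative: matching the file parts ['a', 'b'] against the compiled parts
+    // of 'a/b/*' with partial=true lands here and returns true: the file ran out
+    // first, and the trailing '*' may still match deeper entries during traversal.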
+ return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. + /* istanbul ignore next */ + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} + + +/***/ }), + +/***/ 467: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var Stream = _interopDefault(__nccwpck_require__(2781)); +var http = _interopDefault(__nccwpck_require__(3685)); +var Url = _interopDefault(__nccwpck_require__(7310)); +var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); +var https = _interopDefault(__nccwpck_require__(5687)); +var zlib = _interopDefault(__nccwpck_require__(9796)); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = (__nccwpck_require__(2877).convert); +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports["default"] = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; +exports.AbortError = AbortError; + + +/***/ }), + +/***/ 1223: +/***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(2940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 2043: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. + sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. 
+ if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. + var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = (__nccwpck_require__(2781).Stream) + } catch (ex) { + Stream = function () {} + } + if (!Stream) Stream = function () {} + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. 
+ me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = (__nccwpck_require__(1576).StringDecoder) + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. Implementation of an emoji-capable xml parser + // is left as an exercise for the reader. + var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + function isWhitespace (c) { + return c === ' ' || c === '\n' || c === '\r' || c === '\t' + } + + function isQuote (c) { + return c === '"' || c === '\'' + } + + function isAttribEnd (c) { + return c === '>' || isWhitespace(c) + } + + function isMatch (regex, c) { + return regex.test(c) + } + + function notMatch (regex, c) { + return !isMatch(regex, c) + } + + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. + OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, //