diff --git a/action.yml b/action.yml index a5a90fa..c36b893 100644 --- a/action.yml +++ b/action.yml @@ -7,6 +7,9 @@ inputs: bucket-prefix: description: "Prefix of the bucket name. The PR number will be appended to this value." required: true + bucket-region: + description: "Region of the S3 bucket." + required: true folder-to-copy: description: "Folder to copy assets from into S3." required: false diff --git a/dist/index.js b/dist/index.js index 04275fe..5227240 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1,258 +1,107 @@ -module.exports = -/******/ (function(modules, runtime) { // webpackBootstrap -/******/ "use strict"; -/******/ // The module cache -/******/ var installedModules = {}; -/******/ -/******/ // The require function -/******/ function __webpack_require__(moduleId) { -/******/ -/******/ // Check if module is in cache -/******/ if(installedModules[moduleId]) { -/******/ return installedModules[moduleId].exports; -/******/ } -/******/ // Create a new module (and put it into the cache) -/******/ var module = installedModules[moduleId] = { -/******/ i: moduleId, -/******/ l: false, -/******/ exports: {} -/******/ }; -/******/ -/******/ // Execute the module function -/******/ var threw = true; -/******/ try { -/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); -/******/ threw = false; -/******/ } finally { -/******/ if(threw) delete installedModules[moduleId]; -/******/ } -/******/ -/******/ // Flag the module as loaded -/******/ module.l = true; -/******/ -/******/ // Return the exports of the module -/******/ return module.exports; -/******/ } -/******/ -/******/ -/******/ __webpack_require__.ab = __dirname + "/"; -/******/ -/******/ // the startup function -/******/ function startup() { -/******/ // Load entry module and return exports -/******/ return __webpack_require__(325); -/******/ }; -/******/ -/******/ // run startup -/******/ return startup(); -/******/ }) -/************************************************************************/ -/******/ ([ -/* 0 */, -/* 1 */, -/* 2 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const os = __webpack_require__(87); -const macosRelease = __webpack_require__(118); -const winRelease = __webpack_require__(49); - -const osName = (platform, release) => { - if (!platform && release) { - throw new Error('You can\'t specify a `release` without specifying `platform`'); - } - - platform = platform || os.platform(); - - let id; - - if (platform === 'darwin') { - if (!release && os.platform() === 'darwin') { - release = os.release(); - } - - const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; - id = release ? macosRelease(release).name : ''; - return prefix + (id ? ' ' + id : ''); - } - - if (platform === 'linux') { - if (!release && os.platform() === 'linux') { - release = os.release(); - } - - id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; - return 'Linux' + (id ? ' ' + id : ''); - } - - if (platform === 'win32') { - if (!release && os.platform() === 'win32') { - release = os.release(); - } - - id = release ? winRelease(release) : ''; - return 'Windows' + (id ? 
' ' + id : ''); - } - - return platform; -}; - -module.exports = osName; - - -/***/ }), -/* 3 */, -/* 4 */, -/* 5 */, -/* 6 */, -/* 7 */, -/* 8 */, -/* 9 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var once = __webpack_require__(969); - -var noop = function() {}; - -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; - -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; - -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - - callback = once(callback || noop); - - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; - - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; - - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; - - var onexit = function(exitCode) { - callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); - }; - - var onerror = function(err) { - callback.call(stream, err); - }; - - var onclose = function() { - process.nextTick(onclosenexttick); - }; - - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; - - var onrequest = function() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } +/******/ (() => { // webpackBootstrap +/******/ var __webpack_modules__ = ({ - if (isChildProcess(stream)) stream.on('exit', onexit); +/***/ 7351: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); +"use strict"; - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; }; - -module.exports = eos; - - -/***/ }), -/* 10 */, -/* 11 */ -/***/ (function(module) { - -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) - - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') - - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) - - return wrapper - - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__nccwpck_require__(2037)); +const utils_1 = __nccwpck_require__(5278); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; } - return ret - } } - +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map /***/ }), -/* 12 */, -/* 13 */, -/* 14 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { + +/***/ 2186: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -284,8840 +133,6157 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, 
_arguments || [])).next()); }); }; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const github = __importStar(__webpack_require__(469)); -const githubClient_1 = __importDefault(__webpack_require__(119)); -const dayjs_1 = __importDefault(__webpack_require__(874)); -exports.default = (repo, environmentPrefix) => __awaiter(void 0, void 0, void 0, function* () { - const githubEventName = github.context.eventName; - const environment = githubEventName === 'pull_request' - ? `${environmentPrefix || 'PR-'}${github.context.payload.pull_request.number}` - : `${environmentPrefix || 'ACTION-'}${dayjs_1.default().format('DD-MM-YYYY-hh:mma')}`; - const deployments = yield githubClient_1.default.repos.listDeployments({ - repo: repo.repo, - owner: repo.owner, - environment - }); - const existing = deployments.data.length; - if (existing < 1) { - console.log('No exiting deployments found for pull request'); - return; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = __nccwpck_require__(7351); +const file_command_1 = __nccwpck_require__(717); +const utils_1 = __nccwpck_require__(5278); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const oidc_utils_1 = __nccwpck_require__(8041); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); } - for (const deployment of deployments.data) { - console.log(`Deactivating existing deployment - ${deployment.id}`); - yield githubClient_1.default.repos.createDeploymentStatus(Object.assign(Object.assign({}, repo), { deployment_id: deployment.id, state: 'inactive' })); + command_1.issueCommand('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueFileCommand('PATH', inputPath); } -}); - - -/***/ }), -/* 15 */, -/* 16 */ -/***/ (function(module) { - -module.exports = require("tls"); - -/***/ }), -/* 17 */, -/* 18 */ -/***/ (function(module) { - -module.exports = eval("require")("encoding"); - - -/***/ }), -/* 19 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -// Generated by CoffeeScript 1.12.7 -(function() { - var NodeType, XMLDTDNotation, XMLNode, - extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, - hasProp = {}.hasOwnProperty; - - XMLNode = __webpack_require__(855); - - NodeType = __webpack_require__(35); - - module.exports = XMLDTDNotation = (function(superClass) { - extend(XMLDTDNotation, superClass); - - function XMLDTDNotation(parent, name, value) { - XMLDTDNotation.__super__.constructor.call(this, parent); - if (name == null) { - throw new Error("Missing DTD notation name. " + this.debugInfo(name)); - } - if (!value.pubID && !value.sysID) { - throw new Error("Public or system identifiers are required for an external entity. 
" + this.debugInfo(name)); - } - this.name = this.stringify.name(name); - this.type = NodeType.NotationDeclaration; - if (value.pubID != null) { - this.pubID = this.stringify.dtdPubID(value.pubID); - } - if (value.sysID != null) { - this.sysID = this.stringify.dtdSysID(value.sysID); - } + else { + command_1.issueCommand('add-path', {}, inputPath); } - - Object.defineProperty(XMLDTDNotation.prototype, 'publicId', { - get: function() { - return this.pubID; - } - }); - - Object.defineProperty(XMLDTDNotation.prototype, 'systemId', { - get: function() { - return this.sysID; - } - }); - - XMLDTDNotation.prototype.toString = function(options) { - return this.options.writer.dtdNotation(this, this.options.writer.filterOptions(options)); - }; - - return XMLDTDNotation; - - })(XMLNode); - -}).call(this); - - -/***/ }), -/* 20 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const cp = __webpack_require__(129); -const parse = __webpack_require__(568); -const enoent = __webpack_require__(881); - -function spawn(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); - - // Spawn the child process - const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); - - // Hook into child process "exit" event to emit an error if the command - // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - enoent.hookChildProcess(spawned, parsed); - - return spawned; -} - -function spawnSync(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); - - // Spawn the child process - const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); - - // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); - - return result; + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; } - -module.exports = spawn; -module.exports.spawn = spawn; -module.exports.sync = spawnSync; - -module.exports._parse = parse; -module.exports._enoent = enoent; - - -/***/ }), -/* 21 */, -/* 22 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { - -var AWS = __webpack_require__(395); -var STS = __webpack_require__(106); -var iniLoader = AWS.util.iniLoader; - -var ASSUME_ROLE_DEFAULT_REGION = 'us-east-1'; - +exports.addPath = addPath; /** - * Represents credentials loaded from shared credentials file - * (defaulting to ~/.aws/credentials or defined by the - * `AWS_SHARED_CREDENTIALS_FILE` environment variable). - * - * ## Using the shared credentials file - * - * This provider is checked by default in the Node.js environment. To use the - * credentials file provider, simply add your access and secret keys to the - * ~/.aws/credentials file in the following format: - * - * [default] - * aws_access_key_id = AKID... - * aws_secret_access_key = YOUR_SECRET_KEY + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. * - * ## Using custom profiles + * @param name name of the input to get + * @param options optional. See InputOptions. 
+ * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. * - * The SDK supports loading credentials for separate profiles. This can be done - * in two ways: + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] * - * 1. Set the `AWS_PROFILE` environment variable in your process prior to - * loading the SDK. - * 2. Directly load the AWS.SharedIniFileCredentials provider: + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 * - * ```javascript - * var creds = new AWS.SharedIniFileCredentials({profile: 'myprofile'}); - * AWS.config.credentials = creds; - * ``` + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. * - * @!macro nobrowser + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify */ -AWS.SharedIniFileCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * Creates a new SharedIniFileCredentials object. - * - * @param options [map] a set of options - * @option options profile [String] (AWS_PROFILE env var or 'default') - * the name of the profile to load. - * @option options filename [String] ('~/.aws/credentials' or defined by - * AWS_SHARED_CREDENTIALS_FILE process env var) - * the filename to use when loading credentials. - * @option options disableAssumeRole [Boolean] (false) True to disable - * support for profiles that assume an IAM role. If true, and an assume - * role profile is selected, an error is raised. - * @option options preferStaticCredentials [Boolean] (false) True to - * prefer static credentials to role_arn if both are present. - * @option options tokenCodeFn [Function] (null) Function to provide - * STS Assume Role TokenCode, if mfa_serial is provided for profile in ini - * file. 
Function is called with value of mfa_serial and callback, and - * should provide the TokenCode or an error to the callback in the format - * callback(err, token) - * @option options callback [Function] (err) Credentials are eagerly loaded - * by the constructor. When the callback is called with no error, the - * credentials have been loaded successfully. - * @option options httpOptions [map] A set of options to pass to the low-level - * HTTP request. Currently supported options are: - * * **proxy** [String] — the URL to proxy requests through - * * **agent** [http.Agent, https.Agent] — the Agent object to perform - * HTTP requests with. Used for connection pooling. Defaults to the global - * agent (`http.globalAgent`) for non-SSL connections. Note that for - * SSL connections, a special Agent object is used in order to enable - * peer certificate verification. This feature is only available in the - * Node.js environment. - * * **connectTimeout** [Integer] — Sets the socket to timeout after - * failing to establish a connection with the server after - * `connectTimeout` milliseconds. This timeout has no effect once a socket - * connection has been established. - * * **timeout** [Integer] — The number of milliseconds a request can - * take before automatically being terminated. - * Defaults to two minutes (120000). - */ - constructor: function SharedIniFileCredentials(options) { - AWS.Credentials.call(this); - - options = options || {}; - - this.filename = options.filename; - this.profile = options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile; - this.disableAssumeRole = Boolean(options.disableAssumeRole); - this.preferStaticCredentials = Boolean(options.preferStaticCredentials); - this.tokenCodeFn = options.tokenCodeFn || null; - this.httpOptions = options.httpOptions || null; - this.get(options.callback || AWS.util.fn.noop); - }, - - /** - * @api private - */ - load: function load(callback) { - var self = this; - try { - var profiles = AWS.util.getProfilesFromSharedConfig(iniLoader, this.filename); - var profile = profiles[this.profile] || {}; - - if (Object.keys(profile).length === 0) { - throw AWS.util.error( - new Error('Profile ' + this.profile + ' not found'), - { code: 'SharedIniFileCredentialsProviderFailure' } - ); - } - - /* - In the CLI, the presence of both a role_arn and static credentials have - different meanings depending on how many profiles have been visited. For - the first profile processed, role_arn takes precedence over any static - credentials, but for all subsequent profiles, static credentials are - used if present, and only in their absence will the profile's - source_profile and role_arn keys be used to load another set of - credentials. This var is intended to yield compatible behaviour in this - sdk. 
- */ - var preferStaticCredentialsToRoleArn = Boolean( - this.preferStaticCredentials - && profile['aws_access_key_id'] - && profile['aws_secret_access_key'] - ); - - if (profile['role_arn'] && !preferStaticCredentialsToRoleArn) { - this.loadRoleProfile(profiles, profile, function(err, data) { - if (err) { - callback(err); - } else { - self.expired = false; - self.accessKeyId = data.Credentials.AccessKeyId; - self.secretAccessKey = data.Credentials.SecretAccessKey; - self.sessionToken = data.Credentials.SessionToken; - self.expireTime = data.Credentials.Expiration; - callback(null); - } - }); - return; - } - - this.accessKeyId = profile['aws_access_key_id']; - this.secretAccessKey = profile['aws_secret_access_key']; - this.sessionToken = profile['aws_session_token']; - - if (!this.accessKeyId || !this.secretAccessKey) { - throw AWS.util.error( - new Error('Credentials not set for profile ' + this.profile), - { code: 'SharedIniFileCredentialsProviderFailure' } - ); - } - this.expired = false; - callback(null); - } catch (err) { - callback(err); +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); } - }, - - /** - * Loads the credentials from the shared credentials file - * - * @callback callback function(err) - * Called after the shared INI file on disk is read and parsed. When this - * callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - iniLoader.clearCachedFiles(); - this.coalesceRefresh( - callback || AWS.util.fn.callback, - this.disableAssumeRole - ); - }, - - /** - * @api private - */ - loadRoleProfile: function loadRoleProfile(creds, roleProfile, callback) { - if (this.disableAssumeRole) { - throw AWS.util.error( - new Error('Role assumption profiles are disabled. ' + - 'Failed to load profile ' + this.profile + - ' from ' + creds.filename), - { code: 'SharedIniFileCredentialsProviderFailure' } - ); + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. 
+ * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(1327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(1327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(2981); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); +//# sourceMappingURL=core.js.map - var self = this; - var roleArn = roleProfile['role_arn']; - var roleSessionName = roleProfile['role_session_name']; - var externalId = roleProfile['external_id']; - var mfaSerial = roleProfile['mfa_serial']; - var sourceProfileName = roleProfile['source_profile']; - - // From experimentation, the following behavior mimics the AWS CLI: - // - // 1. Use region from the profile if present. - // 2. Otherwise fall back to N. Virginia (global endpoint). - // - // It is necessary to do the fallback explicitly, because if - // 'AWS_STS_REGIONAL_ENDPOINTS=regional', the underlying STS client will - // otherwise throw an error if region is left 'undefined'. - // - // Experimentation shows that the AWS CLI (tested at version 1.18.136) - // ignores the following potential sources of a region for the purposes of - // this AssumeRole call: - // - // - The [default] profile - // - The AWS_REGION environment variable - // - // Ignoring the [default] profile for the purposes of AssumeRole is arguably - // a bug in the CLI since it does use the [default] region for service - // calls... but right now we're matching behavior of the other tool. 
- var profileRegion = roleProfile['region'] || ASSUME_ROLE_DEFAULT_REGION; +/***/ }), - if (!sourceProfileName) { - throw AWS.util.error( - new Error('source_profile is not set using profile ' + this.profile), - { code: 'SharedIniFileCredentialsProviderFailure' } - ); - } +/***/ 717: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - var sourceProfileExistanceTest = creds[sourceProfileName]; +"use strict"; - if (typeof sourceProfileExistanceTest !== 'object') { - throw AWS.util.error( - new Error('source_profile ' + sourceProfileName + ' using profile ' - + this.profile + ' does not exist'), - { code: 'SharedIniFileCredentialsProviderFailure' } - ); +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__nccwpck_require__(7147)); +const os = __importStar(__nccwpck_require__(2037)); +const uuid_1 = __nccwpck_require__(8974); +const utils_1 = __nccwpck_require__(5278); +function issueFileCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. 
+ if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map - var sourceCredentials = new AWS.SharedIniFileCredentials( - AWS.util.merge(this.options || {}, { - profile: sourceProfileName, - preferStaticCredentials: true - }) - ); +/***/ }), - this.roleArn = roleArn; - var sts = new STS({ - credentials: sourceCredentials, - region: profileRegion, - httpOptions: this.httpOptions - }); +/***/ 8041: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - var roleParams = { - RoleArn: roleArn, - RoleSessionName: roleSessionName || 'aws-sdk-js-' + Date.now() - }; +"use strict"; - if (externalId) { - roleParams.ExternalId = externalId; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OidcClient = void 0; +const http_client_1 = __nccwpck_require__(1404); +const auth_1 = __nccwpck_require__(6758); +const core_1 = __nccwpck_require__(2186); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); } - - if (mfaSerial && self.tokenCodeFn) { - roleParams.SerialNumber = mfaSerial; - self.tokenCodeFn(mfaSerial, function(err, token) { - if (err) { - var message; - if (err instanceof Error) { - message = err.message; - } else { - message = err; - } - callback( - AWS.util.error( - new Error('Error fetching MFA token: ' + message), - { code: 'SharedIniFileCredentialsProviderFailure' } - )); - return; + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); } - - roleParams.TokenCode = token; - sts.assumeRole(roleParams, callback); - }); - return; + return token; } - sts.assumeRole(roleParams, callback); - } -}); - + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. 
\n + Error Code : ${error.statusCode}\n + Error Message: ${error.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map /***/ }), -/* 23 */, -/* 24 */, -/* 25 */, -/* 26 */, -/* 27 */, -/* 28 */, -/* 29 */, -/* 30 */, -/* 31 */, -/* 32 */, -/* 33 */, -/* 34 */, -/* 35 */ -/***/ (function(module) { - -// Generated by CoffeeScript 1.12.7 -(function() { - module.exports = { - Element: 1, - Attribute: 2, - Text: 3, - CData: 4, - EntityReference: 5, - EntityDeclaration: 6, - ProcessingInstruction: 7, - Comment: 8, - Document: 9, - DocType: 10, - DocumentFragment: 11, - NotationDeclaration: 12, - Declaration: 201, - Raw: 202, - AttributeDeclaration: 203, - ElementDeclaration: 204, - Dummy: 205 - }; - -}).call(this); - -/***/ }), -/* 36 */, -/* 37 */, -/* 38 */, -/* 39 */, -/* 40 */, -/* 41 */, -/* 42 */, -/* 43 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { +/***/ 2981: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -var AWS = __webpack_require__(395); -var STS = __webpack_require__(106); +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(1017)); /** - * Represents temporary credentials retrieved from {AWS.STS}. Without any - * extra parameters, credentials will be fetched from the - * {AWS.STS.getSessionToken} operation. If an IAM role is provided, the - * {AWS.STS.assumeRole} operation will be used to fetch credentials for the - * role instead. - * - * @note AWS.TemporaryCredentials is deprecated, but remains available for - * backwards compatibility. {AWS.ChainableTemporaryCredentials} is the - * preferred class for temporary credentials. 
- * - * To setup temporary credentials, configure a set of master credentials - * using the standard credentials providers (environment, EC2 instance metadata, - * or from the filesystem), then set the global credentials to a new - * temporary credentials object: - * - * ```javascript - * // Note that environment credentials are loaded by default, - * // the following line is shown for clarity: - * AWS.config.credentials = new AWS.EnvironmentCredentials('AWS'); - * - * // Now set temporary credentials seeded from the master credentials - * AWS.config.credentials = new AWS.TemporaryCredentials(); + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. * - * // subsequent requests will now use temporary credentials from AWS STS. - * new AWS.S3().listBucket(function(err, data) { ... }); - * ``` + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. * - * @!attribute masterCredentials - * @return [AWS.Credentials] the master (non-temporary) credentials used to - * get and refresh temporary credentials from AWS STS. - * @note (see constructor) + * @param pth. Path to transform. + * @return string Win32 path. */ -AWS.TemporaryCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * Creates a new temporary credentials object. - * - * @note In order to create temporary credentials, you first need to have - * "master" credentials configured in {AWS.Config.credentials}. These - * master credentials are necessary to retrieve the temporary credentials, - * as well as refresh the credentials when they expire. - * @param params [map] a map of options that are passed to the - * {AWS.STS.assumeRole} or {AWS.STS.getSessionToken} operations. - * If a `RoleArn` parameter is passed in, credentials will be based on the - * IAM role. - * @param masterCredentials [AWS.Credentials] the master (non-temporary) credentials - * used to get and refresh temporary credentials from AWS STS. - * @example Creating a new credentials object for generic temporary credentials - * AWS.config.credentials = new AWS.TemporaryCredentials(); - * @example Creating a new credentials object for an IAM role - * AWS.config.credentials = new AWS.TemporaryCredentials({ - * RoleArn: 'arn:aws:iam::1234567890:role/TemporaryCredentials', - * }); - * @see AWS.STS.assumeRole - * @see AWS.STS.getSessionToken - */ - constructor: function TemporaryCredentials(params, masterCredentials) { - AWS.Credentials.call(this); - this.loadMasterCredentials(masterCredentials); - this.expired = true; +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. 
+ */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map - this.params = params || {}; - if (this.params.RoleArn) { - this.params.RoleSessionName = - this.params.RoleSessionName || 'temporary-credentials'; - } - }, +/***/ }), - /** - * Refreshes credentials using {AWS.STS.assumeRole} or - * {AWS.STS.getSessionToken}, depending on whether an IAM role ARN was passed - * to the credentials {constructor}. - * - * @callback callback function(err) - * Called when the STS service responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh (callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, +/***/ 1327: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - /** - * @api private - */ - load: function load (callback) { - var self = this; - self.createClients(); - self.masterCredentials.get(function () { - self.service.config.credentials = self.masterCredentials; - var operation = self.params.RoleArn ? - self.service.assumeRole : self.service.getSessionToken; - operation.call(self.service, function (err, data) { - if (!err) { - self.service.credentialsFrom(data, self); - } - callback(err); - }); - }); - }, +"use strict"; - /** - * @api private - */ - loadMasterCredentials: function loadMasterCredentials (masterCredentials) { - this.masterCredentials = masterCredentials || AWS.config.credentials; - while (this.masterCredentials.masterCredentials) { - this.masterCredentials = this.masterCredentials.masterCredentials; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(2037); +const fs_1 = __nccwpck_require__(7147); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; } - - if (typeof this.masterCredentials.get !== 'function') { - this.masterCredentials = new AWS.Credentials(this.masterCredentials); + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. 
+ * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? 
this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer
+     *
+     * @returns {Summary} summary instance
+     */
+    addSeparator() {
+        const element = this.wrap('hr', null);
+        return this.addRaw(element).addEOL();
+    }
+    /**
+     * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); } - }, - - /** - * @api private - */ - createClients: function () { - this.service = this.service || new STS({params: this.params}); - } - -}); - - -/***/ }), -/* 44 */, -/* 45 */, -/* 46 */, -/* 47 */, -/* 48 */ -/***/ (function(module, exports) { - -exports = module.exports = SemVer - -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) - } -} else { - debug = function () {} } +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' - -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 - -// Max safe segment length for coercion. -var MAX_SAFE_COMPONENT_LENGTH = 16 - -// The actual regexps go on exports.re -var re = exports.re = [] -var src = exports.src = [] -var R = 0 - -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -var NUMERICIDENTIFIER = R++ -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' -var NUMERICIDENTIFIERLOOSE = R++ -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -var NONNUMERICIDENTIFIER = R++ -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' - -// ## Main Version -// Three dot-separated numeric identifiers. - -var MAINVERSION = R++ -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')' - -var MAINVERSIONLOOSE = R++ -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')' - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. 
- -var PRERELEASEIDENTIFIER = R++ -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')' - -var PRERELEASEIDENTIFIERLOOSE = R++ -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')' - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -var PRERELEASE = R++ -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' - -var PRERELEASELOOSE = R++ -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -var BUILDIDENTIFIER = R++ -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -var BUILD = R++ -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -var FULL = R++ -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?' - -src[FULL] = '^' + FULLPLAIN + '$' - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?' - -var LOOSE = R++ -src[LOOSE] = '^' + LOOSEPLAIN + '$' - -var GTLT = R++ -src[GTLT] = '((?:<|>)?=?)' - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -var XRANGEIDENTIFIERLOOSE = R++ -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -var XRANGEIDENTIFIER = R++ -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' - -var XRANGEPLAIN = R++ -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:' + src[PRERELEASE] + ')?' + - src[BUILD] + '?' + - ')?)?' - -var XRANGEPLAINLOOSE = R++ -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[PRERELEASELOOSE] + ')?' + - src[BUILD] + '?' + - ')?)?' - -var XRANGE = R++ -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' -var XRANGELOOSE = R++ -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' - -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -var COERCE = R++ -src[COERCE] = '(?:^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' - -// Tilde ranges. 
-// Meaning is "reasonably at or greater than" -var LONETILDE = R++ -src[LONETILDE] = '(?:~>?)' - -var TILDETRIM = R++ -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') -var tildeTrimReplace = '$1~' - -var TILDE = R++ -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' -var TILDELOOSE = R++ -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' - -// Caret ranges. -// Meaning is "at least and backwards compatible with" -var LONECARET = R++ -src[LONECARET] = '(?:\\^)' - -var CARETTRIM = R++ -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') -var caretTrimReplace = '$1^' - -var CARET = R++ -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' -var CARETLOOSE = R++ -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' +/***/ }), -// A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++ -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' -var COMPARATOR = R++ -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' +/***/ 5278: +/***/ ((__unused_webpack_module, exports) => { -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++ -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' +"use strict"; -// this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') -var comparatorTrimReplace = '$1$2$3' +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -var HYPHENRANGE = R++ -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + - '\\s*$' +/***/ }), -var HYPHENRANGELOOSE = R++ -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s*$' +/***/ 6758: +/***/ (function(__unused_webpack_module, exports) { -// Star ranges basically just allow anything at all. 
-var STAR = R++ -src[STAR] = '(<|>)?=?\\s*\\*' +"use strict"; -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) - } +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } } - -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; } - } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map - if (version instanceof SemVer) { - return version - } +/***/ }), - if (typeof version !== 'string') { - return null - } +/***/ 1404: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - if (version.length > MAX_LENGTH) { - return null - } 
+"use strict"; - var r = options.loose ? re[LOOSE] : re[FULL] - if (!r.test(version)) { - return null - } - - try { - return new SemVer(version, options) - } catch (er) { - return null - } +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +const pm = __importStar(__nccwpck_require__(2843)); +const tunnel = __importStar(__nccwpck_require__(4294)); +const undici_1 = __nccwpck_require__(1773); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + 
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers || (exports.Headers = Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? proxyUrl.href : ''; } - -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } } - -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? 
s.version : null +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } } - -exports.SemVer = SemVer - -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return 
this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (!useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if tunneling agent isn't assigned create a new agent + if (!agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. + if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } +/***/ }), - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } +/***/ 2843: +/***/ ((__unused_webpack_module, exports) => { - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose +"use strict"; - var m = version.trim().match(options.loose ? 
re[LOOSE] : re[FULL]) +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} +//# sourceMappingURL=proxy.js.map - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } +/***/ }), - this.raw = version +/***/ 8974: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] +"use strict"; - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; } - - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; } - - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; } - - this.build = m[5] ? 
m[5].split('.') : [] - this.format() -} - -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' + this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; } - return this.version -} +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); -SemVer.prototype.toString = function () { - return this.version -} +var _v = _interopRequireDefault(__nccwpck_require__(1595)); -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } +var _v2 = _interopRequireDefault(__nccwpck_require__(6993)); - return this.compareMain(other) || this.comparePre(other) -} +var _v3 = _interopRequireDefault(__nccwpck_require__(1472)); -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } +var _v4 = _interopRequireDefault(__nccwpck_require__(6217)); - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) -} +var _nil = _interopRequireDefault(__nccwpck_require__(2381)); -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } +var _version = _interopRequireDefault(__nccwpck_require__(427)); - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) -} +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. 
- this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break +var _parse = _interopRequireDefault(__nccwpck_require__(6385)); - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - default: - throw new Error('invalid increment argument: ' + release) - } - this.format() - this.raw = this.version - return this -} +/***/ }), -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined - } +/***/ 5842: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null - } -} +"use strict"; -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } - } - } - return defaultResult // may be undefined - } -} -exports.compareIdentifiers = compareIdentifiers +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - if (anum && bnum) { - a = +a - b = +b +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); } - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 + return _crypto.default.createHash('md5').update(bytes).digest(); } -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} +var _default = md5; +exports["default"] = _default; -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major -} +/***/ }), -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} +/***/ 2381: +/***/ ((__unused_webpack_module, exports) => { -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} +"use strict"; -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} +/***/ }), -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compare(a, b, loose) - }) -} +/***/ 6385: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.rcompare(a, b, loose) - }) -} +"use strict"; -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -exports.eq = eq 
-function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 -} +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; } -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b +var _default = parse; +exports["default"] = _default; - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b +/***/ }), - case '': - case '=': - case '==': - return eq(a, b, loose) +/***/ 6230: +/***/ ((__unused_webpack_module, exports) => { - case '!=': - return neq(a, b, loose) +"use strict"; - case '>': - return gt(a, b, loose) - case '>=': - return gte(a, b, loose) +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; - case '<': - return lt(a, b, loose) +/***/ }), - case '<=': - return lte(a, b, loose) +/***/ 9784: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - default: - throw new TypeError('Invalid operator: ' + op) - } -} +"use strict"; -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value - } - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) - } +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; } - debug('comp', this) + return rnds8Pool.slice(poolPtr, poolPtr += 16); } -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var m = comp.match(r) +/***/ }), - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) - } +/***/ 8844: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - this.operator = m[1] - if (this.operator === '=') { - this.operator = '' - } +"use strict"; - // if it literally is just '>' or '' then allow anything. - if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) - } -} -Comparator.prototype.toString = function () { - return this.value -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - if (this.semver === ANY) { - return true - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - if (typeof version === 'string') { - version = new SemVer(version, this.options) +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); } - return cmp(version, this.operator, this.semver, this.options) + return _crypto.default.createHash('sha1').update(bytes).digest(); } -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } +var _default = sha1; +exports["default"] = _default; - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +/***/ }), - var rangeTmp +/***/ 1458: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (this.operator === '') { - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } +"use strict"; - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer 
&& differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) - } - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - if (range instanceof Comparator) { - return new Range(range.value, options) - } +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; - if (!(this instanceof Range)) { - return new Range(range, options) +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); } - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease + return uuid; +} - // First, split based on boolean or || - this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) +var _default = stringify; +exports["default"] = _default; - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) - } +/***/ }), - this.format() -} +/***/ 1595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} +"use strict"; -Range.prototype.toString = function () { - return this.range -} -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - range = range.trim() - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? 
re[HYPHENRANGELOOSE] : re[HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[COMPARATORTRIM]) +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace) +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace) +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); - // normalize spaces - range = range.split(/\s+/).join(' ') +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - // At this point, the range is completely trimmed and - // ready to be split into comparators. +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; - var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) +let _clockseq; // Previous uuid creation time - return set -} -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); } - return this.set.some(function (thisComparators) { - return thisComparators.every(function (thisComparator) { - return range.set.some(function (rangeComparators) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - }) - }) -} + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) -} + msecs += 12219292800000; // `time_low` -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. -function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' -} + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') -} + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version -function replaceTilde (comp, options) { - var r = options.loose ? re[TILDELOOSE] : re[TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' 
+ (+m + 1) + '.0' - } + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - debug('tilde return', ret) - return ret - }) -} + b[i++] = clockseq & 0xff; // `node` -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); } -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? re[CARETLOOSE] : re[CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret +var _default = v1; +exports["default"] = _default; - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0' - } - } +/***/ }), - debug('caret return', ret) - return ret - }) -} +/***/ 6993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') -} +"use strict"; -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp - if (gtlt === '=' && anyX) { - gtlt = '' - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 +var _v = _interopRequireDefault(__nccwpck_require__(5920)); - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } +var _md = _interopRequireDefault(__nccwpck_require__(5842)); - ret = gtlt + M + '.' + m + '.' 
+ p - } else if (xm) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (xp) { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - debug('xRange return', ret) +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; - return ret - }) -} +/***/ }), -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[STAR], '') -} +/***/ 5920: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// This function is passed to string.replace(re[HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from - } +"use strict"; - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to - } - return (from + ' ' + to).trim() -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false - } +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); - if (typeof version === 'string') { - version = new SemVer(version, this.options) - } +var _parse = _interopRequireDefault(__nccwpck_require__(6385)); - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); } - return false + + return bytes; } -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); } - } - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. 
- for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; } + + return buf; } - // Version has a -pre, but it's not one of the ones we like. - return false - } + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) - return true -} -exports.satisfies = satisfies -function satisfies (version, range, options) { try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; } -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } +/***/ }), + +/***/ 1472: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; } - }) - return max -} -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null + return buf; } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min + + return (0, _stringify.default)(rnds); } -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) +var _default = v4; +exports["default"] = _default; - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } +/***/ }), - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } +/***/ 6217: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. - var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) - } - - if (minver && range.test(minver)) { - return minver - } - - return null -} - -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} - -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} - -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) -} - -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) - - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } +"use strict"; - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } - // From now on, variable terms are as if we're in "gtr" mode. 
- // but note that everything is flipped for the "ltr" function. +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] +var _v = _interopRequireDefault(__nccwpck_require__(5920)); - var high = null - var low = null +var _sha = _interopRequireDefault(__nccwpck_require__(8844)); - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false - } - } - return true -} +/***/ }), -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? parsed.prerelease : null -} +/***/ 2609: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) -} +"use strict"; -exports.coerce = coerce -function coerce (version) { - if (version instanceof SemVer) { - return version - } - if (typeof version !== 'string') { - return null - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - var match = version.match(re[COERCE]) +var _regex = _interopRequireDefault(__nccwpck_require__(6230)); - if (match == null) { - return null - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - return parse(match[1] + - '.' + (match[2] || '0') + - '.' 
+ (match[3] || '0')) +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); } +var _default = validate; +exports["default"] = _default; /***/ }), -/* 49 */ -/***/ (function(module, __unusedexports, __webpack_require__) { -"use strict"; - -const os = __webpack_require__(87); -const execa = __webpack_require__(955); +/***/ 427: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// Reference: https://www.gaijin.at/en/lstwinver.php -const names = new Map([ - ['10.0', '10'], - ['6.3', '8.1'], - ['6.2', '8'], - ['6.1', '7'], - ['6.0', 'Vista'], - ['5.2', 'Server 2003'], - ['5.1', 'XP'], - ['5.0', '2000'], - ['4.9', 'ME'], - ['4.1', '98'], - ['4.0', '95'] -]); +"use strict"; -const windowsRelease = release => { - const version = /\d+\.\d/.exec(release || os.release()); - if (release && !version) { - throw new Error('`release` argument doesn\'t match `n.n`'); - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - const ver = (version || [])[0]; +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); - // Server 2008, 2012 and 2016 versions are ambiguous with desktop versions and must be detected at runtime. - // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version - // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx - // If the resulting caption contains the year 2008, 2012 or 2016, it is a server version, so return a server OS name. - if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) { - const stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || ''; - const year = (stdout.match(/2008|2012|2016/) || [])[0]; - if (year) { - return `Server ${year}`; - } - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - return names.get(ver); -}; +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } -module.exports = windowsRelease; + return parseInt(uuid.substr(14, 1), 16); +} +var _default = version; +exports["default"] = _default; /***/ }), -/* 50 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var fs = __webpack_require__(747); -var p = __webpack_require__(622); -var minimatch = __webpack_require__(93); - -function patternMatcher(pattern) { - return function(path, stats) { - var minimatcher = new minimatch.Minimatch(pattern, { matchBase: true }); - return (!minimatcher.negate || stats.isFile()) && minimatcher.match(path); - }; -} -function toMatcherFunction(ignoreEntry) { - if (typeof ignoreEntry == "function") { - return ignoreEntry; - } else { - return patternMatcher(ignoreEntry); - } -} +/***/ 4087: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function readdir(path, ignores, callback) { - if (typeof ignores == "function") { - callback = ignores; - ignores = []; - } +"use strict"; - if (!callback) { - return new Promise(function(resolve, reject) { - readdir(path, ignores || [], function(err, data) { - if (err) { - reject(err); - } else { - resolve(data); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Context = void 0; +const fs_1 = __nccwpck_require__(7147); +const os_1 = __nccwpck_require__(2037); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } } - }); - }); - } - - ignores = ignores.map(toMatcherFunction); - - var list = []; - - fs.readdir(path, function(err, files) { - if (err) { - return callback(err); + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; } - - var pending = files.length; - if (!pending) { - // we are done, woop woop - return callback(null, list); + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); } - - files.forEach(function(file) { - var filePath = p.join(path, file); - fs.stat(filePath, function(_err, stats) { - if (_err) { - return callback(_err); - } - - if ( - ignores.some(function(matcher) { - return matcher(filePath, stats); - }) - ) { - pending -= 1; - if (!pending) { - return callback(null, list); - } - return null; + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; } - - if (stats.isDirectory()) { - readdir(filePath, ignores, function(__err, res) { - if (__err) { - return callback(__err); - } - - list = list.concat(res); - pending -= 1; - if (!pending) { - return callback(null, list); - } - }); - } else { - list.push(filePath); - pending -= 1; - if (!pending) { - return callback(null, list); - } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: 
this.payload.repository.name + }; } - }); - }); - }); + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + } } - -module.exports = readdir; - +exports.Context = Context; +//# sourceMappingURL=context.js.map /***/ }), -/* 51 */, -/* 52 */, -/* 53 */, -/* 54 */, -/* 55 */, -/* 56 */ -/***/ (function(module) { - -module.exports = {"version":"2.0","metadata":{"apiVersion":"2014-06-30","endpointPrefix":"cognito-identity","jsonVersion":"1.1","protocol":"json","serviceFullName":"Amazon Cognito Identity","serviceId":"Cognito Identity","signatureVersion":"v4","targetPrefix":"AWSCognitoIdentityService","uid":"cognito-identity-2014-06-30"},"operations":{"CreateIdentityPool":{"input":{"type":"structure","required":["IdentityPoolName","AllowUnauthenticatedIdentities"],"members":{"IdentityPoolName":{},"AllowUnauthenticatedIdentities":{"type":"boolean"},"AllowClassicFlow":{"type":"boolean"},"SupportedLoginProviders":{"shape":"S5"},"DeveloperProviderName":{},"OpenIdConnectProviderARNs":{"shape":"S9"},"CognitoIdentityProviders":{"shape":"Sb"},"SamlProviderARNs":{"shape":"Sg"},"IdentityPoolTags":{"shape":"Sh"}}},"output":{"shape":"Sk"}},"DeleteIdentities":{"input":{"type":"structure","required":["IdentityIdsToDelete"],"members":{"IdentityIdsToDelete":{"type":"list","member":{}}}},"output":{"type":"structure","members":{"UnprocessedIdentityIds":{"type":"list","member":{"type":"structure","members":{"IdentityId":{},"ErrorCode":{}}}}}}},"DeleteIdentityPool":{"input":{"type":"structure","required":["IdentityPoolId"],"members":{"IdentityPoolId":{}}}},"DescribeIdentity":{"input":{"type":"structure","required":["IdentityId"],"members":{"IdentityId":{}}},"output":{"shape":"Sv"}},"DescribeIdentityPool":{"input":{"type":"structure","required":["IdentityPoolId"],"members":{"IdentityPoolId":{}}},"output":{"shape":"Sk"}},"GetCredentialsForIdentity":{"input":{"type":"structure","required":["IdentityId"],"members":{"IdentityId":{},"Logins":{"shape":"S10"},"CustomRoleArn":{}}},"output":{"type":"structure","members":{"IdentityId":{},"Credentials":{"type":"structure","members":{"AccessKeyId":{},"SecretKey":{},"SessionToken":{},"Expiration":{"type":"timestamp"}}}}},"authtype":"none"},"GetId":{"input":{"type":"structure","required":["IdentityPoolId"],"members":{"AccountId":{},"IdentityPoolId":{},"Logins":{"shape":"S10"}}},"output":{"type":"structure","members":{"IdentityId":{}}},"authtype":"none"},"GetIdentityPoolRoles":{"input":{"type":"structure","required":["IdentityPoolId"],"members":{"IdentityPoolId":{}}},"output":{"type":"structure","members":{"IdentityPoolId":{},"Roles":{"shape":"S1c"},"RoleMappings":{"shape":"S1e"}}}},"GetOpenIdToken":{"input":{"type":"structure","required":["IdentityId"],"members":{"IdentityId":{},"Logins":{"shape":"S10"}}},"output":{"type":"structure","members":{"IdentityId":{},"Token":{}}},"authtype":"none"},"GetOpenIdTokenForDeveloperIdentity":{"input":{"type":"structure","required":["IdentityPoolId","Logins"],"members":{"IdentityPoolId":{},"IdentityId":{},"Logins":{"shape":"S10"},"PrincipalTags":{"shape":"S1s"},"TokenDuration":{"type":"long"}}},"output":{"type":"structure","members":{"IdentityId":{},"Token":{}}}},"GetPrincipalTagAttributeMap":{"input":{"type":"structure","required":["IdentityPoolId","IdentityProviderName"],"members":{"IdentityPoolId":{},"IdentityProviderName":{}}},"output":{"type":"structure","members":{"IdentityPoolId":{},"IdentityProviderName":{},"UseDefaults":{"type":"boolean"},"PrincipalTags":{"shape":"S1s"}}}},"ListIdentities
":{"input":{"type":"structure","required":["IdentityPoolId","MaxResults"],"members":{"IdentityPoolId":{},"MaxResults":{"type":"integer"},"NextToken":{},"HideDisabled":{"type":"boolean"}}},"output":{"type":"structure","members":{"IdentityPoolId":{},"Identities":{"type":"list","member":{"shape":"Sv"}},"NextToken":{}}}},"ListIdentityPools":{"input":{"type":"structure","required":["MaxResults"],"members":{"MaxResults":{"type":"integer"},"NextToken":{}}},"output":{"type":"structure","members":{"IdentityPools":{"type":"list","member":{"type":"structure","members":{"IdentityPoolId":{},"IdentityPoolName":{}}}},"NextToken":{}}}},"ListTagsForResource":{"input":{"type":"structure","required":["ResourceArn"],"members":{"ResourceArn":{}}},"output":{"type":"structure","members":{"Tags":{"shape":"Sh"}}}},"LookupDeveloperIdentity":{"input":{"type":"structure","required":["IdentityPoolId"],"members":{"IdentityPoolId":{},"IdentityId":{},"DeveloperUserIdentifier":{},"MaxResults":{"type":"integer"},"NextToken":{}}},"output":{"type":"structure","members":{"IdentityId":{},"DeveloperUserIdentifierList":{"type":"list","member":{}},"NextToken":{}}}},"MergeDeveloperIdentities":{"input":{"type":"structure","required":["SourceUserIdentifier","DestinationUserIdentifier","DeveloperProviderName","IdentityPoolId"],"members":{"SourceUserIdentifier":{},"DestinationUserIdentifier":{},"DeveloperProviderName":{},"IdentityPoolId":{}}},"output":{"type":"structure","members":{"IdentityId":{}}}},"SetIdentityPoolRoles":{"input":{"type":"structure","required":["IdentityPoolId","Roles"],"members":{"IdentityPoolId":{},"Roles":{"shape":"S1c"},"RoleMappings":{"shape":"S1e"}}}},"SetPrincipalTagAttributeMap":{"input":{"type":"structure","required":["IdentityPoolId","IdentityProviderName"],"members":{"IdentityPoolId":{},"IdentityProviderName":{},"UseDefaults":{"type":"boolean"},"PrincipalTags":{"shape":"S1s"}}},"output":{"type":"structure","members":{"IdentityPoolId":{},"IdentityProviderName":{},"UseDefaults":{"type":"boolean"},"PrincipalTags":{"shape":"S1s"}}}},"TagResource":{"input":{"type":"structure","required":["ResourceArn","Tags"],"members":{"ResourceArn":{},"Tags":{"shape":"Sh"}}},"output":{"type":"structure","members":{}}},"UnlinkDeveloperIdentity":{"input":{"type":"structure","required":["IdentityId","IdentityPoolId","DeveloperProviderName","DeveloperUserIdentifier"],"members":{"IdentityId":{},"IdentityPoolId":{},"DeveloperProviderName":{},"DeveloperUserIdentifier":{}}}},"UnlinkIdentity":{"input":{"type":"structure","required":["IdentityId","Logins","LoginsToRemove"],"members":{"IdentityId":{},"Logins":{"shape":"S10"},"LoginsToRemove":{"shape":"Sw"}}},"authtype":"none"},"UntagResource":{"input":{"type":"structure","required":["ResourceArn","TagKeys"],"members":{"ResourceArn":{},"TagKeys":{"type":"list","member":{}}}},"output":{"type":"structure","members":{}}},"UpdateIdentityPool":{"input":{"shape":"Sk"},"output":{"shape":"Sk"}}},"shapes":{"S5":{"type":"map","key":{},"value":{}},"S9":{"type":"list","member":{}},"Sb":{"type":"list","member":{"type":"structure","members":{"ProviderName":{},"ClientId":{},"ServerSideTokenCheck":{"type":"boolean"}}}},"Sg":{"type":"list","member":{}},"Sh":{"type":"map","key":{},"value":{}},"Sk":{"type":"structure","required":["IdentityPoolId","IdentityPoolName","AllowUnauthenticatedIdentities"],"members":{"IdentityPoolId":{},"IdentityPoolName":{},"AllowUnauthenticatedIdentities":{"type":"boolean"},"AllowClassicFlow":{"type":"boolean"},"SupportedLoginProviders":{"shape":"S5"},"DeveloperProviderName":{},
"OpenIdConnectProviderARNs":{"shape":"S9"},"CognitoIdentityProviders":{"shape":"Sb"},"SamlProviderARNs":{"shape":"Sg"},"IdentityPoolTags":{"shape":"Sh"}}},"Sv":{"type":"structure","members":{"IdentityId":{},"Logins":{"shape":"Sw"},"CreationDate":{"type":"timestamp"},"LastModifiedDate":{"type":"timestamp"}}},"Sw":{"type":"list","member":{}},"S10":{"type":"map","key":{},"value":{}},"S1c":{"type":"map","key":{},"value":{}},"S1e":{"type":"map","key":{},"value":{"type":"structure","required":["Type"],"members":{"Type":{},"AmbiguousRoleResolution":{},"RulesConfiguration":{"type":"structure","required":["Rules"],"members":{"Rules":{"type":"list","member":{"type":"structure","required":["Claim","MatchType","Value","RoleARN"],"members":{"Claim":{},"MatchType":{},"Value":{},"RoleARN":{}}}}}}}}},"S1s":{"type":"map","key":{},"value":{}}}}; -/***/ }), -/* 57 */, -/* 58 */, -/* 59 */, -/* 60 */, -/* 61 */, -/* 62 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 5438: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -Object.defineProperty(exports, "v1", { - enumerable: true, - get: function () { - return _v.default; - } -}); -Object.defineProperty(exports, "v3", { - enumerable: true, - get: function () { - return _v2.default; - } -}); -Object.defineProperty(exports, "v4", { - enumerable: true, - get: function () { - return _v3.default; - } -}); -Object.defineProperty(exports, "v5", { - enumerable: true, - get: function () { - return _v4.default; - } +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; }); - -var _v = _interopRequireDefault(__webpack_require__(893)); - -var _v2 = _interopRequireDefault(__webpack_require__(209)); - -var _v3 = _interopRequireDefault(__webpack_require__(733)); - -var _v4 = _interopRequireDefault(__webpack_require__(384)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -/***/ }), -/* 63 */, -/* 64 */, -/* 65 */ -/***/ (function(module) { - -// Generated by CoffeeScript 1.12.7 -(function() { - module.exports = { - Disconnected: 1, - Preceding: 2, - Following: 4, - Contains: 8, - ContainedBy: 16, - ImplementationSpecific: 32 - }; - -}).call(this); - - -/***/ }), -/* 66 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { - -var AWS = __webpack_require__(395); - -var inherit = AWS.util.inherit; - +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(4087)); +const utils_1 = __nccwpck_require__(3030); +exports.context = new Context.Context(); /** - * @api private + * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set */ -AWS.Signers.RequestSigner = inherit({ - constructor: function RequestSigner(request) { - this.request = request; - }, - - setServiceClientId: function setServiceClientId(id) { - this.serviceClientId = id; - }, - - getServiceClientId: function getServiceClientId() { - return this.serviceClientId; - } -}); +function getOctokit(token, options) { + return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); +} +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map -AWS.Signers.RequestSigner.getVersion = function getVersion(version) { - switch (version) { - case 'v2': return AWS.Signers.V2; - case 'v3': return AWS.Signers.V3; - case 's3v4': return AWS.Signers.V4; - case 'v4': return AWS.Signers.V4; - case 's3': return AWS.Signers.S3; - case 'v3https': return AWS.Signers.V3Https; - case 'bearer': return AWS.Signers.Bearer; - } - throw new Error('Unknown signing version ' + version); -}; +/***/ }), -__webpack_require__(220); -__webpack_require__(791); -__webpack_require__(566); -__webpack_require__(754); -__webpack_require__(616); -__webpack_require__(951); -__webpack_require__(143); - - -/***/ }), -/* 67 */, -/* 68 */, -/* 69 */, -/* 70 */, -/* 71 */, -/* 72 */, -/* 73 */, -/* 74 */, -/* 75 */, -/* 76 */, -/* 77 */, -/* 78 */ -/***/ (function(module) { +/***/ 7914: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -module.exports = opts => { - opts = opts || {}; - - const env = opts.env || process.env; - const platform = opts.platform || process.platform; - - if (platform !== 'win32') { - return 'PATH'; - } - - return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; }; - - -/***/ }), -/* 79 */, -/* 80 */, -/* 81 */, -/* 82 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -Object.defineProperty(exports, "__esModule", { value: true }); -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__nccwpck_require__(9925)); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); } - else if (typeof input === 'string' || input instanceof String) { - return input; + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); } - return JSON.stringify(input); + return typeof options.auth === 'string' ? options.auth : `token ${token}`; } -exports.toCommandValue = toCommandValue; +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +} +exports.getApiBaseUrl = getApiBaseUrl; //# sourceMappingURL=utils.js.map /***/ }), -/* 83 */, -/* 84 */, -/* 85 */, -/* 86 */, -/* 87 */ -/***/ (function(module) { - -module.exports = require("os"); -/***/ }), -/* 88 */, -/* 89 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ 3030: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -var AWS = __webpack_require__(395); -__webpack_require__(711); -var inherit = AWS.util.inherit; -var getMetadataServiceEndpoint = __webpack_require__(756); -var URL = __webpack_require__(835).URL; +"use strict"; -/** - * Represents a metadata service available on EC2 instances. Using the - * {request} method, you can receieve metadata about any available resource - * on the metadata service. - * - * You can disable the use of the IMDS by setting the AWS_EC2_METADATA_DISABLED - * environment variable to a truthy value. - * - * @!attribute [r] httpOptions - * @return [map] a map of options to pass to the underlying HTTP request: - * - * * **timeout** (Number) — a timeout value in milliseconds to wait - * before aborting the connection. Set to 0 for no timeout. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokitOptions = exports.GitHub = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(4087)); +const Utils = __importStar(__nccwpck_require__(7914)); +// octokit + plugins +const core_1 = __nccwpck_require__(6762); +const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); +const plugin_paginate_rest_1 = __nccwpck_require__(4193); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +const defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); +/** + * Convience function to correctly format Octokit Options to pass into the constructor. * - * @!macro nobrowser + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set */ -AWS.MetadataService = inherit({ - /** - * @return [String] the endpoint of the instance metadata service - */ - endpoint: getMetadataServiceEndpoint(), +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; +} +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map - /** - * @!ignore - */ +/***/ }), - /** - * Default HTTP options. By default, the metadata service is set to not - * timeout on long requests. This means that on non-EC2 machines, this - * request will never return. If you are calling this operation from an - * environment that may not always run on EC2, set a `timeout` value so - * the SDK will abort the request after a given number of milliseconds. - */ - httpOptions: { timeout: 0 }, +/***/ 9925: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /** - * when enabled, metadata service will not fetch token - */ - disableFetchToken: false, +"use strict"; - /** - * Creates a new MetadataService object with a given set of options. - * - * @option options host [String] the hostname of the instance metadata - * service - * @option options httpOptions [map] a map of options to pass to the - * underlying HTTP request: - * - * * **timeout** (Number) — a timeout value in milliseconds to wait - * before aborting the connection. Set to 0 for no timeout. - * @option options maxRetries [Integer] the maximum number of retries to - * perform for timeout errors - * @option options retryDelayOptions [map] A set of options to configure the - * retry delay on retryable errors. See AWS.Config for details. 
- */ - constructor: function MetadataService(options) { - if (options && options.host) { - options.endpoint = 'http://' + options.host; - delete options.host; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const http = __nccwpck_require__(3685); +const https = __nccwpck_require__(5687); +const pm = __nccwpck_require__(6443); +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); } - AWS.util.update(this, options); - }, - - /** - * Sends a request to the instance metadata service for a given resource. - * - * @param path [String] the path of the resource to get - * - * @param options [map] an optional map used to make request - * - * * **method** (String) — HTTP request method - * - * * **headers** (map) — a map of response header keys and their respective values - * - * @callback callback function(err, data) - * Called when a response is available from the service. - * @param err [Error, null] if an error occurred, this value will be set - * @param data [String, null] if the request was successful, the body of - * the response - */ - request: function request(path, options, callback) { - if (arguments.length === 2) { - callback = options; - options = {}; +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; } - - if (process.env[AWS.util.imdsDisabledEnv]) { - callback(new Error('EC2 Instance Metadata Service access disabled')); - return; + readBody() { + return new Promise(async (resolve, reject) => { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + }); } - - path = path || '/'; - - // Verify that host is a valid URL - if (URL) { new URL(this.endpoint); } - - var httpRequest = new AWS.HttpRequest(this.endpoint + path); - httpRequest.method = options.method || 'GET'; - if (options.headers) { - httpRequest.headers = options.headers; +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if 
(requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } } - AWS.util.handleRequestWithRetries(httpRequest, this, callback); - }, - - /** - * @api private - */ - loadCredentialsCallbacks: [], - - /** - * Fetches metadata token used for getting credentials - * - * @api private - * @callback callback function(err, token) - * Called when token is loaded from the resource - */ - fetchMetadataToken: function fetchMetadataToken(callback) { - var self = this; - var tokenFetchPath = '/latest/api/token'; - self.request( - tokenFetchPath, - { - 'method': 'PUT', - 'headers': { - 'x-aws-ec2-metadata-token-ttl-seconds': '21600' + options(requestUrl, additionalHeaders) { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + } + get(requestUrl, additionalHeaders) { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + } + del(requestUrl, additionalHeaders) { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + } + post(requestUrl, data, additionalHeaders) { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + } + patch(requestUrl, data, additionalHeaders) { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + } + put(requestUrl, data, additionalHeaders) { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + } + head(requestUrl, additionalHeaders) { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return this.request(verb, requestUrl, stream, additionalHeaders); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise + */ + async getJson(requestUrl, additionalHeaders = {}) { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + let res = await this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async postJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async putJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async patchJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + async request(verb, requestUrl, data, headers) { + if (this._disposed) { + throw new Error('Client has already been disposed.'); } - }, - callback - ); - }, - - /** - * Fetches credentials - * - * @api private - * @callback cb function(err, creds) - * Called when credentials are loaded from the resource - */ - fetchCredentials: function fetchCredentials(options, cb) { - var self = this; - var basePath = '/latest/meta-data/iam/security-credentials/'; - - self.request(basePath, options, function (err, roleName) { - if (err) { - self.disableFetchToken = !(err.statusCode === 401); - cb(AWS.util.error( - err, - { - message: 'EC2 Metadata roleName request returned error' - } - )); - return; - } - roleName = roleName.split('\n')[0]; // grab first (and only) role - self.request(basePath + roleName, options, function (credErr, credData) { - if (credErr) { - self.disableFetchToken = !(credErr.statusCode === 401); - cb(AWS.util.error( - credErr, - { - message: 'EC2 Metadata creds request returned error' + let parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + while (numTries < maxTries) { + response = await this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (let i = 0; i < this.handlers.length; i++) { + if (this.handlers[i].canHandleAuthentication(response)) { + authenticationHandler = this.handlers[i]; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + let parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol == 'https:' && + parsedUrl.protocol != parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + await response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (let header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = await this.requestRaw(info, data); + redirectsRemaining--; + } + if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + await response.readBody(); + await this._performExponentialBackoff(numTries); } - )); - return; - } - try { - var credentials = JSON.parse(credData); - cb(null, credentials); - } catch (parseError) { - cb(parseError); } - }); - }); - }, - - /** - * Loads a set of credentials stored in the instance metadata service - * - * @api private - * @callback callback function(err, credentials) - * Called when credentials are loaded from the resource - * @param err [Error] if an error occurred, this value will be set - * @param credentials [Object] the raw JSON object containing all - * metadata from the credentials resource - */ - loadCredentials: function loadCredentials(callback) { - var self = this; - self.loadCredentialsCallbacks.push(callback); - if (self.loadCredentialsCallbacks.length > 1) { return; } - - function callbacks(err, creds) { - var cb; - while ((cb = self.loadCredentialsCallbacks.shift()) !== undefined) { - cb(err, creds); - } + return response; } - - if (self.disableFetchToken) { - self.fetchCredentials({}, callbacks); - } else { - self.fetchMetadataToken(function(tokenError, token) { - if (tokenError) { - if (tokenError.code === 'TimeoutError') { - self.disableFetchToken = true; - } else if (tokenError.retryable === true) { - 
callbacks(AWS.util.error( - tokenError, - { - message: 'EC2 Metadata token request returned error' - } - )); - return; - } else if (tokenError.statusCode === 400) { - callbacks(AWS.util.error( - tokenError, - { - message: 'EC2 Metadata token request returned 400' - } - )); - return; - } - } - var options = {}; - if (token) { - options.headers = { - 'x-aws-ec2-metadata-token': token - }; + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); } - self.fetchCredentials(options, callbacks); - }); - + this._disposed = true; } - } -}); - -/** - * @api private - */ -module.exports = AWS.MetadataService; - - -/***/ }), -/* 90 */, -/* 91 */, -/* 92 */, -/* 93 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = minimatch -minimatch.Minimatch = Minimatch - -var path = { sep: '/' } -try { - path = __webpack_require__(622) -} catch (er) {} - -var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} -var expand = __webpack_require__(95) - -var plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } -} - -// any single thing other than / -// don't need to escape / when using new RegExp() -var qmark = '[^/]' + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return new Promise((resolve, reject) => { + let callbackForResult = function (err, res) { + if (err) { + reject(err); + } + resolve(res); + }; + this.requestRawWithCallback(info, data, callbackForResult); + }); + } + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + let socket; + if (typeof data === 'string') { + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + let handleResult = (err, res) => { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + }; + let req = info.httpModule.request(info.options, (msg) => { + let res = new HttpClientResponse(msg); + handleResult(null, res); + }); + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error('Request timeout: ' + info.options.path), null); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err, null); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + let parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 
443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + this.handlers.forEach(handler => { + handler.prepareRequest(info.options); + }); + } + return info; + } + _mergeHeaders(headers) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + let proxyUrl = pm.getProxyUrl(parsedUrl); + let useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (!!agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (!!this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + if (useProxy) { + // If using proxy, need tunnel + if (!tunnel) { + tunnel = __nccwpck_require__(4294); + } + const agentOptions = { + maxSockets: maxSockets, + keepAlive: this._keepAlive, + proxy: { + ...((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + }), + host: proxyUrl.hostname, + port: proxyUrl.port + } + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? 
https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + } + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + async _processResponse(res, options) { + return new Promise(async (resolve, reject) => { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = await res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. + if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = 'Failed request: (' + statusCode + ')'; + } + let err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + }); + } +} +exports.HttpClient = HttpClient; -// * => any number of characters -var star = qmark + '*?' -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' +/***/ }), -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' +/***/ 6443: +/***/ ((__unused_webpack_module, exports) => { -// characters that need to be escaped in RegExp. -var reSpecials = charSet('().*{}+?[]^$\\!') +"use strict"; -// "abc" -> { a:true, b:true, c:true } -function charSet (s) { - return s.split('').reduce(function (set, c) { - set[c] = true - return set - }, {}) +Object.defineProperty(exports, "__esModule", ({ value: true })); +function getProxyUrl(reqUrl) { + let usingSsl = reqUrl.protocol === 'https:'; + let proxyUrl; + if (checkBypass(reqUrl)) { + return proxyUrl; + } + let proxyVar; + if (usingSsl) { + proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + if (proxyVar) { + proxyUrl = new URL(proxyVar); + } + return proxyUrl; } - -// normalizes slashes. 
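The HttpClient class completed above is what the rest of this bundle uses for HTTP calls: retries only apply to the read-style verbs in RetryableHttpVerbs, redirects drop the authorization header when the hostname changes, and _processResponse resolves a 404 as a null result while rejecting other 4xx/5xx responses. A minimal usage sketch of that API, requiring the unbundled @actions/http-client package rather than this compiled module (the user-agent string and URL are illustrative, not taken from this action):

    const { HttpClient } = require('@actions/http-client');

    // allowRetries/maxRetries correspond to the requestOptions checks at the top of this hunk;
    // retries use exponential backoff and maxTries = maxRetries + 1.
    const client = new HttpClient('illustrative-user-agent', [], {
        allowRetries: true,
        maxRetries: 2
    });

    async function fetchRateLimit() {
        // getJson resolves with { statusCode, result, headers }; a 404 resolves with result: null,
        // while other 4xx/5xx responses reject with an HttpClientError carrying the status code.
        const res = await client.getJson('https://api.github.com/rate_limit');
        return res.result;
    }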
-var slashSplit = /\/+/ - -minimatch.filter = filter -function filter (pattern, options) { - options = options || {} - return function (p, i, list) { - return minimatch(p, pattern, options) - } +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + let upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (let upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; } +exports.checkBypass = checkBypass; -function ext (a, b) { - a = a || {} - b = b || {} - var t = {} - Object.keys(b).forEach(function (k) { - t[k] = b[k] - }) - Object.keys(a).forEach(function (k) { - t[k] = a[k] - }) - return t -} -minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return minimatch +/***/ }), - var orig = minimatch +/***/ 334: +/***/ ((__unused_webpack_module, exports) => { - var m = function minimatch (p, pattern, options) { - return orig.minimatch(p, pattern, ext(def, options)) - } +"use strict"; - m.Minimatch = function Minimatch (pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)) - } - return m -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -Minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return Minimatch - return minimatch.defaults(def).Minimatch +async function auth(token) { + const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; } -function minimatch (p, pattern, options) { - if (typeof pattern !== 'string') { - throw new TypeError('glob pattern string required') - } - - if (!options) options = {} - - // shortcut: comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - return false +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; } - // "" only matches "" - if (pattern.trim() === '') return p === '' - - return new Minimatch(pattern, options).match(p) + return `token ${token}`; } -function Minimatch (pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options) - } - - if (typeof pattern !== 'string') { - throw new TypeError('glob pattern string required') - } - - if (!options) options = {} - pattern = pattern.trim() - - // windows support: need to use /, not \ - if (path.sep !== '/') { - pattern = pattern.split(path.sep).join('/') - } - - this.options = options - this.set = [] - this.pattern = pattern - this.regexp = null - this.negate = false - this.comment = false - this.empty = false - - // make the set of regexps etc. 
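The proxy helpers defined a little earlier in this hunk (getProxyUrl and checkBypass, exported above) are what _getAgent consults before building a tunnel agent: https_proxy/http_proxy supply the proxy URL, and no_proxy switches it off per host or host:port. A rough behavioural sketch that calls those functions directly, under assumed environment values (none of these hosts or the proxy address come from this repository):

    // Assumed environment, for illustration only:
    //   https_proxy = http://proxy.internal:8080
    //   no_proxy    = example.com,internal.test:8443
    checkBypass(new URL('https://example.com/path'));    // true  -> EXAMPLE.COM matches a no_proxy entry
    checkBypass(new URL('https://internal.test:8443/')); // true  -> matched as HOST:PORT
    checkBypass(new URL('https://api.github.com/'));     // false -> not bypassed
    getProxyUrl(new URL('https://api.github.com/'));     // URL for http://proxy.internal:8080, so requests are tunnelled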
- this.make() +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); } -Minimatch.prototype.debug = function () {} - -Minimatch.prototype.make = make -function make () { - // don't do it more than once. - if (this._made) return - - var pattern = this.pattern - var options = this.options - - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true - return +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); } - if (!pattern) { - this.empty = true - return + + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); } - // step 1: figure out negation, etc. - this.parseNegate() + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; - // step 2: expand braces - var set = this.globSet = this.braceExpand() +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map - if (options.debug) this.debug = console.error - this.debug(this.pattern, set) +/***/ }), - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(function (s) { - return s.split(slashSplit) - }) +/***/ 6762: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - this.debug(this.pattern, set) +"use strict"; - // glob --> regexps - set = set.map(function (s, si, set) { - return s.map(this.parse, this) - }, this) - this.debug(this.pattern, set) +Object.defineProperty(exports, "__esModule", ({ value: true })); - // filter out everything that didn't compile properly. - set = set.filter(function (s) { - return s.indexOf(false) === -1 - }) +var universalUserAgent = __nccwpck_require__(5030); +var beforeAfterHook = __nccwpck_require__(3682); +var request = __nccwpck_require__(6234); +var graphql = __nccwpck_require__(8467); +var authToken = __nccwpck_require__(334); - this.debug(this.pattern, set) +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } - this.set = set + return obj; } -Minimatch.prototype.parseNegate = parseNegate -function parseNegate () { - var pattern = this.pattern - var negate = false - var options = this.options - var negateOffset = 0 - - if (options.nonegate) return +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); - for (var i = 0, l = pattern.length - ; i < l && pattern.charAt(i) === '!' 
- ; i++) { - negate = !negate - negateOffset++ + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + if (enumerableOnly) symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + keys.push.apply(keys, symbols); } - if (negateOffset) this.pattern = pattern.substr(negateOffset) - this.negate = negate -} - -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg -// -// Invalid sets are not expanded. -// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -minimatch.braceExpand = function (pattern, options) { - return braceExpand(pattern, options) + return keys; } -Minimatch.prototype.braceExpand = braceExpand +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; -function braceExpand (pattern, options) { - if (!options) { - if (this instanceof Minimatch) { - options = this.options + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { - options = {} + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); } } - pattern = typeof pattern === 'undefined' - ? this.pattern : pattern - - if (typeof pattern === 'undefined') { - throw new TypeError('undefined pattern') - } - - if (options.nobrace || - !pattern.match(/\{.*\}/)) { - // shortcut. no need to expand. - return [pattern] - } - - return expand(pattern) + return target; } -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. -// -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. -Minimatch.prototype.parse = parse -var SUBPARSE = {} -function parse (pattern, isSub) { - if (pattern.length > 1024 * 64) { - throw new TypeError('pattern is too long') - } +const VERSION = "2.5.3"; - var options = this.options +let Octokit = +/** @class */ +(() => { + class Octokit { + constructor(options = {}) { + const hook = new beforeAfterHook.Collection(); + const requestDefaults = { + baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; // prepend default user agent with `options.userAgent` if set - // shortcuts - if (!options.noglobstar && pattern === '**') return GLOBSTAR - if (pattern === '') return '' + requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - var re = '' - var hasMagic = !!options.nocase - var escaping = false - // ? 
=> one single character - var patternListStack = [] - var negativeLists = [] - var stateChar - var inClass = false - var reClassStart = -1 - var classStart = -1 - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. - var patternStart = pattern.charAt(0) === '.' ? '' // anything - // not (start or / followed by . or .. followed by / or end) - : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' - : '(?!\\.)' - var self = this + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } - function clearStateChar () { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case '*': - re += star - hasMagic = true - break - case '?': - re += qmark - hasMagic = true - break - default: - re += '\\' + stateChar - break + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; } - self.debug('clearStateChar %j %j', stateChar, re) - stateChar = false - } - } - for (var i = 0, len = pattern.length, c - ; (i < len) && (c = pattern.charAt(i)) - ; i++) { - this.debug('%s\t%s %s %j', pattern, i, re, c) + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } - // skip over any that are escaped. - if (escaping && reSpecials[c]) { - re += '\\' + c - escaping = false - continue - } + this.request = request.request.defaults(requestDefaults); + this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, { + baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api") + })); + this.log = Object.assign({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, options.log); + this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. - switch (c) { - case '/': - // completely not allowed, even escaped. - // Should already be path-split by now. - return false + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + // (2) + const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - case '\\': - clearStateChar() - escaping = true - continue + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const auth = options.authStrategy(Object.assign({ + request: this.request + }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - // the various stateChar values - // for the "extglob" stuff. - case '?': - case '*': - case '+': - case '@': - case '!': - this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + hook.wrap("request", auth.hook); + this.auth = auth; + } // apply plugins + // https://stackoverflow.com/a/16345172 - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - this.debug(' in class') - if (c === '!' && i === classStart + 1) c = '^' - re += c - continue - } - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. 
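      // Caller-side sketch of the three auth branches documented above; the token value and the
      // createAppAuth strategy are placeholders (assumptions), not part of this action:
      //   new Octokit()                              // (1) this.auth() -> { type: "unauthenticated" }
      //   new Octokit({ auth: "ghp_exampleToken" })  // (2) createTokenAuth adds "authorization: token ghp_exampleToken"
      //   new Octokit({ authStrategy: createAppAuth, // (3) strategy receives Object.assign({ request }, options.auth)
      //                 auth: { appId: 1, privateKey } })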
- // Handle the stateChar, then proceed with this one. - self.debug('call clearStateChar %j', stateChar) - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. - if (options.noext) clearStateChar() - continue + const classConstructor = this.constructor; + classConstructor.plugins.forEach(plugin => { + Object.assign(this, plugin(this, options)); + }); + } - case '(': - if (inClass) { - re += '(' - continue + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null)); } - if (!stateChar) { - re += '\\(' - continue - } + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ - patternListStack.push({ - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close - }) - // negation is (?:(?!js)[^/]*) - re += stateChar === '!' ? '(?:(?!(?:' : '(?:' - this.debug('plType %j %j', stateChar, re) - stateChar = false - continue - case ')': - if (inClass || !patternListStack.length) { - re += '\\)' - continue - } + static plugin(p1, ...p2) { + var _a; - clearStateChar() - hasMagic = true - var pl = patternListStack.pop() - // negation is (?:(?!js)[^/]*) - // The others are (?:) - re += pl.close - if (pl.type === '!') { - negativeLists.push(pl) - } - pl.reEnd = re.length - continue + if (p1 instanceof Array) { + console.warn(["Passing an array of plugins to Octokit.plugin() has been deprecated.", "Instead of:", " Octokit.plugin([plugin1, plugin2, ...])", "Use:", " Octokit.plugin(plugin1, plugin2, ...)"].join("\n")); + } - case '|': - if (inClass || !patternListStack.length || escaping) { - re += '\\|' - escaping = false - continue - } + const currentPlugins = this.plugins; + let newPlugins = [...(p1 instanceof Array ? p1 : [p1]), ...p2]; + const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); + return NewOctokit; + } - clearStateChar() - re += '|' - continue + } - // these are mostly the same in regexp and glob - case '[': - // swallow any state-tracking char before the [ - clearStateChar() + Octokit.VERSION = VERSION; + Octokit.plugins = []; + return Octokit; +})(); - if (inClass) { - re += '\\' + c - continue - } +exports.Octokit = Octokit; +//# sourceMappingURL=index.js.map - inClass = true - classStart = i - reClassStart = re.length - re += c - continue - case ']': - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += '\\' + c - escaping = false - continue - } +/***/ }), - // handle the case where we left a class open. - // "[z-a]" is valid, equivalent to "\[z-a\]" - if (inClass) { - // split where the last [ was, make sure we don't have - // an invalid re. 
if so, re-walk the contents of the - // would-be class to re-translate any characters that - // were passed through as-is - // TODO: It would probably be faster to determine this - // without a try/catch and a new RegExp, but it's tricky - // to do safely. For now, this is safe and works. - var cs = pattern.substring(classStart + 1, i) - try { - RegExp('[' + cs + ']') - } catch (er) { - // not a valid class! - var sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' - hasMagic = hasMagic || sp[1] - inClass = false - continue - } - } +/***/ 9440: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // finish up the class. - hasMagic = true - inClass = false - re += c - continue +"use strict"; - default: - // swallow any state char that wasn't consumed - clearStateChar() - if (escaping) { - // no need - escaping = false - } else if (reSpecials[c] - && !(c === '^' && inClass)) { - re += '\\' - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - re += c +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - } // switch - } // for +var isPlainObject = _interopDefault(__nccwpck_require__(8840)); +var universalUserAgent = __nccwpck_require__(5030); - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - cs = pattern.substr(classStart + 1) - sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] - hasMagic = hasMagic || sp[1] +function lowercaseKeys(object) { + if (!object) { + return {}; } - // handle the case where we had a +( thing at the *end* - // of the pattern. - // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + pl.open.length) - this.debug('setting tail', re, pl) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = '\\' - } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + '|' - }) +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach(key => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) Object.assign(result, { + [key]: options[key] + });else result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { + [key]: options[key] + }); + } + }); + return result; +} - this.debug('tail=%j\n %s', tail, tail, pl, re) - var t = pl.type === '*' ? star - : pl.type === '?' ? 
qmark - : '\\' + pl.type +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { + method, + url + } : { + url: method + }, options); + } else { + options = Object.assign({}, route); + } // lowercase header names before merging with defaults to avoid duplicates - hasMagic = true - re = re.slice(0, pl.reStart) + t + '\\(' + tail - } - // handle trailing things that only matter at the very end. - clearStateChar() - if (escaping) { - // trailing \\ - re += '\\\\' - } + options.headers = lowercaseKeys(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - // only need to apply the nodot start if the re starts with - // something that could conceivably capture a dot - var addPatternStart = false - switch (re.charAt(0)) { - case '.': - case '[': - case '(': addPatternStart = true + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); } - // Hack to work around lack of negative lookbehind in JS - // A pattern like: *.!(x).!(y|z) needs to ensure that a name - // like 'a.xyz.yz' doesn't match. So, the first negative - // lookahead, has to look ALL the way ahead, to the end of - // the pattern. - for (var n = negativeLists.length - 1; n > -1; n--) { - var nl = negativeLists[n] - - var nlBefore = re.slice(0, nl.reStart) - var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) - var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) - var nlAfter = re.slice(nl.reEnd) + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); + return mergedOptions; +} - nlLast += nlAfter +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); - // Handle nested stuff like *(*.js|!(*.json)), where open parens - // mean that we should *not* include the ) in the bit that is considered - // "after" the negated section. - var openParensBefore = nlBefore.split('(').length - 1 - var cleanAfter = nlAfter - for (i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') - } - nlAfter = cleanAfter + if (names.length === 0) { + return url; + } - var dollar = '' - if (nlAfter === '' && isSub !== SUBPARSE) { - dollar = '$' + return url + separator + names.map(name => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); } - var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast - re = newRe - } - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== '' && hasMagic) { - re = '(?=.)' + re - } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} - if (addPatternStart) { - re = patternStart + re - } +const urlVariableRegex = /\{[^}]+\}/g; - // parsing just a piece of a larger pattern. - if (isSub === SUBPARSE) { - return [re, hasMagic] - } +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. 
- if (!hasMagic) { - return globUnescape(pattern) - } +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); - var flags = options.nocase ? 'i' : '' - try { - var regExp = new RegExp('^' + re + '$', flags) - } catch (er) { - // If it was an invalid regular expression, then it can't match - // anything. This trick looks for a character after the end of - // the string, which is of course impossible, except in multi-line - // mode, but it's not a /m regex. - return new RegExp('$.') + if (!matches) { + return []; } - regExp._glob = pattern - regExp._src = re - - return regExp + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); } -minimatch.makeRe = function (pattern, options) { - return new Minimatch(pattern, options || {}).makeRe() +function omit(object, keysToOmit) { + return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); } -Minimatch.prototype.makeRe = makeRe -function makeRe () { - if (this.regexp || this.regexp === false) return this.regexp - - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - var set = this.set - - if (!set.length) { - this.regexp = false - return this.regexp - } - var options = this.options - - var twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - var flags = options.nocase ? 'i' : '' - - var re = set.map(function (pattern) { - return pattern.map(function (p) { - return (p === GLOBSTAR) ? twoStar - : (typeof p === 'string') ? regExpEscape(p) - : p._src - }).join('\\\/') - }).join('|') +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - // must match entire pattern - // ending in a * or ** will make it less strict. 
- re = '^(?:' + re + ')$' +/* istanbul ignore file */ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } - // can match anything, as long as it's not this. - if (this.negate) re = '^(?!' + re + ').*$' + return part; + }).join(""); +} - try { - this.regexp = new RegExp(re, flags) - } catch (ex) { - this.regexp = false - } - return this.regexp +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); } -minimatch.match = function (list, pattern, options) { - options = options || {} - var mm = new Minimatch(pattern, options) - list = list.filter(function (f) { - return mm.match(f) - }) - if (mm.options.nonull && !list.length) { - list.push(pattern) +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; } - return list } -Minimatch.prototype.match = match -function match (f, partial) { - this.debug('match', f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. - if (this.comment) return false - if (this.empty) return f === '' - - if (f === '/' && partial) return true +function isDefined(value) { + return value !== undefined && value !== null; +} - var options = this.options +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} - // windows: need to use /, not \ - if (path.sep !== '/') { - f = f.split(path.sep).join('/') - } +function getValues(context, operator, key, modifier) { + var value = context[key], + result = []; - // treat the test path as a set of pathparts. - f = f.split(slashSplit) - this.debug(this.pattern, 'split', f) + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. - // Either way, return on the first hit. + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } - var set = this.set - this.debug(this.pattern, 'set', set) + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; - // Find the basename of the path by looking for the last non-empty segment - var filename - var i - for (i = f.length - 1; i >= 0; i--) { - filename = f[i] - if (filename) break - } + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } - for (i = 0; i < set.length; i++) { - var pattern = set[i] - var file = f - if (options.matchBase && pattern.length === 1) { - file = [filename] + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } } - var hit = this.matchOne(file, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); } } - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate + return result; } -// set partial to true to test if, for example, -// "/a/b" matches the start of "/*/b/*/d" -// Partial means, if you run out of file before you run -// out of pattern, then that's fine, as long as all -// the parts match. -Minimatch.prototype.matchOne = function (file, pattern, partial) { - var options = this.options - - this.debug('matchOne', - { 'this': this, file: file, pattern: pattern }) - - this.debug('matchOne', file.length, pattern.length) +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} - for (var fi = 0, - pi = 0, - fl = file.length, - pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi++, pi++) { - this.debug('matchOne loop') - var p = pattern[pi] - var f = file[fi] +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; - this.debug(pattern, p, f) + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } - // should be impossible. - // some invalid regexp stuff in the set. - if (p === false) return false + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); - if (p === GLOBSTAR) { - this.debug('GLOBSTAR', [pattern, p, f]) + if (operator && operator !== "+") { + var separator = ","; - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. 
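      // Expansion sketch for the template logic above (owner/repo/page values are placeholders):
      //   parseUrl("/repos/{owner}/{repo}/issues{?page}").expand({ owner: "o", repo: "r", page: 2 })
      //     -> "/repos/o/r/issues?page=2"
      // The "?" operator switches the joiner to "&" and key=value encoding via getValues().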
- // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - var pr = pi + 1 - if (pr === pl) { - this.debug('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for (; fi < fl; fi++) { - if (file[fi] === '.' || file[fi] === '..' || - (!options.dot && file[fi].charAt(0) === '.')) return false + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; } - return true + + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); } + } else { + return encodeReserved(literal); + } + }); +} - // ok, let's see if we can swallow whatever we can. - while (fr < fl) { - var swallowee = file[fr] +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === '.' || swallowee === '..' || - (!options.dot && swallowee.charAt(0) === '.')) { - this.debug('dot detected!', file, fr, pattern, pr) - break - } + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); - // ** swallows a segment, and continue. - this.debug('globstar swallow a segment, and continue') - fr++ - } - } + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } - // no match was found. - // However, in partial mode, we can't say this is necessarily over. - // If there's more *pattern* left, then - if (partial) { - // ran out of file - this.debug('\n>>> no match, partial?', file, fr, pattern, pr) - if (fr === fl) return true - } - return false + const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequset = /application\/octet-stream/i.test(headers.accept); + + if (!isBinaryRequset) { + if (options.mediaType.format) { + // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); } - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. 
- var hit - if (typeof p === 'string') { - if (options.nocase) { - hit = f.toLowerCase() === p.toLowerCase() + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters + + + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; } else { - hit = f === p + headers["content-length"] = 0; } - this.debug('string match', p, f, hit) - } else { - hit = f.match(p) - this.debug('pattern match', p, f, hit) } + } // default content-type for JSON if body is set - if (!hit) return false - } - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. - // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. + // fetch does not allow to set `content-length` header, but we can set body to an empty string - // now either we fell off the end of the pattern, or we're done. - if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. - return partial - } else if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') - return emptyFileEnd - } - // should be unreachable. - throw new Error('wtf?') -} + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } // Only return body/request keys if present -// replace stuff like \* with * -function globUnescape (s) { - return s.replace(/\\(.)/g, '$1') -} -function regExpEscape (s) { - return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') + return Object.assign({ + method, + url, + headers + }, typeof body !== "undefined" ? { + body + } : null, options.request ? 
{ + request: options.request + } : null); } +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} -/***/ }), -/* 94 */, -/* 95 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); +} -var concatMap = __webpack_require__(896); -var balanced = __webpack_require__(284); +const VERSION = "6.0.2"; -module.exports = expandTop; +const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "", + previews: [] + } +}; -function numeric(str) { - return parseInt(str, 10) == str - ? parseInt(str, 10) - : str.charCodeAt(0); -} +const endpoint = withDefaults(null, DEFAULTS); -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} +exports.endpoint = endpoint; +//# sourceMappingURL=index.js.map -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); -} +/***/ }), -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; +/***/ 8467: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - var parts = []; - var m = balanced('{', '}', str); +"use strict"; - if (!m) - return str.split(','); - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); +Object.defineProperty(exports, "__esModule", ({ value: true })); - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } +var request = __nccwpck_require__(6234); +var universalUserAgent = __nccwpck_require__(8908); - parts.push.apply(parts, p); +const VERSION = "4.3.1"; - return parts; -} +class GraphqlError extends Error { + constructor(request, response) { + const message = response.data.errors[0].message; + super(message); + Object.assign(this, response.data); + this.name = "GraphqlError"; + this.request = request; // Maintains proper stack trace (only available on V8) -function expandTop(str) { - if (!str) - return []; + /* istanbul ignore next */ - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. 
- // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } } - return expand(escapeBraces(str), true).map(unescapeBraces); -} - -function identity(e) { - return e; } -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); +const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query"]; +function graphql(request, query, options) { + options = typeof query === "string" ? options = Object.assign({ + query + }, options) : options = query; + const requestOptions = Object.keys(options).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = options[key]; + return result; + } + + if (!result.variables) { + result.variables = {}; + } + + result.variables[key] = options[key]; + return result; + }, {}); + return request(requestOptions).then(response => { + if (response.data.errors) { + throw new GraphqlError(requestOptions, { + data: response.data + }); + } + + return response.data.data; + }); } -function lte(i, y) { - return i <= y; +function withDefaults(request$1, newDefaults) { + const newRequest = request$1.defaults(newDefaults); + + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: request.request.endpoint + }); } -function gte(i, y) { - return i >= y; + +const graphql$1 = withDefaults(request.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); } -function expand(str, isTop) { - var expansions = []; +exports.graphql = graphql$1; +exports.withCustomRequest = withCustomRequest; +//# sourceMappingURL=index.js.map - var m = balanced('{', '}', str); - if (!m || /\$$/.test(m.pre)) return [str]; - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); - } - return [str]; - } +/***/ }), - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length - ? expand(m.post, false) - : ['']; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } - } +/***/ 8908: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. +"use strict"; - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? 
expand(m.post, false) - : ['']; - var N; +Object.defineProperty(exports, "__esModule", ({ value: true })); - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - N = []; +var osName = _interopDefault(__nccwpck_require__(4824)); - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); +function getUserAgent() { + try { + return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; + } catch (error) { + if (/wmic os get Caption/.test(error.message)) { + return "Windows "; } - } else { - N = concatMap(n, function(el) { return expand(el, false) }); - } - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } + throw error; } - - return expansions; } +exports.getUserAgent = getUserAgent; +//# sourceMappingURL=index.js.map /***/ }), -/* 96 */, -/* 97 */, -/* 98 */, -/* 99 */, -/* 100 */, -/* 101 */, -/* 102 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { + +/***/ 4193: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -// For internal use, subject to change. -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__webpack_require__(747)); -const os = __importStar(__webpack_require__(87)); -const utils_1 = __webpack_require__(82); -function issueCommand(command, message) { - const filePath = process.env[`GITHUB_${command}`]; - if (!filePath) { - throw new Error(`Unable to find environment variable for file command ${command}`); - } - if (!fs.existsSync(filePath)) { - throw new Error(`Missing file at path: ${filePath}`); - } - fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { - encoding: 'utf8' - }); -} -exports.issueCommand = issueCommand; -//# sourceMappingURL=file-command.js.map -/***/ }), -/* 103 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { +Object.defineProperty(exports, "__esModule", ({ value: true })); -var AWS = __webpack_require__(395); -__webpack_require__(89); +const VERSION = "2.2.1"; /** - * Represents credentials received from the metadata service on an EC2 instance. - * - * By default, this class will connect to the metadata service using - * {AWS.MetadataService} and attempt to load any available credentials. If it - * can connect, and credentials are available, these will be used with zero - * configuration. 
- * - * This credentials class will by default timeout after 1 second of inactivity - * and retry 3 times. - * If your requests to the EC2 metadata service are timing out, you can increase - * these values by configuring them directly: - * - * ```javascript - * AWS.config.credentials = new AWS.EC2MetadataCredentials({ - * httpOptions: { timeout: 5000 }, // 5 second timeout - * maxRetries: 10, // retry 10 times - * retryDelayOptions: { base: 200 }, // see AWS.Config for information - * logger: console // see AWS.Config for information - * }); - * ``` - * - * If your requests are timing out in connecting to the metadata service, such - * as when testing on a development machine, you can use the connectTimeout - * option, specified in milliseconds, which also defaults to 1 second. - * - * If the requests failed or returns expired credentials, it will - * extend the expiration of current credential, with a warning message. For more - * information, please go to: - * https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html + * Some “list” response that can be paginated have a different response structure * - * @!attribute originalExpiration - * @return [Date] The optional original expiration of the current credential. - * In case of AWS outage, the EC2 metadata will extend expiration of the - * existing credential. + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. * - * @see AWS.Config.retryDelayOptions - * @see AWS.Config.logger + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. * - * @!macro nobrowser + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref */ -AWS.EC2MetadataCredentials = AWS.util.inherit(AWS.Credentials, { - constructor: function EC2MetadataCredentials(options) { - AWS.Credentials.call(this); +function normalizePaginatedListResponse(response) { + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way + // to retrieve the same information. - options = options ? 
AWS.util.copy(options) : {}; - options = AWS.util.merge( - {maxRetries: this.defaultMaxRetries}, options); - if (!options.httpOptions) options.httpOptions = {}; - options.httpOptions = AWS.util.merge( - {timeout: this.defaultTimeout, - connectTimeout: this.defaultConnectTimeout}, - options.httpOptions); + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; - this.metadataService = new AWS.MetadataService(options); - this.logger = options.logger || AWS.config && AWS.config.logger; - }, + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } - /** - * @api private - */ - defaultTimeout: 1000, + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } - /** - * @api private - */ - defaultConnectTimeout: 1000, + response.data.total_count = totalCount; + return response; +} - /** - * @api private - */ - defaultMaxRetries: 3, +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + next() { + if (!url) { + return Promise.resolve({ + done: true + }); + } - /** - * The original expiration of the current credential. In case of AWS - * outage, the EC2 metadata will extend expiration of the existing - * credential. - */ - originalExpiration: undefined, + return requestMethod({ + method, + url, + headers + }).then(normalizePaginatedListResponse).then(response => { + // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: response + }; + }); + } - /** - * Loads the credentials from the instance metadata service - * - * @callback callback function(err) - * Called when the instance metadata service responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). 
- * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, + }) + }; +} - /** - * @api private - * @param callback - */ - load: function load(callback) { - var self = this; - self.metadataService.loadCredentials(function(err, creds) { - if (err) { - if (self.hasLoadedCredentials()) { - self.extendExpirationIfExpired(); - callback(); - } else { - callback(err); - } - } else { - self.setCredentials(creds); - self.extendExpirationIfExpired(); - callback(); - } - }); - }, +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; + } - /** - * Whether this credential has been loaded. - * @api private - */ - hasLoadedCredentials: function hasLoadedCredentials() { - return this.AccessKeyId && this.secretAccessKey; - }, + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); +} - /** - * if expired, extend the expiration by 15 minutes base plus a jitter of 5 - * minutes range. - * @api private - */ - extendExpirationIfExpired: function extendExpirationIfExpired() { - if (this.needsRefresh()) { - this.originalExpiration = this.originalExpiration || this.expireTime; - this.expired = false; - var nextTimeout = 15 * 60 + Math.floor(Math.random() * 5 * 60); - var currentTime = AWS.util.date.getDate().getTime(); - this.expireTime = new Date(currentTime + nextTimeout * 1000); - // TODO: add doc link; - this.logger.warn('Attempting credential expiration extension due to a ' - + 'credential service availability issue. A refresh of these ' - + 'credentials will be attempted again at ' + this.expireTime - + '\nFor more information, please visit: https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html'); +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then(result => { + if (result.done) { + return results; } - }, - /** - * Update the credential with new credential responded from EC2 metadata - * service. - * @api private - */ - setCredentials: function setCredentials(creds) { - var currentTime = AWS.util.date.getDate().getTime(); - var expireTime = new Date(creds.Expiration); - this.expired = currentTime >= expireTime ? true : false; - this.metadata = creds; - this.accessKeyId = creds.AccessKeyId; - this.secretAccessKey = creds.SecretAccessKey; - this.sessionToken = creds.Token; - this.expireTime = expireTime; - } -}); + let earlyExit = false; + function done() { + earlyExit = true; + } -/***/ }), -/* 104 */, -/* 105 */, -/* 106 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); -__webpack_require__(234); -var AWS = __webpack_require__(395); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; + if (earlyExit) { + return results; + } -apiLoader.services['sts'] = {}; -AWS.STS = Service.defineService('sts', ['2011-06-15']); -__webpack_require__(861); -Object.defineProperty(apiLoader.services['sts'], '2011-06-15', { - get: function get() { - var model = __webpack_require__(715); - model.paginators = __webpack_require__(270).pagination; - return model; - }, - enumerable: true, - configurable: true -}); + return gather(octokit, results, iterator, mapFn); + }); +} -module.exports = AWS.STS; +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ + +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; + +exports.paginateRest = paginateRest; +//# sourceMappingURL=index.js.map /***/ }), -/* 107 */, -/* 108 */, -/* 109 */, -/* 110 */, -/* 111 */, -/* 112 */, -/* 113 */, -/* 114 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { -var AWS = __webpack_require__(395); -var path = __webpack_require__(622); -var crypto = __webpack_require__(417); -var iniLoader = AWS.util.iniLoader; +/***/ 3044: +/***/ ((__unused_webpack_module, exports) => { -/** - * Represents credentials from sso.getRoleCredentials API for - * `sso_*` values defined in shared credentials file. - * - * ## Using SSO credentials - * - * The credentials file must specify the information below to use sso: - * - * [profile sso-profile] - * sso_account_id = 012345678901 - * sso_region = **-****-* - * sso_role_name = SampleRole - * sso_start_url = https://d-******.awsapps.com/start - * - * or using the session format: - * - * [profile sso-token] - * sso_session = prod - * sso_account_id = 012345678901 - * sso_role_name = SampleRole - * - * [sso-session prod] - * sso_region = **-****-* - * sso_start_url = https://d-******.awsapps.com/start - * - * This information will be automatically added to your shared credentials file by running - * `aws configure sso`. - * - * ## Using custom profiles - * - * The SDK supports loading credentials for separate profiles. This can be done - * in two ways: - * - * 1. Set the `AWS_PROFILE` environment variable in your process prior to - * loading the SDK. - * 2. Directly load the AWS.SsoCredentials provider: - * - * ```javascript - * var creds = new AWS.SsoCredentials({profile: 'myprofile'}); - * AWS.config.credentials = creds; - * ``` - * - * @!macro nobrowser - */ -AWS.SsoCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * Creates a new SsoCredentials object. - * - * @param options [map] a set of options - * @option options profile [String] (AWS_PROFILE env var or 'default') - * the name of the profile to load. - * @option options filename [String] ('~/.aws/credentials' or defined by - * AWS_SHARED_CREDENTIALS_FILE process env var) - * the filename to use when loading credentials. - * @option options callback [Function] (err) Credentials are eagerly loaded - * by the constructor. When the callback is called with no error, the - * credentials have been loaded successfully. 
- */ - constructor: function SsoCredentials(options) { - AWS.Credentials.call(this); +"use strict"; - options = options || {}; - this.errorCode = 'SsoCredentialsProviderFailure'; - this.expired = true; - this.filename = options.filename; - this.profile = options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile; - this.service = options.ssoClient; - this.httpOptions = options.httpOptions || null; - this.get(options.callback || AWS.util.fn.noop); - }, - - /** - * @api private - */ - load: function load(callback) { - var self = this; +Object.defineProperty(exports, "__esModule", ({ value: true })); - try { - var profiles = AWS.util.getProfilesFromSharedConfig(iniLoader, this.filename); - var profile = profiles[this.profile] || {}; - - if (Object.keys(profile).length === 0) { - throw AWS.util.error( - new Error('Profile ' + this.profile + ' not found'), - { code: self.errorCode } - ); +const Endpoints = { + actions: { + addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { + renamedParameters: { + name: "secret_name" } - - if (profile.sso_session) { - if (!profile.sso_account_id || !profile.sso_role_name) { - throw AWS.util.error( - new Error('Profile ' + this.profile + ' with session ' + profile.sso_session + - ' does not have valid SSO credentials. Required parameters "sso_account_id", "sso_session", ' + - '"sso_role_name". Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html'), - { code: self.errorCode } - ); - } - } else { - if (!profile.sso_start_url || !profile.sso_account_id || !profile.sso_region || !profile.sso_role_name) { - throw AWS.util.error( - new Error('Profile ' + this.profile + ' does not have valid SSO credentials. Required parameters "sso_account_id", "sso_region", ' + - '"sso_role_name", "sso_start_url". Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html'), - { code: self.errorCode } - ); - } + }], + createOrUpdateSecretForRepo: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { + renamed: ["actions", "createOrUpdateRepoSecret"], + renamedParameters: { + name: "secret_name" } - - this.getToken(this.profile, profile, function (err, token) { - if (err) { - return callback(err); - } - var request = { - accessToken: token, - accountId: profile.sso_account_id, - roleName: profile.sso_role_name, - }; - - if (!self.service || self.service.config.region !== profile.sso_region) { - self.service = new AWS.SSO({ - region: profile.sso_region, - httpOptions: self.httpOptions, - }); - } - - self.service.getRoleCredentials(request, function(err, data) { - if (err || !data || !data.roleCredentials) { - callback(AWS.util.error( - err || new Error('Please log in using "aws sso login"'), - { code: self.errorCode } - ), null); - } else if (!data.roleCredentials.accessKeyId || !data.roleCredentials.secretAccessKey || !data.roleCredentials.sessionToken || !data.roleCredentials.expiration) { - throw AWS.util.error(new Error( - 'SSO returns an invalid temporary credential.' 
- )); - } else { - self.expired = false; - self.accessKeyId = data.roleCredentials.accessKeyId; - self.secretAccessKey = data.roleCredentials.secretAccessKey; - self.sessionToken = data.roleCredentials.sessionToken; - self.expireTime = new Date(data.roleCredentials.expiration); - callback(null); - } - }); - }); - } catch (err) { - callback(err); - } - }, - - /** - * @private - * Uses legacy file system retrieval or if sso-session is set, - * use the SSOTokenProvider. - * - * @param {string} profileName - name of the profile. - * @param {object} profile - profile data containing sso_session or sso_start_url etc. - * @param {function} callback - called with (err, (string) token). - * - * @returns {void} - */ - getToken: function getToken(profileName, profile, callback) { - var self = this; - - if (profile.sso_session) { - var _iniLoader = AWS.util.iniLoader; - var ssoSessions = _iniLoader.loadSsoSessionsFrom(); - var ssoSession = ssoSessions[profile.sso_session]; - Object.assign(profile, ssoSession); - - var ssoTokenProvider = new AWS.SSOTokenProvider({ - profile: profileName, - }); - ssoTokenProvider.load(function (err) { - if (err) { - return callback(err); - } - return callback(null, ssoTokenProvider.token); - }); - return; - } - - try { - /** - * The time window (15 mins) that SDK will treat the SSO token expires in before the defined expiration date in token. - * This is needed because server side may have invalidated the token before the defined expiration date. - */ - var EXPIRE_WINDOW_MS = 15 * 60 * 1000; - var hasher = crypto.createHash('sha1'); - var fileName = hasher.update(profile.sso_start_url).digest('hex') + '.json'; - var cachePath = path.join( - iniLoader.getHomeDir(), - '.aws', - 'sso', - 'cache', - fileName - ); - var cacheFile = AWS.util.readFileSync(cachePath); - var cacheContent = null; - if (cacheFile) { - cacheContent = JSON.parse(cacheFile); + }], + createRegistrationToken: ["POST /repos/{owner}/{repo}/actions/runners/registration-token", {}, { + renamed: ["actions", "createRegistrationTokenForRepo"] + }], + createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], + createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createRemoveToken: ["POST /repos/{owner}/{repo}/actions/runners/remove-token", {}, { + renamed: ["actions", "createRemoveTokenForRepo"] + }], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], + deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { + renamedParameters: { + name: "secret_name" } - if (!cacheContent) { - throw AWS.util.error( - new Error('Cached credentials not found under ' + this.profile + ' profile. Please make sure you log in with aws sso login first'), - { code: self.errorCode } - ); + }], + deleteSecretFromRepo: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { + renamed: ["actions", "deleteRepoSecret"], + renamedParameters: { + name: "secret_name" } - - if (!cacheContent.startUrl || !cacheContent.region || !cacheContent.accessToken || !cacheContent.expiresAt) { - throw AWS.util.error( - new Error('Cached credentials are missing required properties. 
Try running aws sso login.') - ); + }], + deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], + deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], + downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], + downloadWorkflowJobLogs: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", {}, { + renamed: ["actions", "downloadJobLogsForWorkflowRun"] + }], + downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key", {}, { + renamed: ["actions", "getRepoPublicKey"] + }], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { + renamedParameters: { + name: "secret_name" } - - if (new Date(cacheContent.expiresAt).getTime() - Date.now() <= EXPIRE_WINDOW_MS) { - throw AWS.util.error(new Error( - 'The SSO session associated with this profile has expired. To refresh this SSO session run aws sso login with the corresponding profile.' - )); + }], + getSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { + renamed: ["actions", "getRepoSecret"], + renamedParameters: { + name: "secret_name" } - - return callback(null, cacheContent.accessToken); - } catch (err) { - return callback(err, null); - } - }, - - /** - * Loads the credentials from the AWS SSO process - * - * @callback callback function(err) - * Called after the AWS SSO process has been executed. When this - * callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). 
- * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - iniLoader.clearCachedFiles(); - this.coalesceRefresh(callback || AWS.util.fn.callback); + }], + getSelfHostedRunner: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}", {}, { + renamed: ["actions", "getSelfHostedRunnerForRepo"] + }], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowJob: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}", {}, { + renamed: ["actions", "getJobForWorkflowRun"] + }], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], + getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listDownloadsForSelfHostedRunnerApplication: ["GET /repos/{owner}/{repo}/actions/runners/downloads", {}, { + renamed: ["actions", "listRunnerApplicationsForRepo"] + }], + listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/runs", {}, { + renamed: ["actions", "listWorkflowRunsForRepo"] + }], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], + listSecretsForRepo: ["GET /repos/{owner}/{repo}/actions/secrets", {}, { + renamed: ["actions", "listRepoSecrets"] + }], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowJobLogs: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", {}, { + renamed: ["actions", "downloadWorkflowJobLogs"] + }], + listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], + listWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs", {}, { + renamed: ["actions", "downloadWorkflowRunLogs"] + }], + listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + removeSelfHostedRunner: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}", {}, { + renamed: ["actions", "deleteSelfHostedRunnerFromRepo"] + }], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] }, -}); - - -/***/ }), -/* 115 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { - -var AWS = __webpack_require__(395); - -/** - * Creates a token provider chain that searches for token in a list of - * token providers specified by the {providers} property. - * - * By default, the chain will use the {defaultProviders} to resolve token. 
- * - * ## Setting Providers - * - * Each provider in the {providers} list should be a function that returns - * a {AWS.Token} object, or a hardcoded token object. The function - * form allows for delayed execution of the Token construction. - * - * ## Resolving Token from a Chain - * - * Call {resolve} to return the first valid token object that can be - * loaded by the provider chain. - * - * For example, to resolve a chain with a custom provider that checks a file - * on disk after the set of {defaultProviders}: - * - * ```javascript - * var diskProvider = new FileTokenProvider('./token.json'); - * var chain = new AWS.TokenProviderChain(); - * chain.providers.push(diskProvider); - * chain.resolve(); - * ``` - * - * The above code will return the `diskProvider` object if the - * file contains token and the `defaultProviders` do not contain - * any token. - * - * @!attribute providers - * @return [Array] - * a list of token objects or functions that return token - * objects. If the provider is a function, the function will be - * executed lazily when the provider needs to be checked for valid - * token. By default, this object will be set to the {defaultProviders}. - * @see defaultProviders - */ -AWS.TokenProviderChain = AWS.util.inherit(AWS.Token, { - - /** - * Creates a new TokenProviderChain with a default set of providers - * specified by {defaultProviders}. - */ - constructor: function TokenProviderChain(providers) { - if (providers) { - this.providers = providers; - } else { - this.providers = AWS.TokenProviderChain.defaultProviders.slice(0); - } - this.resolveCallbacks = []; + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + checkStarringRepo: ["GET /user/starred/{owner}/{repo}", {}, { + renamed: ["activity", "checkRepoIsStarredByAuthenticatedUser"] + }], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscription: ["PUT /notifications", {}, { + renamed: ["activity", "getThreadSubscriptionForAuthenticatedUser"] + }], + getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listEventsForOrg: ["GET /users/{username}/events/orgs/{org}", {}, { + renamed: ["activity", "listOrgEventsForAuthenticatedUser"] + }], + listEventsForUser: ["GET /users/{username}/events", {}, { + renamed: ["activity", "listEventsForAuthenticatedUser"] + }], + listFeeds: ["GET /feeds", {}, { + renamed: ["activity", "getFeeds"] + }], + listNotifications: ["GET /notifications", {}, { + renamed: ["activity", "listNotificationsForAuthenticatedUser"] + }], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listNotificationsForRepo: ["GET /repos/{owner}/{repo}/notifications", {}, { + renamed: ["activity", "listRepoNotificationsForAuthenticatedUser"] + }], + listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listPublicEvents: ["GET /events"], + listPublicEventsForOrg: ["GET /orgs/{org}/events", {}, { + renamed: ["activity", "listPublicOrgEvents"] + }], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + 
listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markAsRead: ["PUT /notifications", {}, { + renamed: ["activity", "markNotificationsAsRead"] + }], + markNotificationsAsRead: ["PUT /notifications"], + markNotificationsAsReadForRepo: ["PUT /repos/{owner}/{repo}/notifications", {}, { + renamed: ["activity", "markRepoNotificationsAsRead"] + }], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + starRepo: ["PUT /user/starred/{owner}/{repo}", {}, { + renamed: ["activity", "starRepoForAuthenticatedUser"] + }], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepo: ["DELETE /user/starred/{owner}/{repo}", {}, { + renamed: ["activity", "unstarRepoForAuthenticatedUser"] + }], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] }, - - /** - * @!method resolvePromise() - * Returns a 'thenable' promise. - * Resolves the provider chain by searching for the first token in {providers}. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function(token) - * Called if the promise is fulfilled and the provider resolves the chain - * to a token object - * @param token [AWS.Token] the token object resolved by the provider chain. - * @callback rejectedCallback function(error) - * Called if the promise is rejected. - * @param err [Error] the error object returned if no token is found. - * @return [Promise] A promise that represents the state of the `resolve` method call. - * @example Calling the `resolvePromise` method. - * var promise = chain.resolvePromise(); - * promise.then(function(token) { ... }, function(err) { ... }); - */ - - /** - * Resolves the provider chain by searching for the first token in {providers}. - * - * @callback callback function(err, token) - * Called when the provider resolves the chain to a token object - * or null if no token can be found. - * - * @param err [Error] the error object returned if no token is found. - * @param token [AWS.Token] the token object resolved by the provider chain. - * @return [AWS.TokenProviderChain] the provider, for chaining. 
- */ - resolve: function resolve(callback) { - var self = this; - if (self.providers.length === 0) { - callback(new Error('No providers')); - return self; - } - - if (self.resolveCallbacks.push(callback) === 1) { - var index = 0; - var providers = self.providers.slice(0); - - function resolveNext(err, token) { - if ((!err && token) || index === providers.length) { - AWS.util.arrayEach(self.resolveCallbacks, function (callback) { - callback(err, token); - }); - self.resolveCallbacks.length = 0; - return; - } - - var provider = providers[index++]; - if (typeof provider === 'function') { - token = provider.call(); - } else { - token = provider; - } - - if (token.get) { - token.get(function (getErr) { - resolveNext(getErr, getErr ? null : token); - }); - } else { - resolveNext(null, token); - } + apps: { + addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", { + mediaType: { + previews: ["machine-man"] } - - resolveNext(); - } - - return self; - } -}); - -/** - * The default set of providers used by a vanilla TokenProviderChain. - * - * In the browser: - * - * ```javascript - * AWS.TokenProviderChain.defaultProviders = [] - * ``` - * - * In Node.js: - * - * ```javascript - * AWS.TokenProviderChain.defaultProviders = [ - * function () { return new AWS.SSOTokenProvider(); }, - * ] - * ``` - */ -AWS.TokenProviderChain.defaultProviders = []; - -/** - * @api private - */ -AWS.TokenProviderChain.addPromisesToClass = function addPromisesToClass(PromiseDependency) { - this.prototype.resolvePromise = AWS.util.promisifyMethod('resolve', PromiseDependency); -}; - -/** - * @api private - */ -AWS.TokenProviderChain.deletePromisesFromClass = function deletePromisesFromClass() { - delete this.prototype.resolvePromise; -}; - -AWS.util.addPromises(AWS.TokenProviderChain); - - -/***/ }), -/* 116 */, -/* 117 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var isPlainObject = _interopDefault(__webpack_require__(696)); -var universalUserAgent = __webpack_require__(250); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? 
{ - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? "&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); + }], + checkAccountIsAssociatedWithAny: ["GET /marketplace_listing/accounts/{account_id}", {}, { + renamed: ["apps", "getSubscriptionPlanForAccount"] + }], + checkAccountIsAssociatedWithAnyStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}", {}, { + renamed: ["apps", "getSubscriptionPlanForAccountStubbed"] + }], + checkToken: ["POST /applications/{client_id}/token"], + createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { + mediaType: { + previews: ["corsair"] } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } + }], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens", { + mediaType: { + previews: ["machine-man"] } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); + }], + createInstallationToken: ["POST /app/installations/{installation_id}/access_tokens", { + mediaType: { + previews: ["machine-man"] } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); + }, { + renamed: ["apps", "createInstallationAccessToken"] + }], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}", { + mediaType: { + previews: ["machine-man"] } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); + }], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app", { + mediaType: { + previews: ["machine-man"] } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequset = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequset) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; + }], + getBySlug: ["GET /apps/{app_slug}", { + mediaType: { + previews: ["machine-man"] } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "5.5.3"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent + }], + getInstallation: ["GET /app/installations/{installation_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + getOrgInstallation: ["GET /orgs/{org}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], + getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getUserInstallation: ["GET /users/{username}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], + listAccountsUserOrOrgOnPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts", {}, { + renamed: ["apps", "listAccountsForPlan"] + }], + listAccountsUserOrOrgOnPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", {}, { + renamed: ["apps", "listAccountsForPlanStubbed"] + }], + listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories", { + mediaType: { + previews: ["machine-man"] + } + }], + listInstallations: ["GET /app/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listInstallationsForAuthenticatedUser: ["GET /user/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listMarketplacePurchasesForAuthenticatedUser: ["GET /user/marketplace_purchases", {}, { + renamed: ["apps", "listSubscriptionsForAuthenticatedUser"] + }], + listMarketplacePurchasesForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed", {}, { + renamed: ["apps", "listSubscriptionsForAuthenticatedUserStubbed"] + }], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listRepos: ["GET /installation/repositories", { + mediaType: { + previews: ["machine-man"] + } + }, { + renamed: ["apps", "listReposAccessibleToInstallation"] + }], + listReposAccessibleToInstallation: ["GET /installation/repositories", { + mediaType: { + previews: ["machine-man"] + } + }], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + resetToken: ["PATCH 
/applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + revokeInstallationToken: ["DELETE /installation/token", {}, { + renamed: ["apps", "revokeInstallationAccessToken"] + }], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), -/* 118 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const os = __webpack_require__(87); - -const nameMap = new Map([ - [19, 'Catalina'], - [18, 'Mojave'], - [17, 'High Sierra'], - [16, 'Sierra'], - [15, 'El Capitan'], - [14, 'Yosemite'], - [13, 'Mavericks'], - [12, 'Mountain Lion'], - [11, 'Lion'], - [10, 'Snow Leopard'], - [9, 'Leopard'], - [8, 'Tiger'], - [7, 'Panther'], - [6, 'Jaguar'], - [5, 'Puma'] -]); - -const macosRelease = release => { - release = Number((release || os.release()).split('.')[0]); - return { - name: nameMap.get(release), - version: '10.' + (release - 4) - }; -}; - -module.exports = macosRelease; -// TODO: remove this in the next major version -module.exports.default = macosRelease; - - -/***/ }), -/* 119 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const github = __importStar(__webpack_require__(469)); -const { GITHUB_TOKEN } = process.env; -exports.default = github.getOctokit(GITHUB_TOKEN, { previews: ['ant-man-preview', 'flash-preview'] }); - - -/***/ }), -/* 120 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -var LRU_1 = __webpack_require__(629); -var CACHE_SIZE = 1000; -/** - * Inspired node-lru-cache[https://github.com/isaacs/node-lru-cache] - */ -var EndpointCache = /** @class */ (function () { - function EndpointCache(maxSize) { - if (maxSize === void 0) { maxSize = CACHE_SIZE; } - this.maxSize = maxSize; - this.cache = new LRU_1.LRUCache(maxSize); - } - ; - Object.defineProperty(EndpointCache.prototype, "size", { - get: function () { - return this.cache.length; - }, - enumerable: true, - configurable: true - }); - EndpointCache.prototype.put = function (key, value) { - var keyString = typeof key !== 'string' ? 
EndpointCache.getKeyString(key) : key; - var endpointRecord = this.populateValue(value); - this.cache.put(keyString, endpointRecord); - }; - EndpointCache.prototype.get = function (key) { - var keyString = typeof key !== 'string' ? EndpointCache.getKeyString(key) : key; - var now = Date.now(); - var records = this.cache.get(keyString); - if (records) { - for (var i = records.length-1; i >= 0; i--) { - var record = records[i]; - if (record.Expire < now) { - records.splice(i, 1); - } - } - if (records.length === 0) { - this.cache.remove(keyString); - return undefined; - } - } - return records; - }; - EndpointCache.getKeyString = function (key) { - var identifiers = []; - var identifierNames = Object.keys(key).sort(); - for (var i = 0; i < identifierNames.length; i++) { - var identifierName = identifierNames[i]; - if (key[identifierName] === undefined) - continue; - identifiers.push(key[identifierName]); - } - return identifiers.join(' '); - }; - EndpointCache.prototype.populateValue = function (endpoints) { - var now = Date.now(); - return endpoints.map(function (endpoint) { return ({ - Address: endpoint.Address || '', - Expire: now + (endpoint.CachePeriodInMinutes || 1) * 60 * 1000 - }); }); - }; - EndpointCache.prototype.empty = function () { - this.cache.empty(); - }; - EndpointCache.prototype.remove = function (key) { - var keyString = typeof key !== 'string' ? EndpointCache.getKeyString(key) : key; - this.cache.remove(keyString); - }; - return EndpointCache; -}()); -exports.EndpointCache = EndpointCache; - -/***/ }), -/* 121 */, -/* 122 */, -/* 123 */, -/* 124 */, -/* 125 */, -/* 126 */, -/* 127 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__webpack_require__(539)); -function getAuthString(token, options) { - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); - } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); - } - return typeof options.auth === 'string' ? 
options.auth : `token ${token}`; -} -exports.getAuthString = getAuthString; -function getProxyAgent(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); -} -exports.getProxyAgent = getProxyAgent; -function getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; -} -exports.getApiBaseUrl = getApiBaseUrl; -//# sourceMappingURL=utils.js.map - -/***/ }), -/* 128 */, -/* 129 */ -/***/ (function(module) { - -module.exports = require("child_process"); - -/***/ }), -/* 130 */, -/* 131 */, -/* 132 */, -/* 133 */, -/* 134 */, -/* 135 */, -/* 136 */, -/* 137 */, -/* 138 */, -/* 139 */, -/* 140 */, -/* 141 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -var net = __webpack_require__(631); -var tls = __webpack_require__(16); -var http = __webpack_require__(605); -var https = __webpack_require__(211); -var events = __webpack_require__(614); -var assert = __webpack_require__(357); -var util = __webpack_require__(669); - - -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; - - -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} - -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} - -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - - -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; - - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs", { + mediaType: { + previews: ["antiope"] } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); - -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } - - // If we are under maxSockets create a new one. 
- self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); - - function onFree() { - self.emit('free', socket, options); - } - - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; - -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); - - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } - - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); - - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } - - function onUpgrade(res, socket, head) { - // Hacky. - process.nextTick(function() { - onConnect(res, socket, head); - }); - } - - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); - - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } - - function onError(cause) { - connectReq.removeAllListeners(); - - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - } -}; - -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); - - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. 
- this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); - } -}; - -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); - - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} - - -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; - } - return host; // for v0.11 or later -} - -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } + }], + createSuite: ["POST /repos/{owner}/{repo}/check-suites", { + mediaType: { + previews: ["antiope"] } - } - } - return target; -} - - -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); - } -} else { - debug = function() {}; -} -exports.debug = debug; // for test - - -/***/ }), -/* 142 */, -/* 143 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { - -var AWS = __webpack_require__(395); - -/** - * @api private - */ -AWS.Signers.Bearer = AWS.util.inherit(AWS.Signers.RequestSigner, { - constructor: function Bearer(request) { - AWS.Signers.RequestSigner.call(this, request); + }], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { + mediaType: { + previews: ["antiope"] + } + }], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { + mediaType: { + previews: ["antiope"] + } + }], + listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { + mediaType: { + previews: ["antiope"] + } + }], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { + mediaType: { + previews: ["antiope"] + } + }], + rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { + mediaType: { + previews: ["antiope"] + } + }], + setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { + mediaType: { + previews: ["antiope"] + } + }], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { + mediaType: { + previews: ["antiope"] + } + }] }, - - addAuthorization: function addAuthorization(token) { - this.request.headers['Authorization'] = 'Bearer ' + token.token; - } -}); - - -/***/ }), -/* 144 */, -/* 145 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const pump = __webpack_require__(453); -const bufferStream = __webpack_require__(966); 
- -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; - } -} - -function getStream(inputStream, options) { - if (!inputStream) { - return Promise.reject(new Error('Expected a stream')); - } - - options = Object.assign({maxBuffer: Infinity}, options); - - const {maxBuffer} = options; - - let stream; - return new Promise((resolve, reject) => { - const rejectPromise = error => { - if (error) { // A null check - error.bufferedData = stream.getBufferedValue(); - } - reject(error); - }; - - stream = pump(inputStream, bufferStream(options), error => { - if (error) { - rejectPromise(error); - return; - } - - resolve(); - }); - - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }).then(() => stream.getBufferedValue()); -} - -module.exports = getStream; -module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); -module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); -module.exports.MaxBufferError = MaxBufferError; - - -/***/ }), -/* 146 */, -/* 147 */, -/* 148 */, -/* 149 */, -/* 150 */, -/* 151 */, -/* 152 */ -/***/ (function(module) { - -module.exports = {"version":2,"waiters":{"BucketExists":{"delay":5,"operation":"HeadBucket","maxAttempts":20,"acceptors":[{"expected":200,"matcher":"status","state":"success"},{"expected":301,"matcher":"status","state":"success"},{"expected":403,"matcher":"status","state":"success"},{"expected":404,"matcher":"status","state":"retry"}]},"BucketNotExists":{"delay":5,"operation":"HeadBucket","maxAttempts":20,"acceptors":[{"expected":404,"matcher":"status","state":"success"}]},"ObjectExists":{"delay":5,"operation":"HeadObject","maxAttempts":20,"acceptors":[{"expected":200,"matcher":"status","state":"success"},{"expected":404,"matcher":"status","state":"retry"}]},"ObjectNotExists":{"delay":5,"operation":"HeadObject","maxAttempts":20,"acceptors":[{"expected":404,"matcher":"status","state":"success"}]}}}; - -/***/ }), -/* 153 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -/* eslint guard-for-in:0 */ -var AWS; - -/** - * A set of utility methods for use with the AWS SDK. - * - * @!attribute abort - * Return this value from an iterator function {each} or {arrayEach} - * to break out of the iteration. 
- * @example Breaking out of an iterator function - * AWS.util.each({a: 1, b: 2, c: 3}, function(key, value) { - * if (key == 'b') return AWS.util.abort; - * }); - * @see each - * @see arrayEach - * @api private - */ -var util = { - environment: 'nodejs', - engine: function engine() { - if (util.isBrowser() && typeof navigator !== 'undefined') { - return navigator.userAgent; - } else { - var engine = process.platform + '/' + process.version; - if (process.env.AWS_EXECUTION_ENV) { - engine += ' exec-env/' + process.env.AWS_EXECUTION_ENV; - } - return engine; - } - }, - - userAgent: function userAgent() { - var name = util.environment; - var agent = 'aws-sdk-' + name + '/' + __webpack_require__(395).VERSION; - if (name === 'nodejs') agent += ' ' + util.engine(); - return agent; - }, - - uriEscape: function uriEscape(string) { - var output = encodeURIComponent(string); - output = output.replace(/[^A-Za-z0-9_.~\-%]+/g, escape); - - // AWS percent-encodes some extra non-standard characters in a URI - output = output.replace(/[*]/g, function(ch) { - return '%' + ch.charCodeAt(0).toString(16).toUpperCase(); - }); - - return output; - }, - - uriEscapePath: function uriEscapePath(string) { - var parts = []; - util.arrayEach(string.split('/'), function (part) { - parts.push(util.uriEscape(part)); - }); - return parts.join('/'); + codeScanning: { + getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"] }, - - urlParse: function urlParse(url) { - return util.url.parse(url); + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getConductCode: ["GET /codes_of_conduct/{key}", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + listConductCodes: ["GET /codes_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }, { + renamed: ["codesOfConduct", "getAllCodesOfConduct"] + }] }, - - urlFormat: function urlFormat(url) { - return util.url.format(url); + emojis: { + get: ["GET /emojis"] }, - - queryStringParse: function queryStringParse(qs) { - return util.querystring.parse(qs); + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listPublicForUser: ["GET /users/{username}/gists", {}, { + renamed: ["gists", "listForUser"] + }], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] }, - - queryParamsToString: function queryParamsToString(params) { - var items = []; - var escape = util.uriEscape; - var sortedKeys = Object.keys(params).sort(); - - util.arrayEach(sortedKeys, function(name) { - var value = params[name]; 
- var ename = escape(name); - var result = ename + '='; - if (Array.isArray(value)) { - var vals = []; - util.arrayEach(value, function(item) { vals.push(escape(item)); }); - result = ename + '=' + vals.sort().join('&' + ename + '='); - } else if (value !== undefined && value !== null) { - result = ename + '=' + escape(value); - } - items.push(result); - }); - - return items.join('&'); + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] }, - - readFileSync: function readFileSync(path) { - if (util.isBrowser()) return null; - return __webpack_require__(747).readFileSync(path, 'utf-8'); + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"], + listTemplates: ["GET /gitignore/templates", {}, { + renamed: ["gitignore", "getAllTemplates"] + }] }, - - base64: { - encode: function encode64(string) { - if (typeof string === 'number') { - throw util.error(new Error('Cannot base64 encode number ' + string)); + interactions: { + addOrUpdateRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] } - if (string === null || typeof string === 'undefined') { - return string; + }, { + renamed: ["interactions", "setRestrictionsForOrg"] + }], + addOrUpdateRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] } - var buf = util.buffer.toBuffer(string); - return buf.toString('base64'); - }, - - decode: function decode64(string) { - if (typeof string === 'number') { - throw util.error(new Error('Cannot base64 decode number ' + string)); + }, { + renamed: ["interactions", "setRestrictionsForRepo"] + }], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] } - if (string === null || typeof string === 'undefined') { - return string; + }], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] } - return util.buffer.toBuffer(string, 'base64'); - } - - }, - - buffer: { - /** - * Buffer constructor for Node buffer and buffer pollyfill - */ - toBuffer: function(data, encoding) { - return (typeof util.Buffer.from === 'function' && util.Buffer.from !== Uint8Array.from) ? 
- util.Buffer.from(data, encoding) : new util.Buffer(data, encoding); - }, - - alloc: function(size, fill, encoding) { - if (typeof size !== 'number') { - throw new Error('size passed to alloc must be a number.'); + }], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] } - if (typeof util.Buffer.alloc === 'function') { - return util.Buffer.alloc(size, fill, encoding); - } else { - var buf = new util.Buffer(size); - if (fill !== undefined && typeof buf.fill === 'function') { - buf.fill(fill, undefined, undefined, encoding); - } - return buf; + }], + removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] } - }, - - toStream: function toStream(buffer) { - if (!util.Buffer.isBuffer(buffer)) buffer = util.buffer.toBuffer(buffer); - - var readable = new (util.stream.Readable)(); - var pos = 0; - readable._read = function(size) { - if (pos >= buffer.length) return readable.push(null); - - var end = pos + size; - if (end > buffer.length) end = buffer.length; - readable.push(buffer.slice(pos, end)); - pos = end; - }; - - return readable; - }, - - /** - * Concatenates a list of Buffer objects. - */ - concat: function(buffers) { - var length = 0, - offset = 0, - buffer = null, i; - - for (i = 0; i < buffers.length; i++) { - length += buffers[i].length; + }], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] } - - buffer = util.buffer.alloc(length); - - for (i = 0; i < buffers.length; i++) { - buffers[i].copy(buffer, offset); - offset += buffers[i].length; + }], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] } - - return buffer; - } + }] }, - - string: { - byteLength: function byteLength(string) { - if (string === null || string === undefined) return 0; - if (typeof string === 'string') string = util.buffer.toBuffer(string); - - if (typeof string.byteLength === 'number') { - return string.byteLength; - } else if (typeof string.length === 'number') { - return string.length; - } else if (typeof string.size === 'number') { - return string.size; - } else if (typeof string.path === 'string') { - return __webpack_require__(747).lstatSync(string.path).size; - } else { - throw util.error(new Error('Cannot determine length of ' + string), - { object: string }); + issues: { + addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkAssignee: ["GET /repos/{owner}/{repo}/assignees/{assignee}", {}, { + renamed: ["issues", "checkUserCanBeAssigned"] + }], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET 
/repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { + mediaType: { + previews: ["mockingbird"] } - }, - - upperFirst: function upperFirst(string) { - return string[0].toUpperCase() + string.substr(1); - }, - - lowerFirst: function lowerFirst(string) { - return string[0].toLowerCase() + string.substr(1); - } + }], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + listMilestonesForRepo: ["GET /repos/{owner}/{repo}/milestones", {}, { + renamed: ["issues", "listMilestones"] + }], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], + removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + removeLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { + renamed: ["issues", "removeAllLabels"] + }], + replaceAllLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { + renamed: ["issues", "setLabels"] + }], + replaceLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { + renamed: ["issues", "replaceAllLabels"] + }], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] }, - - ini: { - parse: function string(ini) { - var currentSection, map = {}; - util.arrayEach(ini.split(/\r?\n/), function(line) { - line = line.split(/(^|\s)[;#]/)[0].trim(); // remove comments and trim - var isSection = line[0] === '[' && line[line.length - 1] === ']'; - if (isSection) { - currentSection = line.substring(1, line.length - 1); - if (currentSection === '__proto__' || currentSection.split(/\s/)[1] === '__proto__') { - throw util.error( - new Error('Cannot load profile name \'' + currentSection + '\' from shared ini file.') - ); - } - } else if (currentSection) { - var indexOfEqualsSign = line.indexOf('='); - var start = 0; - var end = line.length - 1; - var isAssignment = - indexOfEqualsSign !== -1 && indexOfEqualsSign !== start && indexOfEqualsSign !== end; - - if (isAssignment) { - var name = line.substring(0, indexOfEqualsSign).trim(); - var value = line.substring(indexOfEqualsSign + 1).trim(); - - map[currentSection] = map[currentSection] || {}; - map[currentSection][name] = value; - } - } - }); - - return map; - } + licenses: { + get: ["GET 
/licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"], + listCommonlyUsed: ["GET /licenses", {}, { + renamed: ["licenses", "getAllCommonlyUsed"] + }] }, - - fn: { - noop: function() {}, - callback: function (err) { if (err) throw err; }, - - /** - * Turn a synchronous function into as "async" function by making it call - * a callback. The underlying function is called with all but the last argument, - * which is treated as the callback. The callback is passed passed a first argument - * of null on success to mimick standard node callbacks. - */ - makeAsync: function makeAsync(fn, expectedArgs) { - if (expectedArgs && expectedArgs <= fn.length) { - return fn; + markdown: { + render: ["POST /markdown"], + renderRaw: ["POST /markdown/raw", { + headers: { + "content-type": "text/plain; charset=utf-8" } - - return function() { - var args = Array.prototype.slice.call(arguments, 0); - var callback = args.pop(); - var result = fn.apply(null, args); - callback(result); - }; - } + }] }, - - /** - * Date and time utility functions. - */ - date: { - - /** - * @return [Date] the current JavaScript date object. Since all - * AWS services rely on this date object, you can override - * this function to provide a special time value to AWS service - * requests. - */ - getDate: function getDate() { - if (!AWS) AWS = __webpack_require__(395); - if (AWS.config.systemClockOffset) { // use offset when non-zero - return new Date(new Date().getTime() + AWS.config.systemClockOffset); - } else { - return new Date(); + meta: { + get: ["GET /meta"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] } - }, - - /** - * @return [String] the date in ISO-8601 format - */ - iso8601: function iso8601(date) { - if (date === undefined) { date = util.date.getDate(); } - return date.toISOString().replace(/\.\d{3}Z$/, 'Z'); - }, - - /** - * @return [String] the date in RFC 822 format - */ - rfc822: function rfc822(date) { - if (date === undefined) { date = util.date.getDate(); } - return date.toUTCString(); - }, - - /** - * @return [Integer] the UNIX timestamp value for the current time - */ - unixTimestamp: function unixTimestamp(date) { - if (date === undefined) { date = util.date.getDate(); } - return date.getTime() / 1000; - }, - - /** - * @param [String,number,Date] date - * @return [Date] - */ - from: function format(date) { - if (typeof date === 'number') { - return new Date(date * 1000); // unix timestamp - } else { - return new Date(date); - } - }, - - /** - * Given a Date or date-like value, this function formats the - * date into a string of the requested value. 
- * @param [String,number,Date] date - * @param [String] formatter Valid formats are: - # * 'iso8601' - # * 'rfc822' - # * 'unixTimestamp' - * @return [String] - */ - format: function format(date, formatter) { - if (!formatter) formatter = 'iso8601'; - return util.date[formatter](util.date.from(date)); - }, - - parseTimestamp: function parseTimestamp(value) { - if (typeof value === 'number') { // unix timestamp (number) - return new Date(value * 1000); - } else if (value.match(/^\d+$/)) { // unix timestamp - return new Date(value * 1000); - } else if (value.match(/^\d{4}/)) { // iso8601 - return new Date(value); - } else if (value.match(/^\w{3},/)) { // rfc822 - return new Date(value); - } else { - throw util.error( - new Error('unhandled timestamp format: ' + value), - {code: 'TimestampParserError'}); + }], + deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] } - } - - }, - - crypto: { - crc32Table: [ - 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, - 0x706AF48F, 0xE963A535, 0x9E6495A3, 0x0EDB8832, 0x79DCB8A4, - 0xE0D5E91E, 0x97D2D988, 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, - 0x90BF1D91, 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, - 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, 0x136C9856, - 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, 0x14015C4F, 0x63066CD9, - 0xFA0F3D63, 0x8D080DF5, 0x3B6E20C8, 0x4C69105E, 0xD56041E4, - 0xA2677172, 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, - 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, 0x32D86CE3, - 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, 0x26D930AC, 0x51DE003A, - 0xC8D75180, 0xBFD06116, 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, - 0xB8BDA50F, 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, - 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, 0x76DC4190, - 0x01DB7106, 0x98D220BC, 0xEFD5102A, 0x71B18589, 0x06B6B51F, - 0x9FBFE4A5, 0xE8B8D433, 0x7807C9A2, 0x0F00F934, 0x9609A88E, - 0xE10E9818, 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, - 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, 0x6C0695ED, - 0x1B01A57B, 0x8208F4C1, 0xF50FC457, 0x65B0D9C6, 0x12B7E950, - 0x8BBEB8EA, 0xFCB9887C, 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, - 0xFBD44C65, 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, - 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, 0x4369E96A, - 0x346ED9FC, 0xAD678846, 0xDA60B8D0, 0x44042D73, 0x33031DE5, - 0xAA0A4C5F, 0xDD0D7CC9, 0x5005713C, 0x270241AA, 0xBE0B1010, - 0xC90C2086, 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, - 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, 0x59B33D17, - 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, 0xEDB88320, 0x9ABFB3B6, - 0x03B6E20C, 0x74B1D29A, 0xEAD54739, 0x9DD277AF, 0x04DB2615, - 0x73DC1683, 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, - 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, 0xF00F9344, - 0x8708A3D2, 0x1E01F268, 0x6906C2FE, 0xF762575D, 0x806567CB, - 0x196C3671, 0x6E6B06E7, 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, - 0x67DD4ACC, 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, - 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, 0xD1BB67F1, - 0xA6BC5767, 0x3FB506DD, 0x48B2364B, 0xD80D2BDA, 0xAF0A1B4C, - 0x36034AF6, 0x41047A60, 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, - 0x4669BE79, 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, - 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, 0xC5BA3BBE, - 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, 0xC2D7FFA7, 0xB5D0CF31, - 0x2CD99E8B, 0x5BDEAE1D, 0x9B64C2B0, 0xEC63F226, 0x756AA39C, - 0x026D930A, 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, - 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, 0x92D28E9B, - 0xE5D5BE0D, 0x7CDCEFB7, 
0x0BDBDF21, 0x86D3D2D4, 0xF1D4E242, - 0x68DDB3F8, 0x1FDA836E, 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, - 0x18B74777, 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, - 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, 0xA00AE278, - 0xD70DD2EE, 0x4E048354, 0x3903B3C2, 0xA7672661, 0xD06016F7, - 0x4969474D, 0x3E6E77DB, 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, - 0x37D83BF0, 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, - 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, 0xBAD03605, - 0xCDD70693, 0x54DE5729, 0x23D967BF, 0xB3667A2E, 0xC4614AB8, - 0x5D681B02, 0x2A6F2B94, 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, - 0x2D02EF8D], - - crc32: function crc32(data) { - var tbl = util.crypto.crc32Table; - var crc = 0 ^ -1; - - if (typeof data === 'string') { - data = util.buffer.toBuffer(data); + }], + downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] } - - for (var i = 0; i < data.length; i++) { - var code = data.readUInt8(i); - crc = (crc >>> 8) ^ tbl[(crc ^ code) & 0xFF]; + }], + getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] } - return (crc ^ -1) >>> 0; - }, - - hmac: function hmac(key, string, digest, fn) { - if (!digest) digest = 'binary'; - if (digest === 'buffer') { digest = undefined; } - if (!fn) fn = 'sha256'; - if (typeof string === 'string') string = util.buffer.toBuffer(string); - return util.crypto.lib.createHmac(fn, key).update(string).digest(digest); - }, - - md5: function md5(data, digest, callback) { - return util.crypto.hash('md5', data, digest, callback); - }, - - sha256: function sha256(data, digest, callback) { - return util.crypto.hash('sha256', data, digest, callback); - }, - - hash: function(algorithm, data, digest, callback) { - var hash = util.crypto.createHash(algorithm); - if (!digest) { digest = 'binary'; } - if (digest === 'buffer') { digest = undefined; } - if (typeof data === 'string') data = util.buffer.toBuffer(data); - var sliceFn = util.arraySliceFn(data); - var isBuffer = util.Buffer.isBuffer(data); - //Identifying objects with an ArrayBuffer as buffers - if (util.isBrowser() && typeof ArrayBuffer !== 'undefined' && data && data.buffer instanceof ArrayBuffer) isBuffer = true; - - if (callback && typeof data === 'object' && - typeof data.on === 'function' && !isBuffer) { - data.on('data', function(chunk) { hash.update(chunk); }); - data.on('error', function(err) { callback(err); }); - data.on('end', function() { callback(null, hash.digest(digest)); }); - } else if (callback && sliceFn && !isBuffer && - typeof FileReader !== 'undefined') { - // this might be a File/Blob - var index = 0, size = 1024 * 512; - var reader = new FileReader(); - reader.onerror = function() { - callback(new Error('Failed to read data.')); - }; - reader.onload = function() { - var buf = new util.Buffer(new Uint8Array(reader.result)); - hash.update(buf); - index += buf.length; - reader._continueReading(); - }; - reader._continueReading = function() { - if (index >= data.size) { - callback(null, hash.digest(digest)); - return; - } - - var back = index + size; - if (back > data.size) back = data.size; - reader.readAsArrayBuffer(sliceFn.call(data, index, back)); - }; - - reader._continueReading(); - } else { - if (util.isBrowser() && typeof data === 'object' && !isBuffer) { - data = new util.Buffer(new Uint8Array(data)); - } - var out = hash.update(data).digest(digest); - if (callback) callback(null, out); - return out; + }], + getCommitAuthors: ["GET 
/repos/{owner}/{repo}/import/authors"], + getImportProgress: ["GET /repos/{owner}/{repo}/import", {}, { + renamed: ["migrations", "getImportStatus"] + }], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] } - }, - - toHex: function toHex(data) { - var out = []; - for (var i = 0; i < data.length; i++) { - out.push(('0' + data.charCodeAt(i).toString(16)).substr(-2, 2)); + }], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] } - return out.join(''); - }, - - createHash: function createHash(algorithm) { - return util.crypto.lib.createHash(algorithm); - } - - }, - - /** @!ignore */ - - /* Abort constant */ - abort: {}, - - each: function each(object, iterFunction) { - for (var key in object) { - if (Object.prototype.hasOwnProperty.call(object, key)) { - var ret = iterFunction.call(this, key, object[key]); - if (ret === util.abort) break; + }], + listForAuthenticatedUser: ["GET /user/migrations", { + mediaType: { + previews: ["wyandotte"] } - } - }, - - arrayEach: function arrayEach(array, iterFunction) { - for (var idx in array) { - if (Object.prototype.hasOwnProperty.call(array, idx)) { - var ret = iterFunction.call(this, array[idx], parseInt(idx, 10)); - if (ret === util.abort) break; + }], + listForOrg: ["GET /orgs/{org}/migrations", { + mediaType: { + previews: ["wyandotte"] } - } - }, - - update: function update(obj1, obj2) { - util.each(obj2, function iterator(key, item) { - obj1[key] = item; - }); - return obj1; - }, - - merge: function merge(obj1, obj2) { - return util.update(util.copy(obj1), obj2); - }, - - copy: function copy(object) { - if (object === null || object === undefined) return object; - var dupe = {}; - // jshint forin:false - for (var key in object) { - dupe[key] = object[key]; - } - return dupe; - }, - - isEmpty: function isEmpty(obj) { - for (var prop in obj) { - if (Object.prototype.hasOwnProperty.call(obj, prop)) { - return false; + }], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] } - } - return true; - }, - - arraySliceFn: function arraySliceFn(obj) { - var fn = obj.slice || obj.webkitSlice || obj.mozSlice; - return typeof fn === 'function' ? fn : null; - }, - - isType: function isType(obj, type) { - // handle cross-"frame" objects - if (typeof type === 'function') type = util.typeName(type); - return Object.prototype.toString.call(obj) === '[object ' + type + ']'; - }, - - typeName: function typeName(type) { - if (Object.prototype.hasOwnProperty.call(type, 'name')) return type.name; - var str = type.toString(); - var match = str.match(/^\s*function (.+)\(/); - return match ? 
match[1] : str; - }, - - error: function error(err, options) { - var originalError = null; - if (typeof err.message === 'string' && err.message !== '') { - if (typeof options === 'string' || (options && options.message)) { - originalError = util.copy(err); - originalError.message = err.message; + }], + listReposForUser: ["GET /user/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] } - } - err.message = err.message || null; - - if (typeof options === 'string') { - err.message = options; - } else if (typeof options === 'object' && options !== null) { - util.update(err, options); - if (options.message) - err.message = options.message; - if (options.code || options.name) - err.code = options.code || options.name; - if (options.stack) - err.stack = options.stack; - } - - if (typeof Object.defineProperty === 'function') { - Object.defineProperty(err, 'name', {writable: true, enumerable: false}); - Object.defineProperty(err, 'message', {enumerable: true}); - } - - err.name = String(options && options.name || err.name || err.code || 'Error'); - err.time = new Date(); - - if (originalError) err.originalError = originalError; - - return err; - }, - - /** - * @api private - */ - inherit: function inherit(klass, features) { - var newObject = null; - if (features === undefined) { - features = klass; - klass = Object; - newObject = {}; - } else { - var ctor = function ConstructorWrapper() {}; - ctor.prototype = klass.prototype; - newObject = new ctor(); - } - - // constructor not supplied, create pass-through ctor - if (features.constructor === Object) { - features.constructor = function() { - if (klass !== Object) { - return klass.apply(this, arguments); - } - }; - } - - features.constructor.prototype = newObject; - util.update(features.constructor.prototype, features); - features.constructor.__super__ = klass; - return features.constructor; - }, - - /** - * @api private - */ - mixin: function mixin() { - var klass = arguments[0]; - for (var i = 1; i < arguments.length; i++) { - // jshint forin:false - for (var prop in arguments[i].prototype) { - var fn = arguments[i].prototype[prop]; - if (prop !== 'constructor') { - klass.prototype[prop] = fn; - } + }], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] } - } - return klass; - }, - - /** - * @api private - */ - hideProperties: function hideProperties(obj, props) { - if (typeof Object.defineProperty !== 'function') return; - - util.arrayEach(props, function (key) { - Object.defineProperty(obj, key, { - enumerable: false, writable: true, configurable: true }); - }); - }, - - /** - * @api private - */ - property: function property(obj, name, value, enumerable, isValue) { - var opts = { - configurable: true, - enumerable: enumerable !== undefined ? enumerable : true - }; - if (typeof value === 'function' && !isValue) { - opts.get = value; - } - else { - opts.value = value; opts.writable = true; - } - - Object.defineProperty(obj, name, opts); - }, - - /** - * @api private - */ - memoizedProperty: function memoizedProperty(obj, name, get, enumerable) { - var cachedValue = null; - - // build enumerable attribute for each value with lazy accessor. 
- util.property(obj, name, function() { - if (cachedValue === null) { - cachedValue = get(); + }], + unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] } - return cachedValue; - }, enumerable); + }], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] }, - - /** - * TODO Remove in major version revision - * This backfill populates response data without the - * top-level payload name. - * - * @api private - */ - hoistPayloadMember: function hoistPayloadMember(resp) { - var req = resp.request; - var operationName = req.operation; - var operation = req.service.api.operations[operationName]; - var output = operation.output; - if (output.payload && !operation.hasEventOutput) { - var payloadMember = output.members[output.payload]; - var responsePayload = resp.data[output.payload]; - if (payloadMember.type === 'structure') { - util.each(responsePayload, function(key, value) { - util.property(resp.data, key, value, false); - }); + orgs: { + addOrUpdateMembership: ["PUT /orgs/{org}/memberships/{username}", {}, { + renamed: ["orgs", "setMembershipForUser"] + }], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembership: ["GET /orgs/{org}/members/{username}", {}, { + renamed: ["orgs", "checkMembershipForUser"] + }], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembership: ["GET /orgs/{org}/public_members/{username}", {}, { + renamed: ["orgs", "checkPublicMembershipForUser"] + }], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + concealMembership: ["DELETE /orgs/{org}/public_members/{username}", {}, { + renamed: ["orgs", "removePublicMembershipForAuthenticatedUser"] + }], + convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + createHook: ["POST /orgs/{org}/hooks", {}, { + renamed: ["orgs", "createWebhook"] + }], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + deleteHook: ["DELETE /orgs/{org}/hooks/{hook_id}", {}, { + renamed: ["orgs", "deleteWebhook"] + }], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + get: ["GET /orgs/{org}"], + getHook: ["GET /orgs/{org}/hooks/{hook_id}", {}, { + renamed: ["orgs", "getWebhook"] + }], + getMembership: ["GET /orgs/{org}/memberships/{username}", {}, { + renamed: ["orgs", "getMembershipForUser"] + }], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations", { + mediaType: { + previews: ["machine-man"] } - } - }, - - /** - * Compute SHA-256 checksums of streams - * - * @api private - */ - computeSha256: function computeSha256(body, done) { - if (util.isNode()) { - var Stream = util.stream.Stream; - var fs = __webpack_require__(747); - if (typeof Stream === 'function' && body instanceof Stream) { - if (typeof body.path === 'string') { // assume file object - var settings = {}; - if (typeof body.start === 'number') { - settings.start = body.start; - } - if (typeof body.end === 'number') { - settings.end = body.end; - } - body = fs.createReadStream(body.path, settings); - } else { // TODO support other stream types - return done(new Error('Non-file stream objects are ' + - 'not supported with SigV4')); - } + }], + 
listBlockedUsers: ["GET /orgs/{org}/blocks"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listHooks: ["GET /orgs/{org}/hooks", {}, { + renamed: ["orgs", "listWebhooks"] + }], + listInstallations: ["GET /orgs/{org}/installations", { + mediaType: { + previews: ["machine-man"] } - } - - util.crypto.sha256(body, 'hex', function(err, sha) { - if (err) done(err); - else done(null, sha); - }); - }, - - /** - * @api private - */ - isClockSkewed: function isClockSkewed(serverTime) { - if (serverTime) { - util.property(AWS.config, 'isClockSkewed', - Math.abs(new Date().getTime() - serverTime) >= 300000, false); - return AWS.config.isClockSkewed; - } - }, - - applyClockOffset: function applyClockOffset(serverTime) { - if (serverTime) - AWS.config.systemClockOffset = serverTime - new Date().getTime(); - }, - - /** - * @api private - */ - extractRequestId: function extractRequestId(resp) { - var requestId = resp.httpResponse.headers['x-amz-request-id'] || - resp.httpResponse.headers['x-amzn-requestid']; - - if (!requestId && resp.data && resp.data.ResponseMetadata) { - requestId = resp.data.ResponseMetadata.RequestId; - } - - if (requestId) { - resp.requestId = requestId; - } - - if (resp.error) { - resp.error.requestId = requestId; - } + }, { + renamed: ["orgs", "listAppInstallations"] + }], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMemberships: ["GET /user/memberships/orgs", {}, { + renamed: ["orgs", "listMembershipsForAuthenticatedUser"] + }], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingHook: ["POST /orgs/{org}/hooks/{hook_id}/pings", {}, { + renamed: ["orgs", "pingWebhook"] + }], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + publicizeMembership: ["PUT /orgs/{org}/public_members/{username}", {}, { + renamed: ["orgs", "setPublicMembershipForAuthenticatedUser"] + }], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembership: ["DELETE /orgs/{org}/memberships/{username}", {}, { + renamed: ["orgs", "removeMembershipForUser"] + }], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], + removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateHook: ["PATCH /orgs/{org}/hooks/{hook_id}", {}, { + renamed: ["orgs", "updateWebhook"] + }], + updateMembership: ["PATCH /user/memberships/orgs/{org}", {}, { + renamed: ["orgs", "updateMembershipForAuthenticatedUser"] + }], + updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] }, - - /** - * @api private - */ - addPromises: function addPromises(constructors, PromiseDependency) { - var deletePromises = false; - if (PromiseDependency === undefined && AWS && AWS.config) { - PromiseDependency = AWS.config.getPromisesDependency(); - } - if (PromiseDependency === undefined 
&& typeof Promise !== 'undefined') { - PromiseDependency = Promise; - } - if (typeof PromiseDependency !== 'function') deletePromises = true; - if (!Array.isArray(constructors)) constructors = [constructors]; - - for (var ind = 0; ind < constructors.length; ind++) { - var constructor = constructors[ind]; - if (deletePromises) { - if (constructor.deletePromisesFromClass) { - constructor.deletePromisesFromClass(); - } - } else if (constructor.addPromisesToClass) { - constructor.addPromisesToClass(PromiseDependency); + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] } - } - }, - - /** - * @api private - * Return a function that will return a promise whose fate is decided by the - * callback behavior of the given method with `methodName`. The method to be - * promisified should conform to node.js convention of accepting a callback as - * last argument and calling that callback with error as the first argument - * and success value on the second argument. - */ - promisifyMethod: function promisifyMethod(methodName, PromiseDependency) { - return function promise() { - var self = this; - var args = Array.prototype.slice.call(arguments); - return new PromiseDependency(function(resolve, reject) { - args.push(function(err, data) { - if (err) { - reject(err); - } else { - resolve(data); - } - }); - self[methodName].apply(self, args); - }); - }; - }, - - /** - * @api private - */ - isDualstackAvailable: function isDualstackAvailable(service) { - if (!service) return false; - var metadata = __webpack_require__(694); - if (typeof service !== 'string') service = service.serviceIdentifier; - if (typeof service !== 'string' || !metadata.hasOwnProperty(service)) return false; - return !!metadata[service].dualstackAvailable; - }, - - /** - * @api private - */ - calculateRetryDelay: function calculateRetryDelay(retryCount, retryDelayOptions, err) { - if (!retryDelayOptions) retryDelayOptions = {}; - var customBackoff = retryDelayOptions.customBackoff || null; - if (typeof customBackoff === 'function') { - return customBackoff(retryCount, err); - } - var base = typeof retryDelayOptions.base === 'number' ? 
retryDelayOptions.base : 100; - var delay = Math.random() * (Math.pow(2, retryCount) * base); - return delay; - }, - - /** - * @api private - */ - handleRequestWithRetries: function handleRequestWithRetries(httpRequest, options, cb) { - if (!options) options = {}; - var http = AWS.HttpClient.getInstance(); - var httpOptions = options.httpOptions || {}; - var retryCount = 0; - - var errCallback = function(err) { - var maxRetries = options.maxRetries || 0; - if (err && err.code === 'TimeoutError') err.retryable = true; - - // Call `calculateRetryDelay()` only when relevant, see #3401 - if (err && err.retryable && retryCount < maxRetries) { - var delay = util.calculateRetryDelay(retryCount, options.retryDelayOptions, err); - if (delay >= 0) { - retryCount++; - setTimeout(sendRequest, delay + (err.retryAfter || 0)); - return; - } + }], + createCard: ["POST /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] } - cb(err); - }; - - var sendRequest = function() { - var data = ''; - http.handleRequest(httpRequest, httpOptions, function(httpResponse) { - httpResponse.on('data', function(chunk) { data += chunk.toString(); }); - httpResponse.on('end', function() { - var statusCode = httpResponse.statusCode; - if (statusCode < 300) { - cb(null, data); - } else { - var retryAfter = parseInt(httpResponse.headers['retry-after'], 10) * 1000 || 0; - var err = util.error(new Error(), - { - statusCode: statusCode, - retryable: statusCode >= 500 || statusCode === 429 - } - ); - if (retryAfter && err.retryable) err.retryAfter = retryAfter; - errCallback(err); - } - }); - }, errCallback); - }; - - AWS.util.defer(sendRequest); - }, - - /** - * @api private - */ - uuid: { - v4: function uuidV4() { - return __webpack_require__(62).v4(); - } - }, - - /** - * @api private - */ - convertPayloadToString: function convertPayloadToString(resp) { - var req = resp.request; - var operation = req.operation; - var rules = req.service.api.operations[operation].output || {}; - if (rules.payload && resp.data[rules.payload]) { - resp.data[rules.payload] = resp.data[rules.payload].toString(); - } + }], + createColumn: ["POST /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + createForAuthenticatedUser: ["POST /user/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForOrg: ["POST /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForRepo: ["POST /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + delete: ["DELETE /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteCard: ["DELETE /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteColumn: ["DELETE /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + get: ["GET /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getCard: ["GET /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getColumn: ["GET /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { + mediaType: { + previews: ["inertia"] + } + }], + listCards: ["GET /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + listCollaborators: ["GET /projects/{project_id}/collaborators", { + mediaType: { + previews: ["inertia"] + } + }], + listColumns: ["GET 
/projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + listForOrg: ["GET /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForRepo: ["GET /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForUser: ["GET /users/{username}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + moveCard: ["POST /projects/columns/cards/{card_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + moveColumn: ["POST /projects/columns/{column_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + }], + reviewUserPermissionLevel: ["GET /projects/{project_id}/collaborators/{username}/permission", { + mediaType: { + previews: ["inertia"] + } + }, { + renamed: ["projects", "getPermissionForUser"] + }], + update: ["PATCH /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateCard: ["PATCH /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateColumn: ["PATCH /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }] }, - - /** - * @api private - */ - defer: function defer(callback) { - if (typeof process === 'object' && typeof process.nextTick === 'function') { - process.nextTick(callback); - } else if (typeof setImmediate === 'function') { - setImmediate(callback); - } else { - setTimeout(callback, 0); - } + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", {}, { + renamed: ["pulls", "createReviewComment"] + }], + createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + createReviewCommentReply: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", {}, { + renamed: ["pulls", "createReplyForReviewComment"] + }], + createReviewRequest: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { + renamed: ["pulls", "requestReviewers"] + }], + deleteComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { + renamed: ["pulls", "deleteReviewComment"] + }], + deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + deleteReviewRequest: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { + renamed: ["pulls", "removeRequestedReviewers"] + }], + dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { + renamed: ["pulls", "getReviewComment"] + }], + getCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", {}, { + renamed: ["pulls", "listCommentsForReview"] + }], + getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listComments: ["GET 
/repos/{owner}/{repo}/pulls/{pull_number}/comments", {}, { + renamed: ["pulls", "listReviewComments"] + }], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments", {}, { + renamed: ["pulls", "listReviewCommentsForRepo"] + }], + listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviewRequests: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { + renamed: ["pulls", "listRequestedReviewers"] + }], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { + mediaType: { + previews: ["lydian"] + } + }], + updateComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { + renamed: ["pulls", "updateReviewComment"] + }], + updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] }, - - /** - * @api private - */ - getRequestPayloadShape: function getRequestPayloadShape(req) { - var operations = req.service.api.operations; - if (!operations) return undefined; - var operation = (operations || {})[req.operation]; - if (!operation || !operation.input || !operation.input.payload) return undefined; - return operation.input.members[operation.input.payload]; + rateLimit: { + get: ["GET /rate_limit"] }, - - getProfilesFromSharedConfig: function getProfilesFromSharedConfig(iniLoader, filename) { - var profiles = {}; - var profilesFromConfig = {}; - if (process.env[util.configOptInEnv]) { - var profilesFromConfig = iniLoader.loadFrom({ - isConfig: true, - filename: process.env[util.sharedConfigFileEnv] - }); - } - var profilesFromCreds= {}; - try { - var profilesFromCreds = iniLoader.loadFrom({ - filename: filename || - (process.env[util.configOptInEnv] && process.env[util.sharedCredentialsFileEnv]) - }); - } catch (error) { - // if using config, assume it is fully descriptive without a credentials file: - if (!process.env[util.configOptInEnv]) throw error; - } - for (var i = 0, profileNames = Object.keys(profilesFromConfig); i < profileNames.length; i++) { - profiles[profileNames[i]] = objectAssign(profiles[profileNames[i]] || {}, profilesFromConfig[profileNames[i]]); - } - for (var i = 0, profileNames = Object.keys(profilesFromCreds); i < profileNames.length; i++) { - profiles[profileNames[i]] = objectAssign(profiles[profileNames[i]] || {}, profilesFromCreds[profileNames[i]]); - } - return profiles; - - /** - * Roughly the semantics of `Object.assign(target, source)` - */ - function objectAssign(target, source) { - for (var i = 0, keys = Object.keys(source); i < keys.length; i++) { - 
target[keys[i]] = source[keys[i]]; + reactions: { + createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] } - return target; - } - }, - - /** - * @api private - */ - ARN: { - validate: function validateARN(str) { - return str && str.indexOf('arn:') === 0 && str.split(':').length >= 6; - }, - parse: function parseARN(arn) { - var matched = arn.split(':'); - return { - partition: matched[1], - service: matched[2], - region: matched[3], - accountId: matched[4], - resource: matched.slice(5).join(':') - }; - }, - build: function buildARN(arnObject) { - if ( - arnObject.service === undefined || - arnObject.region === undefined || - arnObject.accountId === undefined || - arnObject.resource === undefined - ) throw util.error(new Error('Input ARN object is invalid')); - return 'arn:'+ (arnObject.partition || 'aws') + ':' + arnObject.service + - ':' + arnObject.region + ':' + arnObject.accountId + ':' + arnObject.resource; - } - }, - - /** - * @api private - */ - defaultProfile: 'default', - - /** - * @api private - */ - configOptInEnv: 'AWS_SDK_LOAD_CONFIG', - - /** - * @api private - */ - sharedCredentialsFileEnv: 'AWS_SHARED_CREDENTIALS_FILE', - - /** - * @api private - */ - sharedConfigFileEnv: 'AWS_CONFIG_FILE', - - /** - * @api private - */ - imdsDisabledEnv: 'AWS_EC2_METADATA_DISABLED' -}; - -/** - * @api private - */ -module.exports = util; - - -/***/ }), -/* 154 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { - -var AWS = __webpack_require__(395); - -/** - * Represents AWS token object, which contains {token}, and optional - * {expireTime}. - * Creating a `Token` object allows you to pass around your - * token to configuration and service objects. - * - * Note that this class typically does not need to be constructed manually, - * as the {AWS.Config} and {AWS.Service} classes both accept simple - * options hashes with the two keys. The token from this object will be used - * automatically in operations which require them. - * - * ## Expiring and Refreshing Token - * - * Occasionally token can expire in the middle of a long-running - * application. In this case, the SDK will automatically attempt to - * refresh the token from the storage location if the Token - * class implements the {refresh} method. - * - * If you are implementing a token storage location, you - * will want to create a subclass of the `Token` class and - * override the {refresh} method. This method allows token to be - * retrieved from the backing store, be it a file system, database, or - * some network storage. The method should reset the token attributes - * on the object. - * - * @!attribute token - * @return [String] represents the literal token string. This will typically - * be a base64 encoded string. - * @!attribute expireTime - * @return [Date] a time when token should be considered expired. Used - * in conjunction with {expired}. - * @!attribute expired - * @return [Boolean] whether the token is expired and require a refresh. Used - * in conjunction with {expireTime}. - */ -AWS.Token = AWS.util.inherit({ - /** - * Creates a Token object with a given set of information in options hash. - * @option options token [String] represents the literal token string. - * @option options expireTime [Date] field representing the time at which - * the token expires. 
- * @example Create a token object - * var token = new AWS.Token({ token: 'token' }); - */ - constructor: function Token(options) { - // hide token from being displayed with util.inspect - AWS.util.hideProperties(this, ['token']); - - this.expired = false; - this.expireTime = null; - this.refreshCallbacks = []; - if (arguments.length === 1) { - var options = arguments[0]; - this.token = options.token; - this.expireTime = options.expireTime; - } - }, - - /** - * @return [Integer] the number of seconds before {expireTime} during which - * the token will be considered expired. - */ - expiryWindow: 15, - - /** - * @return [Boolean] whether the Token object should call {refresh} - * @note Subclasses should override this method to provide custom refresh - * logic. - */ - needsRefresh: function needsRefresh() { - var currentTime = AWS.util.date.getDate().getTime(); - var adjustedTime = new Date(currentTime + this.expiryWindow * 1000); - - if (this.expireTime && adjustedTime > this.expireTime) - return true; - - return this.expired || !this.token; - }, - - /** - * Gets the existing token, refreshing them if they are not yet loaded - * or have expired. Users should call this method before using {refresh}, - * as this will not attempt to reload token when they are already - * loaded into the object. - * - * @callback callback function(err) - * When this callback is called with no error, it means either token - * do not need to be refreshed or refreshed token information has - * been loaded into the object (as the `token` property). - * @param err [Error] if an error occurred, this value will be filled - */ - get: function get(callback) { - var self = this; - if (this.needsRefresh()) { - this.refresh(function(err) { - if (!err) self.expired = false; // reset expired flag - if (callback) callback(err); - }); - } else if (callback) { - callback(); - } - }, - - /** - * @!method getPromise() - * Returns a 'thenable' promise. - * Gets the existing token, refreshing it if it's not yet loaded - * or have expired. Users should call this method before using {refresh}, - * as this will not attempt to reload token when it's already - * loaded into the object. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function() - * Called if the promise is fulfilled. When this callback is called, it means - * either token does not need to be refreshed or refreshed token information - * has been loaded into the object (as the `token` property). - * @callback rejectedCallback function(err) - * Called if the promise is rejected. - * @param err [Error] if an error occurred, this value will be filled. - * @return [Promise] A promise that represents the state of the `get` call. - * @example Calling the `getPromise` method. - * var promise = tokenProvider.getPromise(); - * promise.then(function() { ... }, function(err) { ... }); - */ - - /** - * @!method refreshPromise() - * Returns a 'thenable' promise. - * Refreshes the token. Users should call {get} before attempting - * to forcibly refresh token. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function() - * Called if the promise is fulfilled. 
When this callback is called, it - * means refreshed token information has been loaded into the object - * (as the `token` property). - * @callback rejectedCallback function(err) - * Called if the promise is rejected. - * @param err [Error] if an error occurred, this value will be filled. - * @return [Promise] A promise that represents the state of the `refresh` call. - * @example Calling the `refreshPromise` method. - * var promise = tokenProvider.refreshPromise(); - * promise.then(function() { ... }, function(err) { ... }); - */ - - /** - * Refreshes the token. Users should call {get} before attempting - * to forcibly refresh token. - * - * @callback callback function(err) - * When this callback is called with no error, it means refreshed - * token information has been loaded into the object (as the - * `token` property). - * @param err [Error] if an error occurred, this value will be filled - * @note Subclasses should override this class to reset the - * {token} on the token object and then call the callback with - * any error information. - * @see get - */ - refresh: function refresh(callback) { - this.expired = false; - callback(); - }, - - /** - * @api private - * @param callback - */ - coalesceRefresh: function coalesceRefresh(callback, sync) { - var self = this; - if (self.refreshCallbacks.push(callback) === 1) { - self.load(function onLoad(err) { - AWS.util.arrayEach(self.refreshCallbacks, function(callback) { - if (sync) { - callback(err); - } else { - // callback could throw, so defer to ensure all callbacks are notified - AWS.util.defer(function () { - callback(err); - }); - } - }); - self.refreshCallbacks.length = 0; - }); - } - }, - - /** - * @api private - * @param callback - */ - load: function load(callback) { - callback(); - } -}); - -/** - * @api private - */ -AWS.Token.addPromisesToClass = function addPromisesToClass(PromiseDependency) { - this.prototype.getPromise = AWS.util.promisifyMethod('get', PromiseDependency); - this.prototype.refreshPromise = AWS.util.promisifyMethod('refresh', PromiseDependency); -}; - -/** - * @api private - */ -AWS.Token.deletePromisesFromClass = function deletePromisesFromClass() { - delete this.prototype.getPromise; - delete this.prototype.refreshPromise; -}; - -AWS.util.addPromises(AWS.Token); - - -/***/ }), -/* 155 */, -/* 156 */, -/* 157 */, -/* 158 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { - -var AWS = __webpack_require__(395); -var v4Credentials = __webpack_require__(819); -var resolveRegionalEndpointsFlag = __webpack_require__(232); -var s3util = __webpack_require__(338); -var regionUtil = __webpack_require__(546); - -// Pull in managed upload extension -__webpack_require__(856); - -/** - * @api private - */ -var operationsWith200StatusCodeError = { - 'completeMultipartUpload': true, - 'copyObject': true, - 'uploadPartCopy': true -}; - -/** - * @api private - */ - var regionRedirectErrorCodes = [ - 'AuthorizationHeaderMalformed', // non-head operations on virtual-hosted global bucket endpoints - 'BadRequest', // head operations on virtual-hosted global bucket endpoints - 'PermanentRedirect', // non-head operations on path-style or regional endpoints - 301 // head operations on path-style or regional endpoints - ]; - -var OBJECT_LAMBDA_SERVICE = 's3-object-lambda'; - -AWS.util.update(AWS.S3.prototype, { - /** - * @api private - */ - getSignatureVersion: function getSignatureVersion(request) { - var defaultApiVersion = this.api.signatureVersion; - var userDefinedVersion = this._originalConfig ? 
this._originalConfig.signatureVersion : null; - var regionDefinedVersion = this.config.signatureVersion; - var isPresigned = request ? request.isPresigned() : false; - /* - 1) User defined version specified: - a) always return user defined version - 2) No user defined version specified: - a) If not using presigned urls, default to V4 - b) If using presigned urls, default to lowest version the region supports - */ - if (userDefinedVersion) { - userDefinedVersion = userDefinedVersion === 'v2' ? 's3' : userDefinedVersion; - return userDefinedVersion; - } - if (isPresigned !== true) { - defaultApiVersion = 'v4'; - } else if (regionDefinedVersion) { - defaultApiVersion = regionDefinedVersion; - } - return defaultApiVersion; - }, - - /** - * @api private - */ - getSigningName: function getSigningName(req) { - if (req && req.operation === 'writeGetObjectResponse') { - return OBJECT_LAMBDA_SERVICE; - } - - var _super = AWS.Service.prototype.getSigningName; - return (req && req._parsedArn && req._parsedArn.service) - ? req._parsedArn.service - : _super.call(this); - }, - - /** - * @api private - */ - getSignerClass: function getSignerClass(request) { - var signatureVersion = this.getSignatureVersion(request); - return AWS.Signers.RequestSigner.getVersion(signatureVersion); - }, - - /** - * @api private - */ - validateService: function validateService() { - var msg; - var messages = []; - - // default to us-east-1 when no region is provided - if (!this.config.region) this.config.region = 'us-east-1'; - - if (!this.config.endpoint && this.config.s3BucketEndpoint) { - messages.push('An endpoint must be provided when configuring ' + - '`s3BucketEndpoint` to true.'); - } - if (messages.length === 1) { - msg = messages[0]; - } else if (messages.length > 1) { - msg = 'Multiple configuration errors:\n' + messages.join('\n'); - } - if (msg) { - throw AWS.util.error(new Error(), - {name: 'InvalidEndpoint', message: msg}); - } - }, - - /** - * @api private - */ - shouldDisableBodySigning: function shouldDisableBodySigning(request) { - var signerClass = this.getSignerClass(); - if (this.config.s3DisableBodySigning === true && signerClass === AWS.Signers.V4 - && request.httpRequest.endpoint.protocol === 'https:') { - return true; - } - return false; - }, - - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - var prependListener = true; - request.addListener('validate', this.validateScheme); - request.addListener('validate', this.validateBucketName, prependListener); - request.addListener('validate', this.optInUsEast1RegionalEndpoint, prependListener); - - request.removeListener('validate', - AWS.EventListeners.Core.VALIDATE_REGION); - request.addListener('build', this.addContentType); - request.addListener('build', this.computeContentMd5); - request.addListener('build', this.computeSseCustomerKeyMd5); - request.addListener('build', this.populateURI); - request.addListener('afterBuild', this.addExpect100Continue); - request.addListener('extractError', this.extractError); - request.addListener('extractData', AWS.util.hoistPayloadMember); - request.addListener('extractData', this.extractData); - request.addListener('extractData', this.extractErrorFrom200Response); - request.addListener('beforePresign', this.prepareSignedUrl); - if (this.shouldDisableBodySigning(request)) { - request.removeListener('afterBuild', AWS.EventListeners.Core.COMPUTE_SHA256); - request.addListener('afterBuild', this.disableBodySigning); - } - //deal with ARNs supplied to Bucket - if 
(request.operation !== 'createBucket' && s3util.isArnInParam(request, 'Bucket')) { - // avoid duplicate parsing in the future - request._parsedArn = AWS.util.ARN.parse(request.params.Bucket); - - request.removeListener('validate', this.validateBucketName); - request.removeListener('build', this.populateURI); - if (request._parsedArn.service === 's3') { - request.addListener('validate', s3util.validateS3AccessPointArn); - request.addListener('validate', this.validateArnResourceType); - request.addListener('validate', this.validateArnRegion); - } else if (request._parsedArn.service === 's3-outposts') { - request.addListener('validate', s3util.validateOutpostsAccessPointArn); - request.addListener('validate', s3util.validateOutpostsArn); - request.addListener('validate', s3util.validateArnRegion); + }], + createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] } - request.addListener('validate', s3util.validateArnAccount); - request.addListener('validate', s3util.validateArnService); - request.addListener('build', this.populateUriFromAccessPointArn); - request.addListener('build', s3util.validatePopulateUriFromArn); - return; - } - //listeners regarding region inference - request.addListener('validate', this.validateBucketEndpoint); - request.addListener('validate', this.correctBucketRegionFromCache); - request.onAsync('extractError', this.requestBucketRegion); - if (AWS.util.isBrowser()) { - request.onAsync('retry', this.reqRegionForNetworkingError); - } - }, - - /** - * @api private - */ - validateScheme: function(req) { - var params = req.params, - scheme = req.httpRequest.endpoint.protocol, - sensitive = params.SSECustomerKey || params.CopySourceSSECustomerKey; - if (sensitive && scheme !== 'https:') { - var msg = 'Cannot send SSE keys over HTTP. 
Set \'sslEnabled\'' + - 'to \'true\' in your configuration'; - throw AWS.util.error(new Error(), - { code: 'ConfigError', message: msg }); - } - }, - - /** - * @api private - */ - validateBucketEndpoint: function(req) { - if (!req.params.Bucket && req.service.config.s3BucketEndpoint) { - var msg = 'Cannot send requests to root API with `s3BucketEndpoint` set.'; - throw AWS.util.error(new Error(), - { code: 'ConfigError', message: msg }); - } - }, - - /** - * @api private - */ - validateArnRegion: function validateArnRegion(req) { - s3util.validateArnRegion(req, { allowFipsEndpoint: true }); - }, - - /** - * Validate resource-type supplied in S3 ARN - */ - validateArnResourceType: function validateArnResourceType(req) { - var resource = req._parsedArn.resource; - - if ( - resource.indexOf('accesspoint:') !== 0 && - resource.indexOf('accesspoint/') !== 0 - ) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'ARN resource should begin with \'accesspoint/\'' - }); - } - }, - - /** - * @api private - */ - validateBucketName: function validateBucketName(req) { - var service = req.service; - var signatureVersion = service.getSignatureVersion(req); - var bucket = req.params && req.params.Bucket; - var key = req.params && req.params.Key; - var slashIndex = bucket && bucket.indexOf('/'); - if (bucket && slashIndex >= 0) { - if (typeof key === 'string' && slashIndex > 0) { - req.params = AWS.util.copy(req.params); - // Need to include trailing slash to match sigv2 behavior - var prefix = bucket.substr(slashIndex + 1) || ''; - req.params.Key = prefix + '/' + key; - req.params.Bucket = bucket.substr(0, slashIndex); - } else if (signatureVersion === 'v4') { - var msg = 'Bucket names cannot contain forward slashes. Bucket: ' + bucket; - throw AWS.util.error(new Error(), - { code: 'InvalidBucket', message: msg }); + }], + createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] } - } - }, - - /** - * @api private - */ - isValidAccelerateOperation: function isValidAccelerateOperation(operation) { - var invalidOperations = [ - 'createBucket', - 'deleteBucket', - 'listBuckets' - ]; - return invalidOperations.indexOf(operation) === -1; - }, - - /** - * When us-east-1 region endpoint configuration is set, in stead of sending request to - * global endpoint(e.g. 's3.amazonaws.com'), we will send request to - * 's3.us-east-1.amazonaws.com'. - * @api private - */ - optInUsEast1RegionalEndpoint: function optInUsEast1RegionalEndpoint(req) { - var service = req.service; - var config = service.config; - config.s3UsEast1RegionalEndpoint = resolveRegionalEndpointsFlag(service._originalConfig, { - env: 'AWS_S3_US_EAST_1_REGIONAL_ENDPOINT', - sharedConfig: 's3_us_east_1_regional_endpoint', - clientConfig: 's3UsEast1RegionalEndpoint' - }); - if ( - !(service._originalConfig || {}).endpoint && - req.httpRequest.region === 'us-east-1' && - config.s3UsEast1RegionalEndpoint === 'regional' && - req.httpRequest.endpoint.hostname.indexOf('s3.amazonaws.com') >= 0 - ) { - var insertPoint = config.endpoint.indexOf('.amazonaws.com'); - regionalEndpoint = config.endpoint.substring(0, insertPoint) + - '.us-east-1' + config.endpoint.substring(insertPoint); - req.httpRequest.updateEndpoint(regionalEndpoint); - } - }, - - /** - * S3 prefers dns-compatible bucket names to be moved from the uri path - * to the hostname as a sub-domain. 
This is not possible, even for dns-compat - * buckets when using SSL and the bucket name contains a dot ('.'). The - * ssl wildcard certificate is only 1-level deep. - * - * @api private - */ - populateURI: function populateURI(req) { - var httpRequest = req.httpRequest; - var b = req.params.Bucket; - var service = req.service; - var endpoint = httpRequest.endpoint; - if (b) { - if (!service.pathStyleBucketName(b)) { - if (service.config.useAccelerateEndpoint && service.isValidAccelerateOperation(req.operation)) { - if (service.config.useDualstackEndpoint) { - endpoint.hostname = b + '.s3-accelerate.dualstack.amazonaws.com'; - } else { - endpoint.hostname = b + '.s3-accelerate.amazonaws.com'; - } - } else if (!service.config.s3BucketEndpoint) { - endpoint.hostname = - b + '.' + endpoint.hostname; - } - - var port = endpoint.port; - if (port !== 80 && port !== 443) { - endpoint.host = endpoint.hostname + ':' + - endpoint.port; - } else { - endpoint.host = endpoint.hostname; - } - - httpRequest.virtualHostedBucket = b; // needed for signing the request - service.removeVirtualHostedBucketFromPath(req); + }], + createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] } - } - }, - - /** - * Takes the bucket name out of the path if bucket is virtual-hosted - * - * @api private - */ - removeVirtualHostedBucketFromPath: function removeVirtualHostedBucketFromPath(req) { - var httpRequest = req.httpRequest; - var bucket = httpRequest.virtualHostedBucket; - if (bucket && httpRequest.path) { - if (req.params && req.params.Key) { - var encodedS3Key = '/' + AWS.util.uriEscapePath(req.params.Key); - if (httpRequest.path.indexOf(encodedS3Key) === 0 && (httpRequest.path.length === encodedS3Key.length || httpRequest.path[encodedS3Key.length] === '?')) { - //path only contains key or path contains only key and querystring - return; - } + }], + createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] } - httpRequest.path = httpRequest.path.replace(new RegExp('/' + bucket), ''); - if (httpRequest.path[0] !== '/') { - httpRequest.path = '/' + httpRequest.path; + }], + createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] } - } - }, - - /** - * When user supply an access point ARN in the Bucket parameter, we need to - * populate the URI according to the ARN. - */ - populateUriFromAccessPointArn: function populateUriFromAccessPointArn(req) { - var accessPointArn = req._parsedArn; - - var isOutpostArn = accessPointArn.service === 's3-outposts'; - var isObjectLambdaArn = accessPointArn.service === 's3-object-lambda'; - - var outpostsSuffix = isOutpostArn ? '.' + accessPointArn.outpostId: ''; - var serviceName = isOutpostArn ? 's3-outposts': 's3-accesspoint'; - var fipsSuffix = !isOutpostArn && req.service.config.useFipsEndpoint ? '-fips': ''; - var dualStackSuffix = !isOutpostArn && - req.service.config.useDualstackEndpoint ? '.dualstack' : ''; - - var endpoint = req.httpRequest.endpoint; - var dnsSuffix = regionUtil.getEndpointSuffix(accessPointArn.region); - var useArnRegion = req.service.config.s3UseArnRegion; - - endpoint.hostname = [ - accessPointArn.accessPoint + '-' + accessPointArn.accountId + outpostsSuffix, - serviceName + fipsSuffix + dualStackSuffix, - useArnRegion ? 
accessPointArn.region : req.service.config.region, - dnsSuffix - ].join('.'); - - if (isObjectLambdaArn) { - // should be in the format: "accesspoint/${accesspointName}" - var serviceName = 's3-object-lambda'; - var accesspointName = accessPointArn.resource.split('/')[1]; - var fipsSuffix = req.service.config.useFipsEndpoint ? '-fips': ''; - endpoint.hostname = [ - accesspointName + '-' + accessPointArn.accountId, - serviceName + fipsSuffix, - useArnRegion ? accessPointArn.region : req.service.config.region, - dnsSuffix - ].join('.'); - } - endpoint.host = endpoint.hostname; - var encodedArn = AWS.util.uriEscape(req.params.Bucket); - var path = req.httpRequest.path; - //remove the Bucket value from path - req.httpRequest.path = path.replace(new RegExp('/' + encodedArn), ''); - if (req.httpRequest.path[0] !== '/') { - req.httpRequest.path = '/' + req.httpRequest.path; - } - req.httpRequest.region = accessPointArn.region; //region used to sign - }, - - /** - * Adds Expect: 100-continue header if payload is greater-or-equal 1MB - * @api private - */ - addExpect100Continue: function addExpect100Continue(req) { - var len = req.httpRequest.headers['Content-Length']; - if (AWS.util.isNode() && (len >= 1024 * 1024 || req.params.Body instanceof AWS.util.stream.Stream)) { - req.httpRequest.headers['Expect'] = '100-continue'; - } - }, - - /** - * Adds a default content type if none is supplied. - * - * @api private - */ - addContentType: function addContentType(req) { - var httpRequest = req.httpRequest; - if (httpRequest.method === 'GET' || httpRequest.method === 'HEAD') { - // Content-Type is not set in GET/HEAD requests - delete httpRequest.headers['Content-Type']; - return; - } - - if (!httpRequest.headers['Content-Type']) { // always have a Content-Type - httpRequest.headers['Content-Type'] = 'application/octet-stream'; - } - - var contentType = httpRequest.headers['Content-Type']; - if (AWS.util.isBrowser()) { - if (typeof httpRequest.body === 'string' && !contentType.match(/;\s*charset=/)) { - var charset = '; charset=UTF-8'; - httpRequest.headers['Content-Type'] += charset; - } else { - var replaceFn = function(_, prefix, charsetName) { - return prefix + charsetName.toUpperCase(); - }; - - httpRequest.headers['Content-Type'] = - contentType.replace(/(;\s*charset=)(.+)$/, replaceFn); + }], + delete: ["DELETE /reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] } - } - }, - - /** - * Checks whether checksums should be computed for the request if it's not - * already set by {AWS.EventListeners.Core.COMPUTE_CHECKSUM}. It depends on - * whether {AWS.Config.computeChecksums} is set. - * - * @param req [AWS.Request] the request to check against - * @return [Boolean] whether to compute checksums for a request. - * @api private - */ - willComputeChecksums: function willComputeChecksums(req) { - var rules = req.service.api.operations[req.operation].input.members; - var body = req.httpRequest.body; - var needsContentMD5 = req.service.config.computeChecksums && - rules.ContentMD5 && - !req.params.ContentMD5 && - body && - (AWS.util.Buffer.isBuffer(req.httpRequest.body) || typeof req.httpRequest.body === 'string'); - - // Sha256 signing disabled, and not a presigned url - if (needsContentMD5 && req.service.shouldDisableBodySigning(req) && !req.isPresigned()) { - return true; - } - - // SigV2 and presign, for backwards compatibility purpose. 
- if (needsContentMD5 && this.getSignatureVersion(req) === 's3' && req.isPresigned()) { - return true; - } - - return false; - }, - - /** - * A listener that computes the Content-MD5 and sets it in the header. - * This listener is to support S3-specific features like - * s3DisableBodySigning and SigV2 presign. Content MD5 logic for SigV4 is - * handled in AWS.EventListeners.Core.COMPUTE_CHECKSUM - * - * @api private - */ - computeContentMd5: function computeContentMd5(req) { - if (req.service.willComputeChecksums(req)) { - var md5 = AWS.util.crypto.md5(req.httpRequest.body, 'base64'); - req.httpRequest.headers['Content-MD5'] = md5; - } - }, - - /** - * @api private - */ - computeSseCustomerKeyMd5: function computeSseCustomerKeyMd5(req) { - var keys = { - SSECustomerKey: 'x-amz-server-side-encryption-customer-key-MD5', - CopySourceSSECustomerKey: 'x-amz-copy-source-server-side-encryption-customer-key-MD5' - }; - AWS.util.each(keys, function(key, header) { - if (req.params[key]) { - var value = AWS.util.crypto.md5(req.params[key], 'base64'); - req.httpRequest.headers[header] = value; + }, { + renamed: ["reactions", "deleteLegacy"] + }], + deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] } - }); - }, - - /** - * Returns true if the bucket name should be left in the URI path for - * a request to S3. This function takes into account the current - * endpoint protocol (e.g. http or https). - * - * @api private - */ - pathStyleBucketName: function pathStyleBucketName(bucketName) { - // user can force path style requests via the configuration - if (this.config.s3ForcePathStyle) return true; - if (this.config.s3BucketEndpoint) return false; - - if (s3util.dnsCompatibleBucketName(bucketName)) { - return (this.config.sslEnabled && bucketName.match(/\./)) ? true : false; - } else { - return true; // not dns compatible names must always use path style - } - }, - - /** - * For COPY operations, some can be error even with status code 200. - * SDK treats the response as exception when response body indicates - * an exception or body is empty. - * - * @api private - */ - extractErrorFrom200Response: function extractErrorFrom200Response(resp) { - if (!operationsWith200StatusCodeError[resp.request.operation]) return; - var httpResponse = resp.httpResponse; - if (httpResponse.body && httpResponse.body.toString().match('')) { - // Response body with '...' indicates an exception. - // Get S3 client object. In ManagedUpload, this.service refers to - // S3 client object. - resp.data = null; - var service = this.service ? this.service : this; - service.extractError(resp); - throw resp.error; - } else if (!httpResponse.body || !httpResponse.body.toString().match(/<[\w_]/)) { - // When body is empty or incomplete, S3 might stop the request on detecting client - // side aborting the request. 
- resp.data = null; - throw AWS.util.error(new Error(), { - code: 'InternalError', - message: 'S3 aborted request' - }); - } - }, - - /** - * @return [Boolean] whether the error can be retried - * @api private - */ - retryableError: function retryableError(error, request) { - if (operationsWith200StatusCodeError[request.operation] && - error.statusCode === 200) { - return true; - } else if (request._requestRegionForBucket && - request.service.bucketRegionCache[request._requestRegionForBucket]) { - return false; - } else if (error && error.code === 'RequestTimeout') { - return true; - } else if (error && - regionRedirectErrorCodes.indexOf(error.code) != -1 && - error.region && error.region != request.httpRequest.region) { - request.httpRequest.region = error.region; - if (error.statusCode === 301) { - request.service.updateReqBucketRegion(request); + }], + deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] } - return true; - } else { - var _super = AWS.Service.prototype.retryableError; - return _super.call(this, error, request); - } - }, - - /** - * Updates httpRequest with region. If region is not provided, then - * the httpRequest will be updated based on httpRequest.region - * - * @api private - */ - updateReqBucketRegion: function updateReqBucketRegion(request, region) { - var httpRequest = request.httpRequest; - if (typeof region === 'string' && region.length) { - httpRequest.region = region; - } - if (!httpRequest.endpoint.host.match(/s3(?!-accelerate).*\.amazonaws\.com$/)) { - return; - } - var service = request.service; - var s3Config = service.config; - var s3BucketEndpoint = s3Config.s3BucketEndpoint; - if (s3BucketEndpoint) { - delete s3Config.s3BucketEndpoint; - } - var newConfig = AWS.util.copy(s3Config); - delete newConfig.endpoint; - newConfig.region = httpRequest.region; - - httpRequest.endpoint = (new AWS.S3(newConfig)).endpoint; - service.populateURI(request); - s3Config.s3BucketEndpoint = s3BucketEndpoint; - httpRequest.headers.Host = httpRequest.endpoint.host; - - if (request._asm.currentState === 'validate') { - request.removeListener('build', service.populateURI); - request.addListener('build', service.removeVirtualHostedBucketFromPath); - } - }, - - /** - * Provides a specialized parser for getBucketLocation -- all other - * operations are parsed by the super class. 
- * - * @api private - */ - extractData: function extractData(resp) { - var req = resp.request; - if (req.operation === 'getBucketLocation') { - var match = resp.httpResponse.body.toString().match(/>(.+)<\/Location/); - delete resp.data['_']; - if (match) { - resp.data.LocationConstraint = match[1]; - } else { - resp.data.LocationConstraint = ''; + }], + deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] } - } - var bucket = req.params.Bucket || null; - if (req.operation === 'deleteBucket' && typeof bucket === 'string' && !resp.error) { - req.service.clearBucketRegionCache(bucket); - } else { - var headers = resp.httpResponse.headers || {}; - var region = headers['x-amz-bucket-region'] || null; - if (!region && req.operation === 'createBucket' && !resp.error) { - var createBucketConfiguration = req.params.CreateBucketConfiguration; - if (!createBucketConfiguration) { - region = 'us-east-1'; - } else if (createBucketConfiguration.LocationConstraint === 'EU') { - region = 'eu-west-1'; - } else { - region = createBucketConfiguration.LocationConstraint; - } + }], + deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] } - if (region) { - if (bucket && region !== req.service.bucketRegionCache[bucket]) { - req.service.bucketRegionCache[bucket] = region; - } + }], + deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] } - } - req.service.extractRequestIds(resp); - }, - - /** - * Extracts an error object from the http response. - * - * @api private - */ - extractError: function extractError(resp) { - var codes = { - 304: 'NotModified', - 403: 'Forbidden', - 400: 'BadRequest', - 404: 'NotFound' - }; - - var req = resp.request; - var code = resp.httpResponse.statusCode; - var body = resp.httpResponse.body || ''; - - var headers = resp.httpResponse.headers || {}; - var region = headers['x-amz-bucket-region'] || null; - var bucket = req.params.Bucket || null; - var bucketRegionCache = req.service.bucketRegionCache; - if (region && bucket && region !== bucketRegionCache[bucket]) { - bucketRegionCache[bucket] = region; - } - - var cachedRegion; - if (codes[code] && body.length === 0) { - if (bucket && !region) { - cachedRegion = bucketRegionCache[bucket] || null; - if (cachedRegion !== req.httpRequest.region) { - region = cachedRegion; - } + }], + deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] } - resp.error = AWS.util.error(new Error(), { - code: codes[code], - message: null, - region: region - }); - } else { - var data = new AWS.XML.Parser().parse(body.toString()); - - if (data.Region && !region) { - region = data.Region; - if (bucket && region !== bucketRegionCache[bucket]) { - bucketRegionCache[bucket] = region; - } - } else if (bucket && !region && !data.Region) { - cachedRegion = bucketRegionCache[bucket] || null; - if (cachedRegion !== req.httpRequest.region) { - region = cachedRegion; - } + }], + deleteLegacy: ["DELETE /reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] } - - resp.error = AWS.util.error(new Error(), { - code: data.Code || code, - message: data.Message || null, - region: region - }); 
- } - req.service.extractRequestIds(resp); - }, - - /** - * If region was not obtained synchronously, then send async request - * to get bucket region for errors resulting from wrong region. - * - * @api private - */ - requestBucketRegion: function requestBucketRegion(resp, done) { - var error = resp.error; - var req = resp.request; - var bucket = req.params.Bucket || null; - - if (!error || !bucket || error.region || req.operation === 'listObjects' || - (AWS.util.isNode() && req.operation === 'headBucket') || - (error.statusCode === 400 && req.operation !== 'headObject') || - regionRedirectErrorCodes.indexOf(error.code) === -1) { - return done(); - } - var reqOperation = AWS.util.isNode() ? 'headBucket' : 'listObjects'; - var reqParams = {Bucket: bucket}; - if (reqOperation === 'listObjects') reqParams.MaxKeys = 0; - var regionReq = req.service[reqOperation](reqParams); - regionReq._requestRegionForBucket = bucket; - regionReq.send(function() { - var region = req.service.bucketRegionCache[bucket] || null; - error.region = region; - done(); - }); - }, - - /** - * For browser only. If NetworkingError received, will attempt to obtain - * the bucket region. - * - * @api private - */ - reqRegionForNetworkingError: function reqRegionForNetworkingError(resp, done) { - if (!AWS.util.isBrowser()) { - return done(); - } - var error = resp.error; - var request = resp.request; - var bucket = request.params.Bucket; - if (!error || error.code !== 'NetworkingError' || !bucket || - request.httpRequest.region === 'us-east-1') { - return done(); - } - var service = request.service; - var bucketRegionCache = service.bucketRegionCache; - var cachedRegion = bucketRegionCache[bucket] || null; - - if (cachedRegion && cachedRegion !== request.httpRequest.region) { - service.updateReqBucketRegion(request, cachedRegion); - done(); - } else if (!s3util.dnsCompatibleBucketName(bucket)) { - service.updateReqBucketRegion(request, 'us-east-1'); - if (bucketRegionCache[bucket] !== 'us-east-1') { - bucketRegionCache[bucket] = 'us-east-1'; + }, { + deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy" + }], + listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] } - done(); - } else if (request.httpRequest.virtualHostedBucket) { - var getRegionReq = service.listObjects({Bucket: bucket, MaxKeys: 0}); - service.updateReqBucketRegion(getRegionReq, 'us-east-1'); - getRegionReq._requestRegionForBucket = bucket; - - getRegionReq.send(function() { - var region = service.bucketRegionCache[bucket] || null; - if (region && region !== request.httpRequest.region) { - service.updateReqBucketRegion(request, region); - } - done(); - }); - } else { - // DNS-compatible path-style - // (s3ForcePathStyle or bucket name with dot over https) - // Cannot obtain region information for this case - done(); - } - }, - - /** - * Cache for bucket region. - * - * @api private - */ - bucketRegionCache: {}, - - /** - * Clears bucket region cache. 
- * - * @api private - */ - clearBucketRegionCache: function(buckets) { - var bucketRegionCache = this.bucketRegionCache; - if (!buckets) { - buckets = Object.keys(bucketRegionCache); - } else if (typeof buckets === 'string') { - buckets = [buckets]; - } - for (var i = 0; i < buckets.length; i++) { - delete bucketRegionCache[buckets[i]]; - } - return bucketRegionCache; - }, - - /** - * Corrects request region if bucket's cached region is different - * - * @api private - */ - correctBucketRegionFromCache: function correctBucketRegionFromCache(req) { - var bucket = req.params.Bucket || null; - if (bucket) { - var service = req.service; - var requestRegion = req.httpRequest.region; - var cachedRegion = service.bucketRegionCache[bucket]; - if (cachedRegion && cachedRegion !== requestRegion) { - service.updateReqBucketRegion(req, cachedRegion); + }], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] } - } - }, - - /** - * Extracts S3 specific request ids from the http response. - * - * @api private - */ - extractRequestIds: function extractRequestIds(resp) { - var extendedRequestId = resp.httpResponse.headers ? resp.httpResponse.headers['x-amz-id-2'] : null; - var cfId = resp.httpResponse.headers ? resp.httpResponse.headers['x-amz-cf-id'] : null; - resp.extendedRequestId = extendedRequestId; - resp.cfId = cfId; - - if (resp.error) { - resp.error.requestId = resp.requestId || null; - resp.error.extendedRequestId = extendedRequestId; - resp.error.cfId = cfId; - } - }, - - /** - * Get a pre-signed URL for a given operation name. - * - * @note You must ensure that you have static or previously resolved - * credentials if you call this method synchronously (with no callback), - * otherwise it may not properly sign the request. If you cannot guarantee - * this (you are using an asynchronous credential provider, i.e., EC2 - * IAM roles), you should always call this method with an asynchronous - * callback. - * @note Not all operation parameters are supported when using pre-signed - * URLs. Certain parameters, such as `SSECustomerKey`, `ACL`, `Expires`, - * `ContentLength`, or `Tagging` must be provided as headers when sending a - * request. If you are using pre-signed URLs to upload from a browser and - * need to use these fields, see {createPresignedPost}. - * @note The default signer allows altering the request by adding corresponding - * headers to set some parameters (e.g. Range) and these added parameters - * won't be signed. You must use signatureVersion v4 to to include these - * parameters in the signed portion of the URL and enforce exact matching - * between headers and signed params in the URL. - * @note This operation cannot be used with a promise. See note above regarding - * asynchronous credentials and use with a callback. - * @param operation [String] the name of the operation to call - * @param params [map] parameters to pass to the operation. See the given - * operation for the expected operation parameters. In addition, you can - * also pass the "Expires" parameter to inform S3 how long the URL should - * work for. - * @option params Expires [Integer] (900) the number of seconds to expire - * the pre-signed URL operation in. Defaults to 15 minutes. - * @param callback [Function] if a callback is provided, this function will - * pass the URL as the second parameter (after the error parameter) to - * the callback function. 
- * @return [String] if called synchronously (with no callback), returns the - * signed URL. - * @return [null] nothing is returned if a callback is provided. - * @example Pre-signing a getObject operation (synchronously) - * var params = {Bucket: 'bucket', Key: 'key'}; - * var url = s3.getSignedUrl('getObject', params); - * console.log('The URL is', url); - * @example Pre-signing a putObject (asynchronously) - * var params = {Bucket: 'bucket', Key: 'key'}; - * s3.getSignedUrl('putObject', params, function (err, url) { - * console.log('The URL is', url); - * }); - * @example Pre-signing a putObject operation with a specific payload - * var params = {Bucket: 'bucket', Key: 'key', Body: 'body'}; - * var url = s3.getSignedUrl('putObject', params); - * console.log('The URL is', url); - * @example Passing in a 1-minute expiry time for a pre-signed URL - * var params = {Bucket: 'bucket', Key: 'key', Expires: 60}; - * var url = s3.getSignedUrl('getObject', params); - * console.log('The URL is', url); // expires in 60 seconds - */ - getSignedUrl: function getSignedUrl(operation, params, callback) { - params = AWS.util.copy(params || {}); - var expires = params.Expires || 900; - - if (typeof expires !== 'number') { - throw AWS.util.error(new Error(), - { code: 'InvalidParameterException', message: 'The expiration must be a number, received ' + typeof expires }); - } - - delete params.Expires; // we can't validate this - var request = this.makeRequest(operation, params); - - if (callback) { - AWS.util.defer(function() { - request.presign(expires, callback); - }); - } else { - return request.presign(expires, callback); - } - }, - - /** - * @!method getSignedUrlPromise() - * Returns a 'thenable' promise that will be resolved with a pre-signed URL - * for a given operation name. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @note Not all operation parameters are supported when using pre-signed - * URLs. Certain parameters, such as `SSECustomerKey`, `ACL`, `Expires`, - * `ContentLength`, or `Tagging` must be provided as headers when sending a - * request. If you are using pre-signed URLs to upload from a browser and - * need to use these fields, see {createPresignedPost}. - * @param operation [String] the name of the operation to call - * @param params [map] parameters to pass to the operation. See the given - * operation for the expected operation parameters. In addition, you can - * also pass the "Expires" parameter to inform S3 how long the URL should - * work for. - * @option params Expires [Integer] (900) the number of seconds to expire - * the pre-signed URL operation in. Defaults to 15 minutes. - * @callback fulfilledCallback function(url) - * Called if the promise is fulfilled. - * @param url [String] the signed url - * @callback rejectedCallback function(err) - * Called if the promise is rejected. - * @param err [Error] if an error occurred, this value will be filled - * @return [Promise] A promise that represents the state of the `refresh` call. - * @example Pre-signing a getObject operation - * var params = {Bucket: 'bucket', Key: 'key'}; - * var promise = s3.getSignedUrlPromise('getObject', params); - * promise.then(function(url) { - * console.log('The URL is', url); - * }, function(err) { ... 
}); - * @example Pre-signing a putObject operation with a specific payload - * var params = {Bucket: 'bucket', Key: 'key', Body: 'body'}; - * var promise = s3.getSignedUrlPromise('putObject', params); - * promise.then(function(url) { - * console.log('The URL is', url); - * }, function(err) { ... }); - * @example Passing in a 1-minute expiry time for a pre-signed URL - * var params = {Bucket: 'bucket', Key: 'key', Expires: 60}; - * var promise = s3.getSignedUrlPromise('getObject', params); - * promise.then(function(url) { - * console.log('The URL is', url); - * }, function(err) { ... }); - */ - - /** - * Get a pre-signed POST policy to support uploading to S3 directly from an - * HTML form. - * - * @param params [map] - * @option params Bucket [String] The bucket to which the post should be - * uploaded - * @option params Expires [Integer] (3600) The number of seconds for which - * the presigned policy should be valid. - * @option params Conditions [Array] An array of conditions that must be met - * for the presigned policy to allow the - * upload. This can include required tags, - * the accepted range for content lengths, - * etc. - * @see http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTConstructPolicy.html - * @option params Fields [map] Fields to include in the form. All - * values passed in as fields will be - * signed as exact match conditions. - * @param callback [Function] - * - * @note All fields passed in when creating presigned post data will be signed - * as exact match conditions. Any fields that will be interpolated by S3 - * must be added to the fields hash after signing, and an appropriate - * condition for such fields must be explicitly added to the Conditions - * array passed to this function before signing. - * - * @example Presiging post data with a known key - * var params = { - * Bucket: 'bucket', - * Fields: { - * key: 'key' - * } - * }; - * s3.createPresignedPost(params, function(err, data) { - * if (err) { - * console.error('Presigning post data encountered an error', err); - * } else { - * console.log('The post data is', data); - * } - * }); - * - * @example Presigning post data with an interpolated key - * var params = { - * Bucket: 'bucket', - * Conditions: [ - * ['starts-with', '$key', 'path/to/uploads/'] - * ] - * }; - * s3.createPresignedPost(params, function(err, data) { - * if (err) { - * console.error('Presigning post data encountered an error', err); - * } else { - * data.Fields.key = 'path/to/uploads/${filename}'; - * console.log('The post data is', data); - * } - * }); - * - * @note You must ensure that you have static or previously resolved - * credentials if you call this method synchronously (with no callback), - * otherwise it may not properly sign the request. If you cannot guarantee - * this (you are using an asynchronous credential provider, i.e., EC2 - * IAM roles), you should always call this method with an asynchronous - * callback. - * - * @return [map] If called synchronously (with no callback), returns a hash - * with the url to set as the form action and a hash of fields - * to include in the form. - * @return [null] Nothing is returned if a callback is provided. - * - * @callback callback function (err, data) - * @param err [Error] the error object returned from the policy signer - * @param data [map] The data necessary to construct an HTML form - * @param data.url [String] The URL to use as the action of the form - * @param data.fields [map] A hash of fields that must be included in the - * form for the upload to succeed. 
This hash will - * include the signed POST policy, your access key - * ID and security token (if present), etc. These - * may be safely included as input elements of type - * 'hidden.' - */ - createPresignedPost: function createPresignedPost(params, callback) { - if (typeof params === 'function' && callback === undefined) { - callback = params; - params = null; - } - - params = AWS.util.copy(params || {}); - var boundParams = this.config.params || {}; - var bucket = params.Bucket || boundParams.Bucket, - self = this, - config = this.config, - endpoint = AWS.util.copy(this.endpoint); - if (!config.s3BucketEndpoint) { - endpoint.pathname = '/' + bucket; - } - - function finalizePost() { - return { - url: AWS.util.urlFormat(endpoint), - fields: self.preparePostFields( - config.credentials, - config.region, - bucket, - params.Fields, - params.Conditions, - params.Expires - ) - }; - } - - if (callback) { - config.getCredentials(function (err) { - if (err) { - callback(err); - } else { - try { - callback(null, finalizePost()); - } catch (err) { - callback(err); - } - } - }); - } else { - return finalizePost(); - } - }, - - /** - * @api private - */ - preparePostFields: function preparePostFields( - credentials, - region, - bucket, - fields, - conditions, - expiresInSeconds - ) { - var now = this.getSkewCorrectedDate(); - if (!credentials || !region || !bucket) { - throw new Error('Unable to create a POST object policy without a bucket,' - + ' region, and credentials'); - } - fields = AWS.util.copy(fields || {}); - conditions = (conditions || []).slice(0); - expiresInSeconds = expiresInSeconds || 3600; - - var signingDate = AWS.util.date.iso8601(now).replace(/[:\-]|\.\d{3}/g, ''); - var shortDate = signingDate.substr(0, 8); - var scope = v4Credentials.createScope(shortDate, region, 's3'); - var credential = credentials.accessKeyId + '/' + scope; - - fields['bucket'] = bucket; - fields['X-Amz-Algorithm'] = 'AWS4-HMAC-SHA256'; - fields['X-Amz-Credential'] = credential; - fields['X-Amz-Date'] = signingDate; - if (credentials.sessionToken) { - fields['X-Amz-Security-Token'] = credentials.sessionToken; - } - for (var field in fields) { - if (fields.hasOwnProperty(field)) { - var condition = {}; - condition[field] = fields[field]; - conditions.push(condition); + }], + listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] } - } - - fields.Policy = this.preparePostPolicy( - new Date(now.valueOf() + expiresInSeconds * 1000), - conditions - ); - fields['X-Amz-Signature'] = AWS.util.crypto.hmac( - v4Credentials.getSigningKey(credentials, shortDate, region, 's3', true), - fields.Policy, - 'hex' - ); - - return fields; - }, - - /** - * @api private - */ - preparePostPolicy: function preparePostPolicy(expiration, conditions) { - return AWS.util.base64.encode(JSON.stringify({ - expiration: AWS.util.date.iso8601(expiration), - conditions: conditions - })); - }, - - /** - * @api private - */ - prepareSignedUrl: function prepareSignedUrl(request) { - request.addListener('validate', request.service.noPresignedContentLength); - request.removeListener('build', request.service.addContentType); - if (!request.params.Body) { - // no Content-MD5/SHA-256 if body is not provided - request.removeListener('build', request.service.computeContentMd5); - } else { - request.addListener('afterBuild', AWS.EventListeners.Core.COMPUTE_SHA256); - } + }], + listForPullRequestReviewComment: ["GET 
/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }] }, - - /** - * @api private - * @param request - */ - disableBodySigning: function disableBodySigning(request) { - var headers = request.httpRequest.headers; - // Add the header to anything that isn't a presigned url, unless that presigned url had a body defined - if (!Object.prototype.hasOwnProperty.call(headers, 'presigned-expires')) { - headers['X-Amz-Content-Sha256'] = 'UNSIGNED-PAYLOAD'; - } - }, - - /** - * @api private - */ - noPresignedContentLength: function noPresignedContentLength(request) { - if (request.params.ContentLength !== undefined) { - throw AWS.util.error(new Error(), {code: 'UnexpectedParameter', - message: 'ContentLength is not supported in pre-signed URLs.'}); - } - }, - - createBucket: function createBucket(params, callback) { - // When creating a bucket *outside* the classic region, the location - // constraint must be set for the bucket and it must match the endpoint. - // This chunk of code will set the location constraint param based - // on the region (when possible), but it will not override a passed-in - // location constraint. - if (typeof params === 'function' || !params) { - callback = callback || params; - params = {}; - } - var hostname = this.endpoint.hostname; - // copy params so that appending keys does not unintentioinallly - // mutate params object argument passed in by user - var copiedParams = AWS.util.copy(params); - - if (hostname !== this.api.globalEndpoint && !params.CreateBucketConfiguration) { - copiedParams.CreateBucketConfiguration = { LocationConstraint: this.config.region }; - } - return this.makeRequest('createBucket', copiedParams, callback); + repos: { + acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], + addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addDeployKey: ["POST /repos/{owner}/{repo}/keys", {}, { + renamed: ["repos", "createDeployKey"] + }], + addProtectedBranchAdminEnforcement: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { + renamed: ["repos", "setAdminBranchProtection"] + }], + addProtectedBranchAppRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps", + renamed: ["repos", "addAppAccessRestrictions"] + }], + addProtectedBranchRequiredSignatures: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }, { + renamed: ["repos", "createCommitSignatureProtection"] + }], + addProtectedBranchRequiredStatusChecksContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts", + renamed: ["repos", "addStatusCheckContexts"] + }], + addProtectedBranchTeamRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams", + renamed: ["repos", "addTeamAccessRestrictions"] + }], + 
addProtectedBranchUserRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users", + renamed: ["repos", "addUserAccessRestrictions"] + }], + addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createHook: ["POST /repos/{owner}/{repo}/hooks", {}, { + renamed: ["repos", "createWebhook"] + }], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateFile: ["PUT /repos/{owner}/{repo}/contents/{path}", {}, { + renamed: ["repos", "createOrUpdateFileContents"] + }], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}", {}, { + renamed: ["repos", "createCommitStatus"] + }], + createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { + mediaType: { + previews: ["baptiste"] + } + }], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteDownload: ["DELETE /repos/{owner}/{repo}/downloads/{download_id}"], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteHook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}", {}, { + renamed: ["repos", "deleteWebhook"] + }], + deleteInvitation: ["DELETE 
/repos/{owner}/{repo}/invitations/{invitation_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + disablePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }, { + renamed: ["repos", "deletePagesSite"] + }], + disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], + enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + enablePagesSite: ["POST /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }, { + renamed: ["repos", "createPagesSite"] + }], + enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllTopics: ["GET /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getArchiveLink: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}", {}, { + renamed: ["repos", "downloadArchive"] + }], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContents: ["GET /repos/{owner}/{repo}/contents/{path}", {}, { + renamed: ["repos", "getContent"] + }], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], + 
getDownload: ["GET /repos/{owner}/{repo}/downloads/{download_id}"], + getHook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}", {}, { + renamed: ["repos", "getWebhook"] + }], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getProtectedBranchAdminEnforcement: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { + renamed: ["repos", "getAdminBranchProtection"] + }], + getProtectedBranchPullRequestReviewEnforcement: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { + renamed: ["repos", "getPullRequestReviewProtection"] + }], + getProtectedBranchRequiredSignatures: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }, { + renamed: ["repos", "getCommitSignatureProtection"] + }], + getProtectedBranchRequiredStatusChecks: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { + renamed: ["repos", "getStatusChecksProtection"] + }], + getProtectedBranchRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", {}, { + renamed: ["repos", "getAccessRestrictions"] + }], + getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + list: ["GET /user/repos", {}, { + renamed: ["repos", "listForAuthenticatedUser"] + }], + listAssetsForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets", {}, { + renamed: ["repos", "listReleaseAssets"] + }], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { + mediaType: { + previews: ["groot"] + } + }], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommitComments: ["GET /repos/{owner}/{repo}/comments", {}, { + renamed: ["repos", "listCommitCommentsForRepo"] + }], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listDeployKeys: ["GET 
/repos/{owner}/{repo}/keys"], + listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listDownloads: ["GET /repos/{owner}/{repo}/downloads"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listHooks: ["GET /repos/{owner}/{repo}/hooks", {}, { + renamed: ["repos", "listWebhooks"] + }], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listProtectedBranchRequiredStatusChecksContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + renamed: ["repos", "getAllStatusCheckContexts"] + }], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { + mediaType: { + previews: ["groot"] + } + }], + listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses", {}, { + renamed: ["repos", "listCommitStatusesForRef"] + }], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listTopics: ["GET /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }, { + renamed: ["repos", "getAllTopics"] + }], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + pingHook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings", {}, { + renamed: ["repos", "pingWebhook"] + }], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + removeBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection", {}, { + renamed: ["repos", "deleteBranchProtection"] + }], + removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], + removeDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}", {}, { + renamed: ["repos", "deleteDeployKey"] + }], + removeProtectedBranchAdminEnforcement: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { + renamed: ["repos", "deleteAdminBranchProtection"] + }], + removeProtectedBranchAppRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps", + renamed: ["repos", "removeAppAccessRestrictions"] + }], + removeProtectedBranchPullRequestReviewEnforcement: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { + renamed: ["repos", "deletePullRequestReviewProtection"] + }], + removeProtectedBranchRequiredSignatures: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }, { + renamed: ["repos", "deleteCommitSignatureProtection"] + }], + removeProtectedBranchRequiredStatusChecks: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { + renamed: ["repos", "removeStatusChecksProtection"] + }], + removeProtectedBranchRequiredStatusChecksContexts: 
["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts", + renamed: ["repos", "removeStatusCheckContexts"] + }], + removeProtectedBranchRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", {}, { + renamed: ["repos", "deleteAccessRestrictions"] + }], + removeProtectedBranchTeamRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams", + renamed: ["repos", "removeTeamAccessRestrictions"] + }], + removeProtectedBranchUserRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users", + renamed: ["repos", "removeUserAccessRestrictions"] + }], + removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + replaceProtectedBranchAppRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps", + renamed: ["repos", "setAppAccessRestrictions"] + }], + replaceProtectedBranchRequiredStatusChecksContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts", + renamed: ["repos", "setStatusCheckContexts"] + }], + replaceProtectedBranchTeamRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams", + renamed: ["repos", "setTeamAccessRestrictions"] + }], + replaceProtectedBranchUserRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users", + renamed: ["repos", "setUserAccessRestrictions"] + }], + replaceTopics: ["PUT /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }, { + renamed: ["repos", "replaceAllTopics"] + }], + requestPageBuild: ["POST /repos/{owner}/{repo}/pages/builds", {}, { + renamed: ["repos", "requestPagesBuild"] + }], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + retrieveCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile", {}, { + renamed: ["repos", "getCommunityProfileMetrics"] + }], + setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + testPushHook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests", {}, { + renamed: ["repos", "testPushWebhook"] + 
}], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateHook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}", {}, { + renamed: ["repos", "updateWebhook"] + }], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], + updateProtectedBranchPullRequestReviewEnforcement: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { + renamed: ["repos", "updatePullRequestReviewProtection"] + }], + updateProtectedBranchRequiredStatusChecks: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { + renamed: ["repos", "updateStatusChecksProtection"] + }], + updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], + updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { + baseUrl: "https://uploads.github.com" + }] }, - - writeGetObjectResponse: function writeGetObjectResponse(params, callback) { - - var request = this.makeRequest('writeGetObjectResponse', AWS.util.copy(params), callback); - var hostname = this.endpoint.hostname; - if (hostname.indexOf(this.config.region) !== -1) { - // hostname specifies a region already - hostname = hostname.replace('s3.', OBJECT_LAMBDA_SERVICE + '.'); - } else { - // Hostname doesn't have a region. - // Object Lambda requires an explicit region. - hostname = hostname.replace('s3.', OBJECT_LAMBDA_SERVICE + '.' + this.config.region + '.'); - } - - request.httpRequest.endpoint = new AWS.Endpoint(hostname, this.config); - return request; + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits", { + mediaType: { + previews: ["cloak"] + } + }], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] }, - - /** - * @see AWS.S3.ManagedUpload - * @overload upload(params = {}, [options], [callback]) - * Uploads an arbitrarily sized buffer, blob, or stream, using intelligent - * concurrent handling of parts if the payload is large enough. You can - * configure the concurrent queue size by setting `options`. Note that this - * is the only operation for which the SDK can retry requests with stream - * bodies. - * - * @param (see AWS.S3.putObject) - * @option (see AWS.S3.ManagedUpload.constructor) - * @return [AWS.S3.ManagedUpload] the managed upload object that can call - * `send()` or track progress. 
- * @example Uploading a stream object - * var params = {Bucket: 'bucket', Key: 'key', Body: stream}; - * s3.upload(params, function(err, data) { - * console.log(err, data); - * }); - * @example Uploading a stream with concurrency of 1 and partSize of 10mb - * var params = {Bucket: 'bucket', Key: 'key', Body: stream}; - * var options = {partSize: 10 * 1024 * 1024, queueSize: 1}; - * s3.upload(params, options, function(err, data) { - * console.log(err, data); - * }); - * @callback callback function(err, data) - * @param err [Error] an error or null if no error occurred. - * @param data [map] The response data from the successful upload: - * @param data.Location [String] the URL of the uploaded object - * @param data.ETag [String] the ETag of the uploaded object - * @param data.Bucket [String] the bucket to which the object was uploaded - * @param data.Key [String] the key to which the object was uploaded - */ - upload: function upload(params, options, callback) { - if (typeof options === 'function' && callback === undefined) { - callback = options; - options = null; - } - - options = options || {}; - options = AWS.util.merge(options || {}, {service: this, params: params}); - - var uploader = new AWS.S3.ManagedUpload(options); - if (typeof callback === 'function') uploader.send(callback); - return uploader; + teams: { + addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], + addOrUpdateMembershipInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { + renamed: ["teams", "addOrUpdateMembershipForUserInOrg"] + }], + addOrUpdateProjectInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }, { + renamed: ["teams", "addOrUpdateProjectPermissionsInOrg"] + }], + addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + addOrUpdateRepoInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {}, { + renamed: ["teams", "addOrUpdateRepoPermissionsInOrg"] + }], + addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + checkManagesRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {}, { + renamed: ["teams", "checkPermissionsForRepoInOrg"] + }], + checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + getMembershipInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { + 
renamed: ["teams", "getMembershipForUserInOrg"] + }], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], + removeMembershipInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { + renamed: ["teams", "removeMembershipForUserInOrg"] + }], + removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], + removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + reviewProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }, { + renamed: ["teams", "checkPermissionsForProjectInOrg"] + }], + updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: ["POST /user/emails"], + addEmails: ["POST /user/emails", {}, { + renamed: ["users", "addEmailsForAuthenticated"] + }], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowing: ["GET /user/following/{username}", {}, { + renamed: ["users", "checkPersonIsFollowedByAuthenticated"] + }], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKey: ["POST /user/gpg_keys", {}, { + renamed: ["users", "createGpgKeyForAuthenticated"] + }], + createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], + createPublicKey: ["POST /user/keys", {}, { + renamed: ["users", "createPublicSshKeyForAuthenticated"] + }], + createPublicSshKeyForAuthenticated: ["POST /user/keys"], + deleteEmailForAuthenticated: ["DELETE /user/emails"], + deleteEmails: ["DELETE /user/emails", {}, { + renamed: ["users", "deleteEmailsForAuthenticated"] + }], + deleteGpgKey: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { + renamed: ["users", "deleteGpgKeyForAuthenticated"] + }], + deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicKey: ["DELETE /user/keys/{key_id}", {}, { + renamed: ["users", "deletePublicSshKeyForAuthenticated"] + }], + deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKey: ["GET /user/gpg_keys/{gpg_key_id}", {}, { + renamed: ["users", "getGpgKeyForAuthenticated"] + }], + getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicKey: ["GET /user/keys/{key_id}", {}, { + renamed: ["users", "getPublicSshKeyForAuthenticated"] + }], + getPublicSshKeyForAuthenticated: ["GET 
/user/keys/{key_id}"], + list: ["GET /users"], + listBlocked: ["GET /user/blocks", {}, { + renamed: ["users", "listBlockedByAuthenticated"] + }], + listBlockedByAuthenticated: ["GET /user/blocks"], + listEmails: ["GET /user/emails", {}, { + renamed: ["users", "listEmailsForAuthenticated"] + }], + listEmailsForAuthenticated: ["GET /user/emails"], + listFollowedByAuthenticated: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForAuthenticatedUser: ["GET /user/following", {}, { + renamed: ["users", "listFollowedByAuthenticated"] + }], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeys: ["GET /user/gpg_keys", {}, { + renamed: ["users", "listGpgKeysForAuthenticated"] + }], + listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmails: ["GET /user/public_emails", {}, { + renamed: ["users", "listPublicEmailsForAuthenticatedUser"] + }], + listPublicEmailsForAuthenticated: ["GET /user/public_emails"], + listPublicKeys: ["GET /user/keys", {}, { + renamed: ["users", "listPublicSshKeysForAuthenticated"] + }], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], + togglePrimaryEmailVisibility: ["PATCH /user/email/visibility", {}, { + renamed: ["users", "setPrimaryEmailVisibilityForAuthenticated"] + }], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] } -}); - -/** - * @api private - */ -AWS.S3.addPromisesToClass = function addPromisesToClass(PromiseDependency) { - this.prototype.getSignedUrlPromise = AWS.util.promisifyMethod('getSignedUrl', PromiseDependency); }; -/** - * @api private - */ -AWS.S3.deletePromisesFromClass = function deletePromisesFromClass() { - delete this.prototype.getSignedUrlPromise; -}; +const VERSION = "3.17.0"; -AWS.util.addPromises(AWS.S3); +function endpointsToMethods(octokit, endpointsMap) { + const newMethods = {}; + for (const [scope, endpoints] of Object.entries(endpointsMap)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign({ + method, + url + }, defaults); -/***/ }), -/* 159 */, -/* 160 */ -/***/ (function(module) { + if (!newMethods[scope]) { + newMethods[scope] = {}; + } -var ENV_ENDPOINT_NAME = 'AWS_EC2_METADATA_SERVICE_ENDPOINT'; -var CONFIG_ENDPOINT_NAME = 'ec2_metadata_service_endpoint'; + const scopeMethods = newMethods[scope]; -var getEndpointConfigOptions = function() { - return { - environmentVariableSelector: function(env) { return env[ENV_ENDPOINT_NAME]; }, - configFileSelector: function(profile) { return profile[CONFIG_ENDPOINT_NAME]; }, - default: undefined, - }; -}; + if (decorations) { + scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); + continue; + } -module.exports = getEndpointConfigOptions; + scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + } + } + return newMethods; +} -/***/ }), -/* 161 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); -"use 
strict"; + function withDecorations(...args) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.requiredEnvVars = void 0; -const github = __importStar(__webpack_require__(469)); -const githubCore = __importStar(__webpack_require__(470)); -const s3Client_1 = __importDefault(__webpack_require__(882)); -const s3UploadDirectory_1 = __importDefault(__webpack_require__(704)); -const validateEnvVars_1 = __importDefault(__webpack_require__(732)); -const checkBucketExists_1 = __importDefault(__webpack_require__(222)); -const githubClient_1 = __importDefault(__webpack_require__(119)); -const deactivateDeployments_1 = __importDefault(__webpack_require__(14)); -exports.requiredEnvVars = ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'GITHUB_TOKEN']; -exports.default = (bucketName, uploadDirectory, environmentPrefix) => __awaiter(void 0, void 0, void 0, function* () { - const websiteUrl = `https://s3.amazonaws.com/${bucketName}/index.html`; - const { repo } = github.context; - const branchName = github.context.payload.pull_request.head.ref; - console.log('PR Updated'); - validateEnvVars_1.default(exports.requiredEnvVars); - const bucketExists = yield checkBucketExists_1.default(bucketName); - if (!bucketExists) { - console.log(`S3 bucket does not exist. 
Creating ${bucketName}...`); - yield s3Client_1.default.createBucket({ Bucket: bucketName }).promise(); - yield s3Client_1.default.putBucketOwnershipControls({ - Bucket: bucketName, - OwnershipControls: { - Rules: [ - { - ObjectOwnership: 'ObjectWriter' - } - ] - } - }).promise(); - yield s3Client_1.default.putPublicAccessBlock({ - Bucket: bucketName, - PublicAccessBlockConfiguration: { - BlockPublicAcls: false, - BlockPublicPolicy: false, - IgnorePublicAcls: false, - RestrictPublicBuckets: false - } - }).promise(); - console.log('Configuring bucket website...'); - yield s3Client_1.default.putBucketWebsite({ - Bucket: bucketName, - WebsiteConfiguration: { - IndexDocument: { Suffix: 'index.html' }, - ErrorDocument: { Key: 'index.html' } - } - }).promise(); - } - else { - console.log('S3 Bucket already exists. Skipping creation...'); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: undefined + }); + return requestWithDefaults(options); + } // NOTE: there are currently no deprecations. But we keep the code + // below for future reference + + + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); } - yield deactivateDeployments_1.default(repo, environmentPrefix); - const deployment = yield githubClient_1.default.repos.createDeployment(Object.assign(Object.assign({}, repo), { ref: `refs/heads/${branchName}`, environment: `${environmentPrefix || 'PR-'}${github.context.payload.pull_request.number}`, auto_merge: false, transient_environment: true, required_contexts: [] })); - if (isSuccessResponse(deployment.data)) { - yield githubClient_1.default.repos.createDeploymentStatus(Object.assign(Object.assign({}, repo), { deployment_id: deployment.data.id, state: 'in_progress' })); - console.log('Uploading files...'); - yield s3UploadDirectory_1.default(bucketName, uploadDirectory); - yield githubClient_1.default.repos.createDeploymentStatus(Object.assign(Object.assign({}, repo), { deployment_id: deployment.data.id, state: 'success', environment_url: websiteUrl })); - githubCore.setOutput('websiteUrl', websiteUrl); - console.log(`Website URL: ${websiteUrl}`); + + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); } -}); -function isSuccessResponse(object) { - return 'id' in object; -} + if (decorations.renamedParameters) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + const options = requestWithDefaults.endpoint.merge(...args); -/***/ }), -/* 162 */, -/* 163 */, -/* 164 */, -/* 165 */ -/***/ (function(module) { + for (const [name, alias] of Object.entries(decorations.renamedParameters)) { + // There is currently no deprecated parameter that is optional, + // so we never hit the else branch below at this point. -function apiLoader(svc, version) { - if (!apiLoader.services.hasOwnProperty(svc)) { - throw new Error('InvalidService: Failed to load api for ' + svc); + /* istanbul ignore else */ + if (name in options) { + octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); + + if (!(alias in options)) { + options[alias] = options[name]; + } + + delete options[name]; + } + } + + return requestWithDefaults(options); + } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + + + return requestWithDefaults(...args); } - return apiLoader.services[svc][version]; + + return Object.assign(withDecorations, requestWithDefaults); } /** - * @api private + * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary + * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is + * done, we will remove the registerEndpoints methods and return the methods + * directly as with the other plugins. At that point we will also remove the + * legacy workarounds and deprecations. * - * This member of AWS.apiLoader is private, but changing it will necessitate a - * change to ../scripts/services-table-generator.ts + * See the plan at + * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 */ -apiLoader.services = {}; -/** - * @api private - */ -module.exports = apiLoader; +function restEndpointMethods(octokit) { + return endpointsToMethods(octokit, Endpoints); +} +restEndpointMethods.VERSION = VERSION; + +exports.restEndpointMethods = restEndpointMethods; +//# sourceMappingURL=index.js.map /***/ }), -/* 166 */, -/* 167 */, -/* 168 */ -/***/ (function(module) { + +/***/ 537: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const alias = ['stdin', 'stdout', 'stderr']; -const hasAlias = opts => alias.some(x => Boolean(opts[x])); +Object.defineProperty(exports, "__esModule", ({ value: true })); -module.exports = opts => { - if (!opts) { - return null; - } +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - if (opts.stdio && hasAlias(opts)) { - throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); - } +var deprecation = __nccwpck_require__(8932); +var once = _interopDefault(__nccwpck_require__(1223)); - if (typeof opts.stdio === 'string') { - return opts.stdio; - } +const logOnce = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ - const stdio = opts.stdio || []; +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) - if (!Array.isArray(stdio)) { - throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); - } + /* istanbul ignore next */ - const result = []; - const len = Math.max(stdio.length, alias.length); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } - for (let i = 0; i < len; i++) { - let value = null; + this.name = "HttpError"; + this.status = statusCode; + Object.defineProperty(this, "code", { + get() { + logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } - if (stdio[i] !== undefined) { - value = stdio[i]; - } else if (opts[alias[i]] !== undefined) { - value = opts[alias[i]]; - } + }); + this.headers = options.headers || {}; // redact request credentials without mutating original request options - result[i] = value; - } + const requestCopy = Object.assign({}, options.request); - return result; -}; + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); + } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + } + +} + +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map /***/ }), -/* 169 */, -/* 170 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { -var AWS = __webpack_require__(395); -var CognitoIdentity = __webpack_require__(214); -var STS = __webpack_require__(106); +/***/ 6234: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * Represents credentials retrieved from STS Web Identity Federation using - * the Amazon Cognito Identity service. - * - * By default this provider gets credentials using the - * {AWS.CognitoIdentity.getCredentialsForIdentity} service operation, which - * requires either an `IdentityId` or an `IdentityPoolId` (Amazon Cognito - * Identity Pool ID), which is used to call {AWS.CognitoIdentity.getId} to - * obtain an `IdentityId`. If the identity or identity pool is not configured in - * the Amazon Cognito Console to use IAM roles with the appropriate permissions, - * then additionally a `RoleArn` is required containing the ARN of the IAM trust - * policy for the Amazon Cognito role that the user will log into. 
If a `RoleArn` - * is provided, then this provider gets credentials using the - * {AWS.STS.assumeRoleWithWebIdentity} service operation, after first getting an - * Open ID token from {AWS.CognitoIdentity.getOpenIdToken}. - * - * In addition, if this credential provider is used to provide authenticated - * login, the `Logins` map may be set to the tokens provided by the respective - * identity providers. See {constructor} for an example on creating a credentials - * object with proper property values. - * - * ## Refreshing Credentials from Identity Service - * - * In addition to AWS credentials expiring after a given amount of time, the - * login token from the identity provider will also expire. Once this token - * expires, it will not be usable to refresh AWS credentials, and another - * token will be needed. The SDK does not manage refreshing of the token value, - * but this can be done through a "refresh token" supported by most identity - * providers. Consult the documentation for the identity provider for refreshing - * tokens. Once the refreshed token is acquired, you should make sure to update - * this new token in the credentials object's {params} property. The following - * code will update the WebIdentityToken, assuming you have retrieved an updated - * token from the identity provider: - * - * ```javascript - * AWS.config.credentials.params.Logins['graph.facebook.com'] = updatedToken; - * ``` - * - * Future calls to `credentials.refresh()` will now use the new token. - * - * @!attribute params - * @return [map] the map of params passed to - * {AWS.CognitoIdentity.getId}, - * {AWS.CognitoIdentity.getOpenIdToken}, and - * {AWS.STS.assumeRoleWithWebIdentity}. To update the token, set the - * `params.WebIdentityToken` property. - * @!attribute data - * @return [map] the raw data response from the call to - * {AWS.CognitoIdentity.getCredentialsForIdentity}, or - * {AWS.STS.assumeRoleWithWebIdentity}. Use this if you want to get - * access to other properties from the response. - * @!attribute identityId - * @return [String] the Cognito ID returned by the last call to - * {AWS.CognitoIdentity.getOpenIdToken}. This ID represents the actual - * final resolved identity ID from Amazon Cognito. - */ -AWS.CognitoIdentityCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * @api private - */ - localStorageKey: { - id: 'aws.cognito.identity-id.', - providers: 'aws.cognito.identity-providers.' - }, +"use strict"; - /** - * Creates a new credentials object. 
- * @example Creating a new credentials object - * AWS.config.credentials = new AWS.CognitoIdentityCredentials({ - * - * // either IdentityPoolId or IdentityId is required - * // See the IdentityPoolId param for AWS.CognitoIdentity.getID (linked below) - * // See the IdentityId param for AWS.CognitoIdentity.getCredentialsForIdentity - * // or AWS.CognitoIdentity.getOpenIdToken (linked below) - * IdentityPoolId: 'us-east-1:1699ebc0-7900-4099-b910-2df94f52a030', - * IdentityId: 'us-east-1:128d0a74-c82f-4553-916d-90053e4a8b0f' - * - * // optional, only necessary when the identity pool is not configured - * // to use IAM roles in the Amazon Cognito Console - * // See the RoleArn param for AWS.STS.assumeRoleWithWebIdentity (linked below) - * RoleArn: 'arn:aws:iam::1234567890:role/MYAPP-CognitoIdentity', - * - * // optional tokens, used for authenticated login - * // See the Logins param for AWS.CognitoIdentity.getID (linked below) - * Logins: { - * 'graph.facebook.com': 'FBTOKEN', - * 'www.amazon.com': 'AMAZONTOKEN', - * 'accounts.google.com': 'GOOGLETOKEN', - * 'api.twitter.com': 'TWITTERTOKEN', - * 'www.digits.com': 'DIGITSTOKEN' - * }, - * - * // optional name, defaults to web-identity - * // See the RoleSessionName param for AWS.STS.assumeRoleWithWebIdentity (linked below) - * RoleSessionName: 'web', - * - * // optional, only necessary when application runs in a browser - * // and multiple users are signed in at once, used for caching - * LoginId: 'example@gmail.com' - * - * }, { - * // optionally provide configuration to apply to the underlying service clients - * // if configuration is not provided, then configuration will be pulled from AWS.config - * - * // region should match the region your identity pool is located in - * region: 'us-east-1', - * - * // specify timeout options - * httpOptions: { - * timeout: 100 - * } - * }); - * @see AWS.CognitoIdentity.getId - * @see AWS.CognitoIdentity.getCredentialsForIdentity - * @see AWS.STS.assumeRoleWithWebIdentity - * @see AWS.CognitoIdentity.getOpenIdToken - * @see AWS.Config - * @note If a region is not provided in the global AWS.config, or - * specified in the `clientConfig` to the CognitoIdentityCredentials - * constructor, you may encounter a 'Missing credentials in config' error - * when calling making a service call. - */ - constructor: function CognitoIdentityCredentials(params, clientConfig) { - AWS.Credentials.call(this); - this.expired = true; - this.params = params; - this.data = null; - this._identityId = null; - this._clientConfig = AWS.util.copy(clientConfig || {}); - this.loadCachedId(); - var self = this; - Object.defineProperty(this, 'identityId', { - get: function() { - self.loadCachedId(); - return self._identityId || self.params.IdentityId; - }, - set: function(identityId) { - self._identityId = identityId; - } - }); - }, - /** - * Refreshes credentials using {AWS.CognitoIdentity.getCredentialsForIdentity}, - * or {AWS.STS.assumeRoleWithWebIdentity}. - * - * @callback callback function(err) - * Called when the STS service responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). 
- * @param err [Error] if an error occurred, this value will be filled - * @see AWS.Credentials.get - */ - refresh: function refresh(callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); - /** - * @api private - * @param callback - */ - load: function load(callback) { - var self = this; - self.createClients(); - self.data = null; - self._identityId = null; - self.getId(function(err) { - if (!err) { - if (!self.params.RoleArn) { - self.getCredentialsForIdentity(callback); - } else { - self.getCredentialsFromSTS(callback); - } - } else { - self.clearIdOnNotAuthorized(err); - callback(err); - } - }); - }, +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - /** - * Clears the cached Cognito ID associated with the currently configured - * identity pool ID. Use this to manually invalidate your cache if - * the identity pool ID was deleted. - */ - clearCachedId: function clearCache() { - this._identityId = null; - delete this.params.IdentityId; +var endpoint = __nccwpck_require__(9440); +var universalUserAgent = __nccwpck_require__(5030); +var isPlainObject = _interopDefault(__nccwpck_require__(8840)); +var nodeFetch = _interopDefault(__nccwpck_require__(467)); +var requestError = __nccwpck_require__(537); - var poolId = this.params.IdentityPoolId; - var loginId = this.params.LoginId || ''; - delete this.storage[this.localStorageKey.id + poolId + loginId]; - delete this.storage[this.localStorageKey.providers + poolId + loginId]; - }, +const VERSION = "5.4.4"; - /** - * @api private - */ - clearIdOnNotAuthorized: function clearIdOnNotAuthorized(err) { - var self = this; - if (err.code == 'NotAuthorizedException') { - self.clearCachedId(); - } - }, +function getBufferResponse(response) { + return response.arrayBuffer(); +} - /** - * Retrieves a Cognito ID, loading from cache if it was already retrieved - * on this device. - * - * @callback callback function(err, identityId) - * @param err [Error, null] an error object if the call failed or null if - * it succeeded. - * @param identityId [String, null] if successful, the callback will return - * the Cognito ID. - * @note If not loaded explicitly, the Cognito ID is loaded and stored in - * localStorage in the browser environment of a device. 
- * @api private - */ - getId: function getId(callback) { - var self = this; - if (typeof self.params.IdentityId === 'string') { - return callback(null, self.params.IdentityId); - } +function fetchWrapper(requestOptions) { + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } - self.cognito.getId(function(err, data) { - if (!err && data.IdentityId) { - self.params.IdentityId = data.IdentityId; - callback(null, data.IdentityId); - } else { - callback(err); - } - }); - }, + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect + }, requestOptions.request)).then(response => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } - /** - * @api private - */ - loadCredentials: function loadCredentials(data, credentials) { - if (!data || !credentials) return; - credentials.expired = false; - credentials.accessKeyId = data.Credentials.AccessKeyId; - credentials.secretAccessKey = data.Credentials.SecretKey; - credentials.sessionToken = data.Credentials.SessionToken; - credentials.expireTime = data.Credentials.Expiration; - }, + if (status === 204 || status === 205) { + return; + } // GitHub API returns 200 for HEAD requests - /** - * @api private - */ - getCredentialsForIdentity: function getCredentialsForIdentity(callback) { - var self = this; - self.cognito.getCredentialsForIdentity(function(err, data) { - if (!err) { - self.cacheId(data); - self.data = data; - self.loadCredentials(self.data, self); - } else { - self.clearIdOnNotAuthorized(err); - } - callback(err); - }); - }, - /** - * @api private - */ - getCredentialsFromSTS: function getCredentialsFromSTS(callback) { - var self = this; - self.cognito.getOpenIdToken(function(err, data) { - if (!err) { - self.cacheId(data); - self.params.WebIdentityToken = data.Token; - self.webIdentityCredentials.refresh(function(webErr) { - if (!webErr) { - self.data = self.webIdentityCredentials.data; - self.sts.credentialsFrom(self.data, self); - } - callback(webErr); - }); - } else { - self.clearIdOnNotAuthorized(err); - callback(err); + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; } - }); - }, - /** - * @api private - */ - loadCachedId: function loadCachedId() { - var self = this; + throw new requestError.RequestError(response.statusText, status, { + headers, + request: requestOptions + }); + } - // in the browser we source default IdentityId from localStorage - if (AWS.util.isBrowser() && !self.params.IdentityId) { - var id = self.getStorage('id'); - if (id && self.params.Logins) { - var actualProviders = Object.keys(self.params.Logins); - var cachedProviders = - (self.getStorage('providers') || '').split(','); + if (status === 304) { + throw new requestError.RequestError("Not modified", status, { + headers, + request: requestOptions + }); + } - // only load ID if at least one provider used this ID before - var intersect = cachedProviders.filter(function(n) { - return actualProviders.indexOf(n) !== -1; + if (status >= 400) { + return response.text().then(message => { + const error = new requestError.RequestError(message, status, { + headers, + request: requestOptions }); - if 
(intersect.length !== 0) { - self.params.IdentityId = id; + + try { + let responseBody = JSON.parse(error.message); + Object.assign(error, responseBody); + let errors = responseBody.errors; // Assumption `errors` would always be in Array format + + error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); + } catch (e) {// ignore, see octokit/rest.js#684 } - } else if (id) { - self.params.IdentityId = id; - } - } - }, - /** - * @api private - */ - createClients: function() { - var clientConfig = this._clientConfig; - this.webIdentityCredentials = this.webIdentityCredentials || - new AWS.WebIdentityCredentials(this.params, clientConfig); - if (!this.cognito) { - var cognitoConfig = AWS.util.merge({}, clientConfig); - cognitoConfig.params = this.params; - this.cognito = new CognitoIdentity(cognitoConfig); + throw error; + }); } - this.sts = this.sts || new STS(clientConfig); - }, - /** - * @api private - */ - cacheId: function cacheId(data) { - this._identityId = data.IdentityId; - this.params.IdentityId = this._identityId; + const contentType = response.headers.get("content-type"); - // cache this IdentityId in browser localStorage if possible - if (AWS.util.isBrowser()) { - this.setStorage('id', data.IdentityId); + if (/application\/json/.test(contentType)) { + return response.json(); + } - if (this.params.Logins) { - this.setStorage('providers', Object.keys(this.params.Logins).join(',')); - } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); } - }, - /** - * @api private - */ - getStorage: function getStorage(key) { - return this.storage[this.localStorageKey[key] + this.params.IdentityPoolId + (this.params.LoginId || '')]; - }, + return getBufferResponse(response); + }).then(data => { + return { + status, + url, + headers, + data + }; + }).catch(error => { + if (error instanceof requestError.RequestError) { + throw error; + } - /** - * @api private - */ - setStorage: function setStorage(key, val) { - try { - this.storage[this.localStorageKey[key] + this.params.IdentityPoolId + (this.params.LoginId || '')] = val; - } catch (_) {} - }, + throw new requestError.RequestError(error.message, 500, { + headers, + request: requestOptions + }); + }); +} - /** - * @api private - */ - storage: (function() { - try { - var storage = AWS.util.isBrowser() && window.localStorage !== null && typeof window.localStorage === 'object' ? 
- window.localStorage : {}; +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); - // Test set/remove which would throw an error in Safari's private browsing - storage['aws.test-storage'] = 'foobar'; - delete storage['aws.test-storage']; + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); - return storage; - } catch (_) { - return {}; + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); } - })() -}); + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; -/***/ }), -/* 171 */ -/***/ (function(module) { - -function isFipsRegion(region) { - return typeof region === 'string' && (region.startsWith('fips-') || region.endsWith('-fips')); -} + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; -function isGlobalRegion(region) { - return typeof region === 'string' && ['aws-global', 'aws-us-gov-global'].includes(region); + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); } -function getRealRegion(region) { - return ['fips-aws-global', 'aws-fips', 'aws-global'].includes(region) - ? 'us-east-1' - : ['fips-aws-us-gov-global', 'aws-us-gov-global'].includes(region) - ? 'us-gov-west-1' - : region.replace(/fips-(dkr-|prod-)?|-fips/, ''); -} +const request = withDefaults(endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + } +}); -module.exports = { - isFipsRegion: isFipsRegion, - isGlobalRegion: isGlobalRegion, - getRealRegion: getRealRegion -}; +exports.request = request; +//# sourceMappingURL=index.js.map /***/ }), -/* 172 */, -/* 173 */, -/* 174 */, -/* 175 */ -/***/ (function(module, __unusedexports, __webpack_require__) { -var util = __webpack_require__(395).util; -var toBuffer = util.buffer.toBuffer; +/***/ 8291: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// All prelude components are unsigned, 32-bit integers -var PRELUDE_MEMBER_LENGTH = 4; -// The prelude consists of two components -var PRELUDE_LENGTH = PRELUDE_MEMBER_LENGTH * 2; -// Checksums are always CRC32 hashes. 
-var CHECKSUM_LENGTH = 4; -// Messages must include a full prelude, a prelude checksum, and a message checksum -var MINIMUM_MESSAGE_LENGTH = PRELUDE_LENGTH + CHECKSUM_LENGTH * 2; +__nccwpck_require__(3639); +var AWS = __nccwpck_require__(8437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; -/** - * @api private - * - * @param {Buffer} message - */ -function splitMessage(message) { - if (!util.Buffer.isBuffer(message)) message = toBuffer(message); +apiLoader.services['cognitoidentity'] = {}; +AWS.CognitoIdentity = Service.defineService('cognitoidentity', ['2014-06-30']); +Object.defineProperty(apiLoader.services['cognitoidentity'], '2014-06-30', { + get: function get() { + var model = __nccwpck_require__(7377); + model.paginators = (__nccwpck_require__(5010)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); - if (message.length < MINIMUM_MESSAGE_LENGTH) { - throw new Error('Provided message too short to accommodate event stream message overhead'); - } +module.exports = AWS.CognitoIdentity; - if (message.length !== message.readUInt32BE(0)) { - throw new Error('Reported message length does not match received message length'); - } - var expectedPreludeChecksum = message.readUInt32BE(PRELUDE_LENGTH); +/***/ }), - if ( - expectedPreludeChecksum !== util.crypto.crc32( - message.slice(0, PRELUDE_LENGTH) - ) - ) { - throw new Error( - 'The prelude checksum specified in the message (' + - expectedPreludeChecksum + - ') does not match the calculated CRC32 checksum.' - ); - } +/***/ 3256: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var expectedMessageChecksum = message.readUInt32BE(message.length - CHECKSUM_LENGTH); +__nccwpck_require__(3639); +var AWS = __nccwpck_require__(8437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; - if ( - expectedMessageChecksum !== util.crypto.crc32( - message.slice(0, message.length - CHECKSUM_LENGTH) - ) - ) { - throw new Error( - 'The message checksum did not match the expected value of ' + - expectedMessageChecksum - ); - } +apiLoader.services['s3'] = {}; +AWS.S3 = Service.defineService('s3', ['2006-03-01']); +__nccwpck_require__(6543); +Object.defineProperty(apiLoader.services['s3'], '2006-03-01', { + get: function get() { + var model = __nccwpck_require__(1129); + model.paginators = (__nccwpck_require__(7265)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(4048)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); - var headersStart = PRELUDE_LENGTH + CHECKSUM_LENGTH; - var headersEnd = headersStart + message.readUInt32BE(PRELUDE_MEMBER_LENGTH); +module.exports = AWS.S3; - return { - headers: message.slice(headersStart, headersEnd), - body: message.slice(headersEnd, message.length - CHECKSUM_LENGTH), - }; -} -/** - * @api private - */ -module.exports = { - splitMessage: splitMessage -}; +/***/ }), +/***/ 7513: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/***/ }), -/* 176 */, -/* 177 */, -/* 178 */, -/* 179 */, -/* 180 */, -/* 181 */, -/* 182 */, -/* 183 */, -/* 184 */, -/* 185 */, -/* 186 */, -/* 187 */ -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { - -var AWS = __webpack_require__(395); -__webpack_require__(923); -__webpack_require__(906); -var PromisesDependency; +__nccwpck_require__(3639); +var AWS = __nccwpck_require__(8437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; -/** - * The main configuration class used by all service objects to set - * the region, 
credentials, and other options for requests. - * - * By default, credentials and region settings are left unconfigured. - * This should be configured by the application before using any +apiLoader.services['sts'] = {}; +AWS.STS = Service.defineService('sts', ['2011-06-15']); +__nccwpck_require__(1055); +Object.defineProperty(apiLoader.services['sts'], '2011-06-15', { + get: function get() { + var model = __nccwpck_require__(753); + model.paginators = (__nccwpck_require__(6314)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.STS; + + +/***/ }), + +/***/ 2793: +/***/ ((module) => { + +function apiLoader(svc, version) { + if (!apiLoader.services.hasOwnProperty(svc)) { + throw new Error('InvalidService: Failed to load api for ' + svc); + } + return apiLoader.services[svc][version]; +} + +/** + * @api private + * + * This member of AWS.apiLoader is private, but changing it will necessitate a + * change to ../scripts/services-table-generator.ts + */ +apiLoader.services = {}; + +/** + * @api private + */ +module.exports = apiLoader; + + +/***/ }), + +/***/ 8110: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(8437); +__nccwpck_require__(3819); +__nccwpck_require__(6965); +var PromisesDependency; + +/** + * The main configuration class used by all service objects to set + * the region, credentials, and other options for requests. + * + * By default, credentials and region settings are left unconfigured. + * This should be configured by the application before using any * AWS service APIs. * * In order to set global configuration options, properties should @@ -9817,448 +6983,11 @@ AWS.config = new AWS.Config(); /***/ }), -/* 188 */, -/* 189 */, -/* 190 */, -/* 191 */ -/***/ (function(module) { - -module.exports = require("querystring"); - -/***/ }), -/* 192 */, -/* 193 */, -/* 194 */, -/* 195 */, -/* 196 */, -/* 197 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = isexe -isexe.sync = sync - -var fs = __webpack_require__(747) - -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, options)) - }) -} - -function sync (path, options) { - return checkStat(fs.statSync(path), options) -} - -function checkStat (stat, options) { - return stat.isFile() && checkMode(stat, options) -} - -function checkMode (stat, options) { - var mod = stat.mode - var uid = stat.uid - var gid = stat.gid - - var myUid = options.uid !== undefined ? - options.uid : process.getuid && process.getuid() - var myGid = options.gid !== undefined ? 
- options.gid : process.getgid && process.getgid() - - var u = parseInt('100', 8) - var g = parseInt('010', 8) - var o = parseInt('001', 8) - var ug = u | g - - var ret = (mod & o) || - (mod & g) && gid === myGid || - (mod & u) && uid === myUid || - (mod & ug) && myUid === 0 - - return ret -} - - -/***/ }), -/* 198 */, -/* 199 */, -/* 200 */ -/***/ (function(module) { - -module.exports = require("dgram"); - -/***/ }), -/* 201 */, -/* 202 */, -/* 203 */, -/* 204 */, -/* 205 */, -/* 206 */, -/* 207 */, -/* 208 */, -/* 209 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = void 0; - -var _v = _interopRequireDefault(__webpack_require__(212)); - -var _md = _interopRequireDefault(__webpack_require__(803)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports.default = _default; - -/***/ }), -/* 210 */ -/***/ (function(__unusedmodule, exports) { - -// Generated by CoffeeScript 1.12.7 -(function() { - "use strict"; - exports.stripBOM = function(str) { - if (str[0] === '\uFEFF') { - return str.substring(1); - } else { - return str; - } - }; - -}).call(this); - - -/***/ }), -/* 211 */ -/***/ (function(module) { - -module.exports = require("https"); - -/***/ }), -/* 212 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = _default; -exports.URL = exports.DNS = void 0; - -var _bytesToUuid = _interopRequireDefault(__webpack_require__(390)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function uuidToBytes(uuid) { - // Note: We assume we're being passed a valid uuid string - var bytes = []; - uuid.replace(/[a-fA-F0-9]{2}/g, function (hex) { - bytes.push(parseInt(hex, 16)); - }); - return bytes; -} - -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape - - var bytes = new Array(str.length); - - for (var i = 0; i < str.length; i++) { - bytes[i] = str.charCodeAt(i); - } - - return bytes; -} - -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - var generateUUID = function (value, namespace, buf, offset) { - var off = buf && offset || 0; - if (typeof value == 'string') value = stringToBytes(value); - if (typeof namespace == 'string') namespace = uuidToBytes(namespace); - if (!Array.isArray(value)) throw TypeError('value must be an array of bytes'); - if (!Array.isArray(namespace) || namespace.length !== 16) throw TypeError('namespace must be uuid string or an Array of 16 byte values'); // Per 4.3 - - var bytes = hashfunc(namespace.concat(value)); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - for (var idx = 0; idx < 16; ++idx) { - buf[off + idx] = bytes[idx]; - } - } - - return buf || (0, _bytesToUuid.default)(bytes); - }; // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; - } catch (err) {} // For CommonJS default export support - - - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} - -/***/ }), -/* 213 */ -/***/ (function(module) { - -module.exports = require("timers"); - -/***/ }), -/* 214 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -__webpack_require__(234); -var AWS = __webpack_require__(395); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cognitoidentity'] = {}; -AWS.CognitoIdentity = Service.defineService('cognitoidentity', ['2014-06-30']); -Object.defineProperty(apiLoader.services['cognitoidentity'], '2014-06-30', { - get: function get() { - var model = __webpack_require__(56); - model.paginators = __webpack_require__(371).pagination; - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.CognitoIdentity; - - -/***/ }), -/* 215 */, -/* 216 */, -/* 217 */, -/* 218 */, -/* 219 */, -/* 220 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var AWS = __webpack_require__(395); -var inherit = AWS.util.inherit; - -/** - * @api private - */ -AWS.Signers.V2 = inherit(AWS.Signers.RequestSigner, { - addAuthorization: function addAuthorization(credentials, date) { - - if (!date) date = AWS.util.date.getDate(); - - var r = this.request; - - r.params.Timestamp = AWS.util.date.iso8601(date); - r.params.SignatureVersion = '2'; - r.params.SignatureMethod = 'HmacSHA256'; - r.params.AWSAccessKeyId = credentials.accessKeyId; - - if (credentials.sessionToken) { - r.params.SecurityToken = credentials.sessionToken; - } - - delete r.params.Signature; // delete old Signature for re-signing - r.params.Signature = this.signature(credentials); - - r.body = AWS.util.queryParamsToString(r.params); - r.headers['Content-Length'] = r.body.length; - }, - - signature: function signature(credentials) { - return AWS.util.crypto.hmac(credentials.secretAccessKey, this.stringToSign(), 'base64'); - }, - - stringToSign: function stringToSign() { - var parts = []; - 
parts.push(this.request.method); - parts.push(this.request.endpoint.host.toLowerCase()); - parts.push(this.request.pathname()); - parts.push(AWS.util.queryParamsToString(this.request.params)); - return parts.join('\n'); - } - -}); - -/** - * @api private - */ -module.exports = AWS.Signers.V2; - - -/***/ }), -/* 221 */, -/* 222 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const s3Client_1 = __importDefault(__webpack_require__(882)); -exports.default = (bucketName) => __awaiter(void 0, void 0, void 0, function* () { - try { - yield s3Client_1.default.headBucket({ Bucket: bucketName }).promise(); - return true; - } - catch (e) { - console.log(e); - return false; - } -}); - - -/***/ }), -/* 223 */, -/* 224 */, -/* 225 */, -/* 226 */, -/* 227 */, -/* 228 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.requiredEnvVars = void 0; -const github = __importStar(__webpack_require__(469)); -const githubCore = __importStar(__webpack_require__(470)); -const s3Client_1 = __importDefault(__webpack_require__(882)); -const s3UploadDirectory_1 = __importDefault(__webpack_require__(704)); -const validateEnvVars_1 = __importDefault(__webpack_require__(732)); -const checkBucketExists_1 = __importDefault(__webpack_require__(222)); -const githubClient_1 = __importDefault(__webpack_require__(119)); -const deactivateDeployments_1 = __importDefault(__webpack_require__(14)); -const dayjs_1 = __importDefault(__webpack_require__(874)); -exports.requiredEnvVars = ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'GITHUB_TOKEN']; -exports.default = (bucketName, uploadDirectory, environmentPrefix) => __awaiter(void 0, void 0, void 0, function* () { - const websiteUrl = `https://s3.amazonaws.com/${bucketName}/index.html`; - const { repo } = github.context; - const branchName = github.context.ref; - validateEnvVars_1.default(exports.requiredEnvVars); - const bucketExists = yield checkBucketExists_1.default(bucketName); - if (!bucketExists) { - console.log(`S3 bucket does not exist. Creating ${bucketName}...`); - yield s3Client_1.default.createBucket({ Bucket: bucketName }).promise(); - yield s3Client_1.default.putBucketOwnershipControls({ - Bucket: bucketName, - OwnershipControls: { - Rules: [ - { - ObjectOwnership: 'ObjectWriter' - } - ] - } - }).promise(); - yield s3Client_1.default.putPublicAccessBlock({ - Bucket: bucketName, - PublicAccessBlockConfiguration: { - BlockPublicAcls: false, - BlockPublicPolicy: false, - IgnorePublicAcls: false, - RestrictPublicBuckets: false - } - }).promise(); - console.log('Configuring bucket website...'); - yield s3Client_1.default.putBucketWebsite({ - Bucket: bucketName, - WebsiteConfiguration: { - IndexDocument: { Suffix: 'index.html' }, - ErrorDocument: { Key: 'index.html' } - } - }).promise(); - } - else { - console.log('S3 Bucket already exists. 
Skipping creation...'); - } - yield deactivateDeployments_1.default(repo, environmentPrefix); - const deployment = yield githubClient_1.default.repos.createDeployment(Object.assign(Object.assign({}, repo), { ref: `${branchName}`, environment: `${environmentPrefix || 'ACTION-'}${dayjs_1.default().format('DD-MM-YYYY-hh:mma')}`, auto_merge: false, transient_environment: true, required_contexts: [] })); - if (isSuccessResponse(deployment.data)) { - yield githubClient_1.default.repos.createDeploymentStatus(Object.assign(Object.assign({}, repo), { deployment_id: deployment.data.id, state: 'in_progress' })); - console.log('Uploading files...'); - yield s3UploadDirectory_1.default(bucketName, uploadDirectory); - yield githubClient_1.default.repos.createDeploymentStatus(Object.assign(Object.assign({}, repo), { deployment_id: deployment.data.id, state: 'success', environment_url: websiteUrl })); - githubCore.setOutput('websiteUrl', websiteUrl); - console.log(`Website URL: ${websiteUrl}`); - } -}); -function isSuccessResponse(object) { - return 'id' in object; -} - - -/***/ }), -/* 229 */ -/***/ (function(module) { - -module.exports = require("domain"); - -/***/ }), -/* 230 */, -/* 231 */, -/* 232 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ 5566: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var AWS = __webpack_require__(395); +var AWS = __nccwpck_require__(8437); /** * @api private */ @@ -10328,2591 +7057,2929 @@ module.exports = resolveRegionalEndpointsFlag; /***/ }), -/* 233 */, -/* 234 */ -/***/ (function(module, __unusedexports, __webpack_require__) { -var util = __webpack_require__(153); +/***/ 8437: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var region_utils = __webpack_require__(171); -var isFipsRegion = region_utils.isFipsRegion; -var getRealRegion = region_utils.getRealRegion; - -util.isBrowser = function() { return false; }; -util.isNode = function() { return true; }; - -// node.js specific modules -util.crypto.lib = __webpack_require__(417); -util.Buffer = __webpack_require__(293).Buffer; -util.domain = __webpack_require__(229); -util.stream = __webpack_require__(794); -util.url = __webpack_require__(835); -util.querystring = __webpack_require__(191); -util.environment = 'nodejs'; -util.createEventStream = util.stream.Readable ? - __webpack_require__(445).createEventStream : __webpack_require__(661).createEventStream; -util.realClock = __webpack_require__(693); -util.clientSideMonitoring = { - Publisher: __webpack_require__(701).Publisher, - configProvider: __webpack_require__(762), -}; -util.iniLoader = __webpack_require__(892).iniLoader; -util.getSystemErrorName = __webpack_require__(669).getSystemErrorName; - -util.loadConfig = function(options) { - var envValue = options.environmentVariableSelector(process.env); - if (envValue !== undefined) { - return envValue; - } - - var configFile = {}; - try { - configFile = util.iniLoader ? 
util.iniLoader.loadFrom({ - isConfig: true, - filename: process.env[util.sharedConfigFileEnv] - }) : {}; - } catch (e) {} - var sharedFileConfig = configFile[ - process.env.AWS_PROFILE || util.defaultProfile - ] || {}; - var configValue = options.configFileSelector(sharedFileConfig); - if (configValue !== undefined) { - return configValue; - } - - if (typeof options.default === 'function') { - return options.default(); - } - return options.default; -}; - -var AWS; +/** + * The main AWS namespace + */ +var AWS = { util: __nccwpck_require__(7985) }; /** * @api private + * @!macro [new] nobrowser + * @note This feature is not supported in the browser environment of the SDK. */ -module.exports = AWS = __webpack_require__(395); - -__webpack_require__(923); -__webpack_require__(906); -__webpack_require__(43); -__webpack_require__(543); -__webpack_require__(306); -__webpack_require__(170); -__webpack_require__(540); -__webpack_require__(982); - -// Load the xml2js XML parser -AWS.XML.Parser = __webpack_require__(810); - -// Load Node HTTP client -__webpack_require__(888); - -__webpack_require__(960); - -// Load custom credential providers -__webpack_require__(868); -__webpack_require__(103); -__webpack_require__(426); -__webpack_require__(316); -__webpack_require__(872); -__webpack_require__(634); -__webpack_require__(22); -__webpack_require__(982); -__webpack_require__(114); - -// Setup default providers for credentials chain -// If this changes, please update documentation for -// AWS.CredentialProviderChain.defaultProviders in -// credentials/credential_provider_chain.js -AWS.CredentialProviderChain.defaultProviders = [ - function () { return new AWS.EnvironmentCredentials('AWS'); }, - function () { return new AWS.EnvironmentCredentials('AMAZON'); }, - function () { return new AWS.SsoCredentials(); }, - function () { return new AWS.SharedIniFileCredentials(); }, - function () { return new AWS.ECSCredentials(); }, - function () { return new AWS.ProcessCredentials(); }, - function () { return new AWS.TokenFileWebIdentityCredentials(); }, - function () { return new AWS.EC2MetadataCredentials(); } -]; +var _hidden = {}; _hidden.toString(); // hack to parse macro -// Load custom token providers -__webpack_require__(154); -__webpack_require__(115); -__webpack_require__(858); +/** + * @api private + */ +module.exports = AWS; -// Setup default providers for token chain -// If this changes, please update documentation for -// AWS.TokenProviderChain.defaultProviders in -// token/token_provider_chain.js -AWS.TokenProviderChain.defaultProviders = [ - function () { return new AWS.SSOTokenProvider(); }, -]; +AWS.util.update(AWS, { -var getRegion = function() { - var env = process.env; - var region = env.AWS_REGION || env.AMAZON_REGION; - if (env[AWS.util.configOptInEnv]) { - var toCheck = [ - {filename: env[AWS.util.sharedCredentialsFileEnv]}, - {isConfig: true, filename: env[AWS.util.sharedConfigFileEnv]} - ]; - var iniLoader = AWS.util.iniLoader; - while (!region && toCheck.length) { - var configFile = {}; - var fileInfo = toCheck.shift(); - try { - configFile = iniLoader.loadFrom(fileInfo); - } catch (err) { - if (fileInfo.isConfig) throw err; - } - var profile = configFile[env.AWS_PROFILE || AWS.util.defaultProfile]; - region = profile && profile.region; - } - } - return region; -}; + /** + * @constant + */ + VERSION: '2.1371.0', -var getBooleanValue = function(value) { - return value === 'true' ? true: value === 'false' ? 
false: undefined; -}; + /** + * @api private + */ + Signers: {}, -var USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { - environmentVariableSelector: function(env) { - return getBooleanValue(env['AWS_USE_FIPS_ENDPOINT']); - }, - configFileSelector: function(profile) { - return getBooleanValue(profile['use_fips_endpoint']); + /** + * @api private + */ + Protocol: { + Json: __nccwpck_require__(83), + Query: __nccwpck_require__(761), + Rest: __nccwpck_require__(8200), + RestJson: __nccwpck_require__(5883), + RestXml: __nccwpck_require__(5143) }, - default: false, -}; -var USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { - environmentVariableSelector: function(env) { - return getBooleanValue(env['AWS_USE_DUALSTACK_ENDPOINT']); - }, - configFileSelector: function(profile) { - return getBooleanValue(profile['use_dualstack_endpoint']); + /** + * @api private + */ + XML: { + Builder: __nccwpck_require__(3546), + Parser: null // conditionally set based on environment }, - default: false, -}; -// Update configuration keys -AWS.util.update(AWS.Config.prototype.keys, { - credentials: function () { - var credentials = null; - new AWS.CredentialProviderChain([ - function () { return new AWS.EnvironmentCredentials('AWS'); }, - function () { return new AWS.EnvironmentCredentials('AMAZON'); }, - function () { return new AWS.SharedIniFileCredentials({ disableAssumeRole: true }); } - ]).resolve(function(err, creds) { - if (!err) credentials = creds; - }); - return credentials; - }, - credentialProvider: function() { - return new AWS.CredentialProviderChain(); - }, - logger: function () { - return process.env.AWSJS_DEBUG ? console : null; - }, - region: function() { - var region = getRegion(); - return region ? getRealRegion(region): undefined; - }, - tokenProvider: function() { - return new AWS.TokenProviderChain(); + /** + * @api private + */ + JSON: { + Builder: __nccwpck_require__(7495), + Parser: __nccwpck_require__(5474) }, - useFipsEndpoint: function() { - var region = getRegion(); - return isFipsRegion(region) - ? true - : util.loadConfig(USE_FIPS_ENDPOINT_CONFIG_OPTIONS); + + /** + * @api private + */ + Model: { + Api: __nccwpck_require__(7657), + Operation: __nccwpck_require__(8083), + Shape: __nccwpck_require__(1349), + Paginator: __nccwpck_require__(5938), + ResourceWaiter: __nccwpck_require__(1368) }, - useDualstackEndpoint: function() { - return util.loadConfig(USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS); - } + + /** + * @api private + */ + apiLoader: __nccwpck_require__(2793), + + /** + * @api private + */ + EndpointCache: (__nccwpck_require__(6323)/* .EndpointCache */ .$) }); +__nccwpck_require__(5948); +__nccwpck_require__(8903); +__nccwpck_require__(8110); +__nccwpck_require__(1556); +__nccwpck_require__(4995); +__nccwpck_require__(8652); +__nccwpck_require__(8743); +__nccwpck_require__(7246); +__nccwpck_require__(9897); +__nccwpck_require__(9127); +__nccwpck_require__(3985); -// Reset configuration -AWS.config = new AWS.Config(); +/** + * @readonly + * @return [AWS.SequentialExecutor] a collection of global event listeners that + * are attached to every sent request. 
+ * @see AWS.Request AWS.Request for a list of events to listen for + * @example Logging the time taken to send a request + * AWS.events.on('send', function startSend(resp) { + * resp.startTime = new Date().getTime(); + * }).on('complete', function calculateTime(resp) { + * var time = (new Date().getTime() - resp.startTime) / 1000; + * console.log('Request took ' + time + ' seconds'); + * }); + * + * new AWS.S3().listBuckets(); // prints 'Request took 0.285 seconds' + */ +AWS.events = new AWS.SequentialExecutor(); + +//create endpoint cache lazily +AWS.util.memoizedProperty(AWS, 'endpointCache', function() { + return new AWS.EndpointCache(AWS.config.endpointCacheSize); +}, true); /***/ }), -/* 235 */, -/* 236 */, -/* 237 */, -/* 238 */, -/* 239 */, -/* 240 */, -/* 241 */, -/* 242 */, -/* 243 */, -/* 244 */, -/* 245 */, -/* 246 */, -/* 247 */, -/* 248 */, -/* 249 */, -/* 250 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { -"use strict"; +/***/ 3819: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(8437); +/** + * Represents your AWS security credentials, specifically the + * {accessKeyId}, {secretAccessKey}, and optional {sessionToken}. + * Creating a `Credentials` object allows you to pass around your + * security information to configuration and service objects. + * + * Note that this class typically does not need to be constructed manually, + * as the {AWS.Config} and {AWS.Service} classes both accept simple + * options hashes with the three keys. These structures will be converted + * into Credentials objects automatically. + * + * ## Expiring and Refreshing Credentials + * + * Occasionally credentials can expire in the middle of a long-running + * application. In this case, the SDK will automatically attempt to + * refresh the credentials from the storage location if the Credentials + * class implements the {refresh} method. + * + * If you are implementing a credential storage location, you + * will want to create a subclass of the `Credentials` class and + * override the {refresh} method. This method allows credentials to be + * retrieved from the backing store, be it a file system, database, or + * some network storage. The method should reset the credential attributes + * on the object. + * + * @!attribute expired + * @return [Boolean] whether the credentials have been expired and + * require a refresh. Used in conjunction with {expireTime}. + * @!attribute expireTime + * @return [Date] a time when credentials should be considered expired. Used + * in conjunction with {expired}. + * @!attribute accessKeyId + * @return [String] the AWS access key ID + * @!attribute secretAccessKey + * @return [String] the AWS secret access key + * @!attribute sessionToken + * @return [String] an optional AWS session token + */ +AWS.Credentials = AWS.util.inherit({ + /** + * A credentials object can be created using positional arguments or an options + * hash. + * + * @overload AWS.Credentials(accessKeyId, secretAccessKey, sessionToken=null) + * Creates a Credentials object with a given set of credential information + * as positional arguments. 
+ * @param accessKeyId [String] the AWS access key ID + * @param secretAccessKey [String] the AWS secret access key + * @param sessionToken [String] the optional AWS session token + * @example Create a credentials object with AWS credentials + * var creds = new AWS.Credentials('akid', 'secret', 'session'); + * @overload AWS.Credentials(options) + * Creates a Credentials object with a given set of credential information + * as an options hash. + * @option options accessKeyId [String] the AWS access key ID + * @option options secretAccessKey [String] the AWS secret access key + * @option options sessionToken [String] the optional AWS session token + * @example Create a credentials object with AWS credentials + * var creds = new AWS.Credentials({ + * accessKeyId: 'akid', secretAccessKey: 'secret', sessionToken: 'session' + * }); + */ + constructor: function Credentials() { + // hide secretAccessKey from being displayed with util.inspect + AWS.util.hideProperties(this, ['secretAccessKey']); -Object.defineProperty(exports, '__esModule', { value: true }); + this.expired = false; + this.expireTime = null; + this.refreshCallbacks = []; + if (arguments.length === 1 && typeof arguments[0] === 'object') { + var creds = arguments[0].credentials || arguments[0]; + this.accessKeyId = creds.accessKeyId; + this.secretAccessKey = creds.secretAccessKey; + this.sessionToken = creds.sessionToken; + } else { + this.accessKeyId = arguments[0]; + this.secretAccessKey = arguments[1]; + this.sessionToken = arguments[2]; + } + }, -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + /** + * @return [Integer] the number of seconds before {expireTime} during which + * the credentials will be considered expired. + */ + expiryWindow: 15, -var osName = _interopDefault(__webpack_require__(2)); + /** + * @return [Boolean] whether the credentials object should call {refresh} + * @note Subclasses should override this method to provide custom refresh + * logic. + */ + needsRefresh: function needsRefresh() { + var currentTime = AWS.util.date.getDate().getTime(); + var adjustedTime = new Date(currentTime + this.expiryWindow * 1000); -function getUserAgent() { - try { - return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; - } catch (error) { - if (/wmic os get Caption/.test(error.message)) { - return "Windows "; + if (this.expireTime && adjustedTime > this.expireTime) { + return true; + } else { + return this.expired || !this.accessKeyId || !this.secretAccessKey; } + }, - return ""; - } -} - -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map + /** + * Gets the existing credentials, refreshing them if they are not yet loaded + * or have expired. Users should call this method before using {refresh}, + * as this will not attempt to reload credentials when they are already + * loaded into the object. + * + * @callback callback function(err) + * When this callback is called with no error, it means either credentials + * do not need to be refreshed or refreshed credentials information has + * been loaded into the object (as the `accessKeyId`, `secretAccessKey`, + * and `sessionToken` properties). 
+ * @param err [Error] if an error occurred, this value will be filled + */ + get: function get(callback) { + var self = this; + if (this.needsRefresh()) { + this.refresh(function(err) { + if (!err) self.expired = false; // reset expired flag + if (callback) callback(err); + }); + } else if (callback) { + callback(); + } + }, + /** + * @!method getPromise() + * Returns a 'thenable' promise. + * Gets the existing credentials, refreshing them if they are not yet loaded + * or have expired. Users should call this method before using {refresh}, + * as this will not attempt to reload credentials when they are already + * loaded into the object. + * + * Two callbacks can be provided to the `then` method on the returned promise. + * The first callback will be called if the promise is fulfilled, and the second + * callback will be called if the promise is rejected. + * @callback fulfilledCallback function() + * Called if the promise is fulfilled. When this callback is called, it + * means either credentials do not need to be refreshed or refreshed + * credentials information has been loaded into the object (as the + * `accessKeyId`, `secretAccessKey`, and `sessionToken` properties). + * @callback rejectedCallback function(err) + * Called if the promise is rejected. + * @param err [Error] if an error occurred, this value will be filled + * @return [Promise] A promise that represents the state of the `get` call. + * @example Calling the `getPromise` method. + * var promise = credProvider.getPromise(); + * promise.then(function() { ... }, function(err) { ... }); + */ -/***/ }), -/* 251 */, -/* 252 */, -/* 253 */, -/* 254 */, -/* 255 */, -/* 256 */, -/* 257 */, -/* 258 */, -/* 259 */, -/* 260 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + /** + * @!method refreshPromise() + * Returns a 'thenable' promise. + * Refreshes the credentials. Users should call {get} before attempting + * to forcibly refresh credentials. + * + * Two callbacks can be provided to the `then` method on the returned promise. + * The first callback will be called if the promise is fulfilled, and the second + * callback will be called if the promise is rejected. + * @callback fulfilledCallback function() + * Called if the promise is fulfilled. When this callback is called, it + * means refreshed credentials information has been loaded into the object + * (as the `accessKeyId`, `secretAccessKey`, and `sessionToken` properties). + * @callback rejectedCallback function(err) + * Called if the promise is rejected. + * @param err [Error] if an error occurred, this value will be filled + * @return [Promise] A promise that represents the state of the `refresh` call. + * @example Calling the `refreshPromise` method. + * var promise = credProvider.refreshPromise(); + * promise.then(function() { ... }, function(err) { ... }); + */ -// Note: since nyc uses this module to output coverage, any lines -// that are in the direct sync flow of nyc's outputCoverage are -// ignored, since we can never get coverage for them. -var assert = __webpack_require__(357) -var signals = __webpack_require__(654) + /** + * Refreshes the credentials. Users should call {get} before attempting + * to forcibly refresh credentials. + * + * @callback callback function(err) + * When this callback is called with no error, it means refreshed + * credentials information has been loaded into the object (as the + * `accessKeyId`, `secretAccessKey`, and `sessionToken` properties). 
+ * @param err [Error] if an error occurred, this value will be filled + * @note Subclasses should override this class to reset the + * {accessKeyId}, {secretAccessKey} and optional {sessionToken} + * on the credentials object and then call the callback with + * any error information. + * @see get + */ + refresh: function refresh(callback) { + this.expired = false; + callback(); + }, -var EE = __webpack_require__(614) -/* istanbul ignore if */ -if (typeof EE !== 'function') { - EE = EE.EventEmitter -} + /** + * @api private + * @param callback + */ + coalesceRefresh: function coalesceRefresh(callback, sync) { + var self = this; + if (self.refreshCallbacks.push(callback) === 1) { + self.load(function onLoad(err) { + AWS.util.arrayEach(self.refreshCallbacks, function(callback) { + if (sync) { + callback(err); + } else { + // callback could throw, so defer to ensure all callbacks are notified + AWS.util.defer(function () { + callback(err); + }); + } + }); + self.refreshCallbacks.length = 0; + }); + } + }, -var emitter -if (process.__signal_exit_emitter__) { - emitter = process.__signal_exit_emitter__ -} else { - emitter = process.__signal_exit_emitter__ = new EE() - emitter.count = 0 - emitter.emitted = {} -} + /** + * @api private + * @param callback + */ + load: function load(callback) { + callback(); + } +}); -// Because this emitter is a global, we have to check to see if a -// previous version of this library failed to enable infinite listeners. -// I know what you're about to say. But literally everything about -// signal-exit is a compromise with evil. Get used to it. -if (!emitter.infinite) { - emitter.setMaxListeners(Infinity) - emitter.infinite = true -} +/** + * @api private + */ +AWS.Credentials.addPromisesToClass = function addPromisesToClass(PromiseDependency) { + this.prototype.getPromise = AWS.util.promisifyMethod('get', PromiseDependency); + this.prototype.refreshPromise = AWS.util.promisifyMethod('refresh', PromiseDependency); +}; -module.exports = function (cb, opts) { - assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') +/** + * @api private + */ +AWS.Credentials.deletePromisesFromClass = function deletePromisesFromClass() { + delete this.prototype.getPromise; + delete this.prototype.refreshPromise; +}; - if (loaded === false) { - load() - } +AWS.util.addPromises(AWS.Credentials); - var ev = 'exit' - if (opts && opts.alwaysLast) { - ev = 'afterexit' - } - var remove = function () { - emitter.removeListener(ev, cb) - if (emitter.listeners('exit').length === 0 && - emitter.listeners('afterexit').length === 0) { - unload() - } - } - emitter.on(ev, cb) +/***/ }), - return remove -} +/***/ 7083: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports.unload = unload -function unload () { - if (!loaded) { - return - } - loaded = false +var AWS = __nccwpck_require__(8437); +var STS = __nccwpck_require__(7513); - signals.forEach(function (sig) { - try { - process.removeListener(sig, sigListeners[sig]) - } catch (er) {} - }) - process.emit = originalProcessEmit - process.reallyExit = originalProcessReallyExit - emitter.count -= 1 -} - -function emit (event, code, signal) { - if (emitter.emitted[event]) { - return - } - emitter.emitted[event] = true - emitter.emit(event, code, signal) -} +/** + * Represents temporary credentials retrieved from {AWS.STS}. Without any + * extra parameters, credentials will be fetched from the + * {AWS.STS.getSessionToken} operation. 
If an IAM role is provided, the + * {AWS.STS.assumeRole} operation will be used to fetch credentials for the + * role instead. + * + * AWS.ChainableTemporaryCredentials differs from AWS.TemporaryCredentials in + * the way masterCredentials and refreshes are handled. + * AWS.ChainableTemporaryCredentials refreshes expired credentials using the + * masterCredentials passed by the user to support chaining of STS credentials. + * However, AWS.TemporaryCredentials recursively collapses the masterCredentials + * during instantiation, precluding the ability to refresh credentials which + * require intermediate, temporary credentials. + * + * For example, if the application should use RoleA, which must be assumed from + * RoleB, and the environment provides credentials which can assume RoleB, then + * AWS.ChainableTemporaryCredentials must be used to support refreshing the + * temporary credentials for RoleA: + * + * ```javascript + * var roleACreds = new AWS.ChainableTemporaryCredentials({ + * params: {RoleArn: 'RoleA'}, + * masterCredentials: new AWS.ChainableTemporaryCredentials({ + * params: {RoleArn: 'RoleB'}, + * masterCredentials: new AWS.EnvironmentCredentials('AWS') + * }) + * }); + * ``` + * + * If AWS.TemporaryCredentials had been used in the previous example, + * `roleACreds` would fail to refresh because `roleACreds` would + * use the environment credentials for the AssumeRole request. + * + * Another difference is that AWS.ChainableTemporaryCredentials creates the STS + * service instance during instantiation while AWS.TemporaryCredentials creates + * the STS service instance during the first refresh. Creating the service + * instance during instantiation effectively captures the master credentials + * from the global config, so that subsequent changes to the global config do + * not affect the master credentials used to refresh the temporary credentials. + * + * This allows an instance of AWS.ChainableTemporaryCredentials to be assigned + * to AWS.config.credentials: + * + * ```javascript + * var envCreds = new AWS.EnvironmentCredentials('AWS'); + * AWS.config.credentials = envCreds; + * // masterCredentials will be envCreds + * AWS.config.credentials = new AWS.ChainableTemporaryCredentials({ + * params: {RoleArn: '...'} + * }); + * ``` + * + * Similarly, to use the CredentialProviderChain's default providers as the + * master credentials, simply create a new instance of + * AWS.ChainableTemporaryCredentials: + * + * ```javascript + * AWS.config.credentials = new ChainableTemporaryCredentials({ + * params: {RoleArn: '...'} + * }); + * ``` + * + * @!attribute service + * @return [AWS.STS] the STS service instance used to + * get and refresh temporary credentials from AWS STS. + * @note (see constructor) + */ +AWS.ChainableTemporaryCredentials = AWS.util.inherit(AWS.Credentials, { + /** + * Creates a new temporary credentials object. + * + * @param options [map] a set of options + * @option options params [map] ({}) a map of options that are passed to the + * {AWS.STS.assumeRole} or {AWS.STS.getSessionToken} operations. + * If a `RoleArn` parameter is passed in, credentials will be based on the + * IAM role. If a `SerialNumber` parameter is passed in, {tokenCodeFn} must + * also be passed in or an error will be thrown. + * @option options masterCredentials [AWS.Credentials] the master credentials + * used to get and refresh temporary credentials from AWS STS. By default, + * AWS.config.credentials or AWS.config.credentialProvider will be used. 
+ * @option options tokenCodeFn [Function] (null) Function to provide + * `TokenCode`, if `SerialNumber` is provided for profile in {params}. Function + * is called with value of `SerialNumber` and `callback`, and should provide + * the `TokenCode` or an error to the callback in the format + * `callback(err, token)`. + * @example Creating a new credentials object for generic temporary credentials + * AWS.config.credentials = new AWS.ChainableTemporaryCredentials(); + * @example Creating a new credentials object for an IAM role + * AWS.config.credentials = new AWS.ChainableTemporaryCredentials({ + * params: { + * RoleArn: 'arn:aws:iam::1234567890:role/TemporaryCredentials' + * } + * }); + * @see AWS.STS.assumeRole + * @see AWS.STS.getSessionToken + */ + constructor: function ChainableTemporaryCredentials(options) { + AWS.Credentials.call(this); + options = options || {}; + this.errorCode = 'ChainableTemporaryCredentialsProviderFailure'; + this.expired = true; + this.tokenCodeFn = null; -// { : , ... } -var sigListeners = {} -signals.forEach(function (sig) { - sigListeners[sig] = function listener () { - // If there are no other listeners, an exit is coming! - // Simplest way: remove us and then re-send the signal. - // We know that this will kill the process, so we can - // safely emit now. - var listeners = process.listeners(sig) - if (listeners.length === emitter.count) { - unload() - emit('exit', null, sig) - /* istanbul ignore next */ - emit('afterexit', null, sig) - /* istanbul ignore next */ - process.kill(process.pid, sig) + var params = AWS.util.copy(options.params) || {}; + if (params.RoleArn) { + params.RoleSessionName = params.RoleSessionName || 'temporary-credentials'; } - } -}) - -module.exports.signals = function () { - return signals -} - -module.exports.load = load - -var loaded = false - -function load () { - if (loaded) { - return - } - loaded = true - - // This is the number of onSignalExit's that are in play. - // It's important so that we can count the correct number of - // listeners on signals, and don't wait for the other one to - // handle it instead of us. - emitter.count += 1 - - signals = signals.filter(function (sig) { - try { - process.on(sig, sigListeners[sig]) - return true - } catch (er) { - return false + if (params.SerialNumber) { + if (!options.tokenCodeFn || (typeof options.tokenCodeFn !== 'function')) { + throw new AWS.util.error( + new Error('tokenCodeFn must be a function when params.SerialNumber is given'), + {code: this.errorCode} + ); + } else { + this.tokenCodeFn = options.tokenCodeFn; + } } - }) + var config = AWS.util.merge( + { + params: params, + credentials: options.masterCredentials || AWS.config.credentials + }, + options.stsConfig || {} + ); + this.service = new STS(config); + }, - process.emit = processEmit - process.reallyExit = processReallyExit -} + /** + * Refreshes credentials using {AWS.STS.assumeRole} or + * {AWS.STS.getSessionToken}, depending on whether an IAM role ARN was passed + * to the credentials {constructor}. + * + * @callback callback function(err) + * Called when the STS service responds (or fails). When + * this callback is called with no error, it means that the credentials + * information has been loaded into the object (as the `accessKeyId`, + * `secretAccessKey`, and `sessionToken` properties). 
+ * @param err [Error] if an error occurred, this value will be filled + * @see AWS.Credentials.get + */ + refresh: function refresh(callback) { + this.coalesceRefresh(callback || AWS.util.fn.callback); + }, -var originalProcessReallyExit = process.reallyExit -function processReallyExit (code) { - process.exitCode = code || 0 - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - /* istanbul ignore next */ - originalProcessReallyExit.call(process, process.exitCode) -} + /** + * @api private + * @param callback + */ + load: function load(callback) { + var self = this; + var operation = self.service.config.params.RoleArn ? 'assumeRole' : 'getSessionToken'; + this.getTokenCode(function (err, tokenCode) { + var params = {}; + if (err) { + callback(err); + return; + } + if (tokenCode) { + params.TokenCode = tokenCode; + } + self.service[operation](params, function (err, data) { + if (!err) { + self.service.credentialsFrom(data, self); + } + callback(err); + }); + }); + }, -var originalProcessEmit = process.emit -function processEmit (ev, arg) { - if (ev === 'exit') { - if (arg !== undefined) { - process.exitCode = arg + /** + * @api private + */ + getTokenCode: function getTokenCode(callback) { + var self = this; + if (this.tokenCodeFn) { + this.tokenCodeFn(this.service.config.params.SerialNumber, function (err, token) { + if (err) { + var message = err; + if (err instanceof Error) { + message = err.message; + } + callback( + AWS.util.error( + new Error('Error fetching MFA token: ' + message), + { code: self.errorCode} + ) + ); + return; + } + callback(null, token); + }); + } else { + callback(null); } - var ret = originalProcessEmit.apply(this, arguments) - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - return ret - } else { - return originalProcessEmit.apply(this, arguments) } -} - - -/***/ }), -/* 261 */, -/* 262 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; +}); -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Context = void 0; -const fs_1 = __webpack_require__(747); -const os_1 = __webpack_require__(87); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; - } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; - } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); - } -} -exports.Context = Context; -//# 
sourceMappingURL=context.js.map /***/ }), -/* 263 */, -/* 264 */, -/* 265 */ -/***/ (function(module, __unusedexports, __webpack_require__) { -var property = __webpack_require__(153).property; +/***/ 3498: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { -function Paginator(name, paginator) { - property(this, 'inputToken', paginator.input_token); - property(this, 'limitKey', paginator.limit_key); - property(this, 'moreResults', paginator.more_results); - property(this, 'outputToken', paginator.output_token); - property(this, 'resultKey', paginator.result_key); -} +var AWS = __nccwpck_require__(8437); +var CognitoIdentity = __nccwpck_require__(8291); +var STS = __nccwpck_require__(7513); /** - * @api private + * Represents credentials retrieved from STS Web Identity Federation using + * the Amazon Cognito Identity service. + * + * By default this provider gets credentials using the + * {AWS.CognitoIdentity.getCredentialsForIdentity} service operation, which + * requires either an `IdentityId` or an `IdentityPoolId` (Amazon Cognito + * Identity Pool ID), which is used to call {AWS.CognitoIdentity.getId} to + * obtain an `IdentityId`. If the identity or identity pool is not configured in + * the Amazon Cognito Console to use IAM roles with the appropriate permissions, + * then additionally a `RoleArn` is required containing the ARN of the IAM trust + * policy for the Amazon Cognito role that the user will log into. If a `RoleArn` + * is provided, then this provider gets credentials using the + * {AWS.STS.assumeRoleWithWebIdentity} service operation, after first getting an + * Open ID token from {AWS.CognitoIdentity.getOpenIdToken}. + * + * In addition, if this credential provider is used to provide authenticated + * login, the `Logins` map may be set to the tokens provided by the respective + * identity providers. See {constructor} for an example on creating a credentials + * object with proper property values. + * + * ## Refreshing Credentials from Identity Service + * + * In addition to AWS credentials expiring after a given amount of time, the + * login token from the identity provider will also expire. Once this token + * expires, it will not be usable to refresh AWS credentials, and another + * token will be needed. The SDK does not manage refreshing of the token value, + * but this can be done through a "refresh token" supported by most identity + * providers. Consult the documentation for the identity provider for refreshing + * tokens. Once the refreshed token is acquired, you should make sure to update + * this new token in the credentials object's {params} property. The following + * code will update the WebIdentityToken, assuming you have retrieved an updated + * token from the identity provider: + * + * ```javascript + * AWS.config.credentials.params.Logins['graph.facebook.com'] = updatedToken; + * ``` + * + * Future calls to `credentials.refresh()` will now use the new token. + * + * @!attribute params + * @return [map] the map of params passed to + * {AWS.CognitoIdentity.getId}, + * {AWS.CognitoIdentity.getOpenIdToken}, and + * {AWS.STS.assumeRoleWithWebIdentity}. To update the token, set the + * `params.WebIdentityToken` property. + * @!attribute data + * @return [map] the raw data response from the call to + * {AWS.CognitoIdentity.getCredentialsForIdentity}, or + * {AWS.STS.assumeRoleWithWebIdentity}. Use this if you want to get + * access to other properties from the response. 
+ * @!attribute identityId + * @return [String] the Cognito ID returned by the last call to + * {AWS.CognitoIdentity.getOpenIdToken}. This ID represents the actual + * final resolved identity ID from Amazon Cognito. */ -module.exports = Paginator; - - -/***/ }), -/* 266 */, -/* 267 */, -/* 268 */, -/* 269 */, -/* 270 */ -/***/ (function(module) { +AWS.CognitoIdentityCredentials = AWS.util.inherit(AWS.Credentials, { + /** + * @api private + */ + localStorageKey: { + id: 'aws.cognito.identity-id.', + providers: 'aws.cognito.identity-providers.' + }, -module.exports = {"pagination":{}}; + /** + * Creates a new credentials object. + * @example Creating a new credentials object + * AWS.config.credentials = new AWS.CognitoIdentityCredentials({ + * + * // either IdentityPoolId or IdentityId is required + * // See the IdentityPoolId param for AWS.CognitoIdentity.getID (linked below) + * // See the IdentityId param for AWS.CognitoIdentity.getCredentialsForIdentity + * // or AWS.CognitoIdentity.getOpenIdToken (linked below) + * IdentityPoolId: 'us-east-1:1699ebc0-7900-4099-b910-2df94f52a030', + * IdentityId: 'us-east-1:128d0a74-c82f-4553-916d-90053e4a8b0f' + * + * // optional, only necessary when the identity pool is not configured + * // to use IAM roles in the Amazon Cognito Console + * // See the RoleArn param for AWS.STS.assumeRoleWithWebIdentity (linked below) + * RoleArn: 'arn:aws:iam::1234567890:role/MYAPP-CognitoIdentity', + * + * // optional tokens, used for authenticated login + * // See the Logins param for AWS.CognitoIdentity.getID (linked below) + * Logins: { + * 'graph.facebook.com': 'FBTOKEN', + * 'www.amazon.com': 'AMAZONTOKEN', + * 'accounts.google.com': 'GOOGLETOKEN', + * 'api.twitter.com': 'TWITTERTOKEN', + * 'www.digits.com': 'DIGITSTOKEN' + * }, + * + * // optional name, defaults to web-identity + * // See the RoleSessionName param for AWS.STS.assumeRoleWithWebIdentity (linked below) + * RoleSessionName: 'web', + * + * // optional, only necessary when application runs in a browser + * // and multiple users are signed in at once, used for caching + * LoginId: 'example@gmail.com' + * + * }, { + * // optionally provide configuration to apply to the underlying service clients + * // if configuration is not provided, then configuration will be pulled from AWS.config + * + * // region should match the region your identity pool is located in + * region: 'us-east-1', + * + * // specify timeout options + * httpOptions: { + * timeout: 100 + * } + * }); + * @see AWS.CognitoIdentity.getId + * @see AWS.CognitoIdentity.getCredentialsForIdentity + * @see AWS.STS.assumeRoleWithWebIdentity + * @see AWS.CognitoIdentity.getOpenIdToken + * @see AWS.Config + * @note If a region is not provided in the global AWS.config, or + * specified in the `clientConfig` to the CognitoIdentityCredentials + * constructor, you may encounter a 'Missing credentials in config' error + * when calling making a service call. 
+ */ + constructor: function CognitoIdentityCredentials(params, clientConfig) { + AWS.Credentials.call(this); + this.expired = true; + this.params = params; + this.data = null; + this._identityId = null; + this._clientConfig = AWS.util.copy(clientConfig || {}); + this.loadCachedId(); + var self = this; + Object.defineProperty(this, 'identityId', { + get: function() { + self.loadCachedId(); + return self._identityId || self.params.IdentityId; + }, + set: function(identityId) { + self._identityId = identityId; + } + }); + }, -/***/ }), -/* 271 */, -/* 272 */, -/* 273 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + /** + * Refreshes credentials using {AWS.CognitoIdentity.getCredentialsForIdentity}, + * or {AWS.STS.assumeRoleWithWebIdentity}. + * + * @callback callback function(err) + * Called when the STS service responds (or fails). When + * this callback is called with no error, it means that the credentials + * information has been loaded into the object (as the `accessKeyId`, + * `secretAccessKey`, and `sessionToken` properties). + * @param err [Error] if an error occurred, this value will be filled + * @see AWS.Credentials.get + */ + refresh: function refresh(callback) { + this.coalesceRefresh(callback || AWS.util.fn.callback); + }, -var EndpointMode = __webpack_require__(587)(); + /** + * @api private + * @param callback + */ + load: function load(callback) { + var self = this; + self.createClients(); + self.data = null; + self._identityId = null; + self.getId(function(err) { + if (!err) { + if (!self.params.RoleArn) { + self.getCredentialsForIdentity(callback); + } else { + self.getCredentialsFromSTS(callback); + } + } else { + self.clearIdOnNotAuthorized(err); + callback(err); + } + }); + }, -var ENV_ENDPOINT_MODE_NAME = 'AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE'; -var CONFIG_ENDPOINT_MODE_NAME = 'ec2_metadata_service_endpoint_mode'; - -var getEndpointModeConfigOptions = function() { - return { - environmentVariableSelector: function(env) { return env[ENV_ENDPOINT_MODE_NAME]; }, - configFileSelector: function(profile) { return profile[CONFIG_ENDPOINT_MODE_NAME]; }, - default: EndpointMode.IPv4, - }; -}; - -module.exports = getEndpointModeConfigOptions; + /** + * Clears the cached Cognito ID associated with the currently configured + * identity pool ID. Use this to manually invalidate your cache if + * the identity pool ID was deleted. + */ + clearCachedId: function clearCache() { + this._identityId = null; + delete this.params.IdentityId; + var poolId = this.params.IdentityPoolId; + var loginId = this.params.LoginId || ''; + delete this.storage[this.localStorageKey.id + poolId + loginId]; + delete this.storage[this.localStorageKey.providers + poolId + loginId]; + }, -/***/ }), -/* 274 */, -/* 275 */, -/* 276 */, -/* 277 */, -/* 278 */, -/* 279 */, -/* 280 */ -/***/ (function(module) { + /** + * @api private + */ + clearIdOnNotAuthorized: function clearIdOnNotAuthorized(err) { + var self = this; + if (err.code == 'NotAuthorizedException') { + self.clearCachedId(); + } + }, -module.exports = register + /** + * Retrieves a Cognito ID, loading from cache if it was already retrieved + * on this device. + * + * @callback callback function(err, identityId) + * @param err [Error, null] an error object if the call failed or null if + * it succeeded. + * @param identityId [String, null] if successful, the callback will return + * the Cognito ID. + * @note If not loaded explicitly, the Cognito ID is loaded and stored in + * localStorage in the browser environment of a device. 
+ * @api private + */ + getId: function getId(callback) { + var self = this; + if (typeof self.params.IdentityId === 'string') { + return callback(null, self.params.IdentityId); + } -function register (state, name, method, options) { - if (typeof method !== 'function') { - throw new Error('method for before hook must be a function') - } + self.cognito.getId(function(err, data) { + if (!err && data.IdentityId) { + self.params.IdentityId = data.IdentityId; + callback(null, data.IdentityId); + } else { + callback(err); + } + }); + }, - if (!options) { - options = {} - } - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options) - }, method)() - } + /** + * @api private + */ + loadCredentials: function loadCredentials(data, credentials) { + if (!data || !credentials) return; + credentials.expired = false; + credentials.accessKeyId = data.Credentials.AccessKeyId; + credentials.secretAccessKey = data.Credentials.SecretKey; + credentials.sessionToken = data.Credentials.SessionToken; + credentials.expireTime = data.Credentials.Expiration; + }, - return Promise.resolve() - .then(function () { - if (!state.registry[name]) { - return method(options) + /** + * @api private + */ + getCredentialsForIdentity: function getCredentialsForIdentity(callback) { + var self = this; + self.cognito.getCredentialsForIdentity(function(err, data) { + if (!err) { + self.cacheId(data); + self.data = data; + self.loadCredentials(self.data, self); + } else { + self.clearIdOnNotAuthorized(err); } + callback(err); + }); + }, - return (state.registry[name]).reduce(function (method, registered) { - return registered.hook.bind(null, method, options) - }, method)() - }) -} + /** + * @api private + */ + getCredentialsFromSTS: function getCredentialsFromSTS(callback) { + var self = this; + self.cognito.getOpenIdToken(function(err, data) { + if (!err) { + self.cacheId(data); + self.params.WebIdentityToken = data.Token; + self.webIdentityCredentials.refresh(function(webErr) { + if (!webErr) { + self.data = self.webIdentityCredentials.data; + self.sts.credentialsFrom(self.data, self); + } + callback(webErr); + }); + } else { + self.clearIdOnNotAuthorized(err); + callback(err); + } + }); + }, + /** + * @api private + */ + loadCachedId: function loadCachedId() { + var self = this; -/***/ }), -/* 281 */, -/* 282 */, -/* 283 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + // in the browser we source default IdentityId from localStorage + if (AWS.util.isBrowser() && !self.params.IdentityId) { + var id = self.getStorage('id'); + if (id && self.params.Logins) { + var actualProviders = Object.keys(self.params.Logins); + var cachedProviders = + (self.getStorage('providers') || '').split(','); -// Generated by CoffeeScript 1.12.7 -(function() { - var NodeType, XMLNode, XMLRaw, - extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, - hasProp = {}.hasOwnProperty; + // only load ID if at least one provider used this ID before + var intersect = cachedProviders.filter(function(n) { + return actualProviders.indexOf(n) !== -1; + }); + if (intersect.length !== 0) { + self.params.IdentityId = id; + } + } else if (id) { + self.params.IdentityId = id; + } + } + }, - NodeType = __webpack_require__(35); + /** + * @api 
private + */ + createClients: function() { + var clientConfig = this._clientConfig; + this.webIdentityCredentials = this.webIdentityCredentials || + new AWS.WebIdentityCredentials(this.params, clientConfig); + if (!this.cognito) { + var cognitoConfig = AWS.util.merge({}, clientConfig); + cognitoConfig.params = this.params; + this.cognito = new CognitoIdentity(cognitoConfig); + } + this.sts = this.sts || new STS(clientConfig); + }, - XMLNode = __webpack_require__(855); + /** + * @api private + */ + cacheId: function cacheId(data) { + this._identityId = data.IdentityId; + this.params.IdentityId = this._identityId; - module.exports = XMLRaw = (function(superClass) { - extend(XMLRaw, superClass); + // cache this IdentityId in browser localStorage if possible + if (AWS.util.isBrowser()) { + this.setStorage('id', data.IdentityId); - function XMLRaw(parent, text) { - XMLRaw.__super__.constructor.call(this, parent); - if (text == null) { - throw new Error("Missing raw text. " + this.debugInfo()); + if (this.params.Logins) { + this.setStorage('providers', Object.keys(this.params.Logins).join(',')); } - this.type = NodeType.Raw; - this.value = this.stringify.raw(text); } + }, - XMLRaw.prototype.clone = function() { - return Object.create(this); - }; + /** + * @api private + */ + getStorage: function getStorage(key) { + return this.storage[this.localStorageKey[key] + this.params.IdentityPoolId + (this.params.LoginId || '')]; + }, - XMLRaw.prototype.toString = function(options) { - return this.options.writer.raw(this, this.options.writer.filterOptions(options)); - }; + /** + * @api private + */ + setStorage: function setStorage(key, val) { + try { + this.storage[this.localStorageKey[key] + this.params.IdentityPoolId + (this.params.LoginId || '')] = val; + } catch (_) {} + }, - return XMLRaw; + /** + * @api private + */ + storage: (function() { + try { + var storage = AWS.util.isBrowser() && window.localStorage !== null && typeof window.localStorage === 'object' ? + window.localStorage : {}; - })(XMLNode); + // Test set/remove which would throw an error in Safari's private browsing + storage['aws.test-storage'] = 'foobar'; + delete storage['aws.test-storage']; -}).call(this); + return storage; + } catch (_) { + return {}; + } + })() +}); /***/ }), -/* 284 */ -/***/ (function(module) { -"use strict"; +/***/ 6965: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = balanced; -function balanced(a, b, str) { - if (a instanceof RegExp) a = maybeMatch(a, str); - if (b instanceof RegExp) b = maybeMatch(b, str); +var AWS = __nccwpck_require__(8437); - var r = range(a, b, str); +/** + * Creates a credential provider chain that searches for AWS credentials + * in a list of credential providers specified by the {providers} property. + * + * By default, the chain will use the {defaultProviders} to resolve credentials. + * These providers will look in the environment using the + * {AWS.EnvironmentCredentials} class with the 'AWS' and 'AMAZON' prefixes. + * + * ## Setting Providers + * + * Each provider in the {providers} list should be a function that returns + * a {AWS.Credentials} object, or a hardcoded credentials object. The function + * form allows for delayed execution of the credential construction. + * + * ## Resolving Credentials from a Chain + * + * Call {resolve} to return the first valid credential object that can be + * loaded by the provider chain. 
+ * + * For example, to resolve a chain with a custom provider that checks a file + * on disk after the set of {defaultProviders}: + * + * ```javascript + * var diskProvider = new AWS.FileSystemCredentials('./creds.json'); + * var chain = new AWS.CredentialProviderChain(); + * chain.providers.push(diskProvider); + * chain.resolve(); + * ``` + * + * The above code will return the `diskProvider` object if the + * file contains credentials and the `defaultProviders` do not contain + * any credential settings. + * + * @!attribute providers + * @return [Array] + * a list of credentials objects or functions that return credentials + * objects. If the provider is a function, the function will be + * executed lazily when the provider needs to be checked for valid + * credentials. By default, this object will be set to the + * {defaultProviders}. + * @see defaultProviders + */ +AWS.CredentialProviderChain = AWS.util.inherit(AWS.Credentials, { - return r && { - start: r[0], - end: r[1], - pre: str.slice(0, r[0]), - body: str.slice(r[0] + a.length, r[1]), - post: str.slice(r[1] + b.length) - }; -} + /** + * Creates a new CredentialProviderChain with a default set of providers + * specified by {defaultProviders}. + */ + constructor: function CredentialProviderChain(providers) { + if (providers) { + this.providers = providers; + } else { + this.providers = AWS.CredentialProviderChain.defaultProviders.slice(0); + } + this.resolveCallbacks = []; + }, -function maybeMatch(reg, str) { - var m = str.match(reg); - return m ? m[0] : null; -} + /** + * @!method resolvePromise() + * Returns a 'thenable' promise. + * Resolves the provider chain by searching for the first set of + * credentials in {providers}. + * + * Two callbacks can be provided to the `then` method on the returned promise. + * The first callback will be called if the promise is fulfilled, and the second + * callback will be called if the promise is rejected. + * @callback fulfilledCallback function(credentials) + * Called if the promise is fulfilled and the provider resolves the chain + * to a credentials object + * @param credentials [AWS.Credentials] the credentials object resolved + * by the provider chain. + * @callback rejectedCallback function(error) + * Called if the promise is rejected. + * @param err [Error] the error object returned if no credentials are found. + * @return [Promise] A promise that represents the state of the `resolve` method call. + * @example Calling the `resolvePromise` method. + * var promise = chain.resolvePromise(); + * promise.then(function(credentials) { ... }, function(err) { ... }); + */ -balanced.range = range; -function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i = ai; + /** + * Resolves the provider chain by searching for the first set of + * credentials in {providers}. + * + * @callback callback function(err, credentials) + * Called when the provider resolves the chain to a credentials object + * or null if no credentials can be found. + * + * @param err [Error] the error object returned if no credentials are + * found. + * @param credentials [AWS.Credentials] the credentials object resolved + * by the provider chain. + * @return [AWS.CredentialProviderChain] the provider, for chaining. 
+ */ + resolve: function resolve(callback) { + var self = this; + if (self.providers.length === 0) { + callback(new Error('No providers')); + return self; + } - if (ai >= 0 && bi > 0) { - begs = []; - left = str.length; + if (self.resolveCallbacks.push(callback) === 1) { + var index = 0; + var providers = self.providers.slice(0); - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [ begs.pop(), bi ]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; + function resolveNext(err, creds) { + if ((!err && creds) || index === providers.length) { + AWS.util.arrayEach(self.resolveCallbacks, function (callback) { + callback(err, creds); + }); + self.resolveCallbacks.length = 0; + return; } - bi = str.indexOf(b, i + 1); + var provider = providers[index++]; + if (typeof provider === 'function') { + creds = provider.call(); + } else { + creds = provider; + } + + if (creds.get) { + creds.get(function (getErr) { + resolveNext(getErr, getErr ? null : creds); + }); + } else { + resolveNext(null, creds); + } } - i = ai < bi && ai >= 0 ? ai : bi; + resolveNext(); } - if (begs.length) { - result = [ left, right ]; - } + return self; } +}); - return result; -} - +/** + * The default set of providers used by a vanilla CredentialProviderChain. + * + * In the browser: + * + * ```javascript + * AWS.CredentialProviderChain.defaultProviders = [] + * ``` + * + * In Node.js: + * + * ```javascript + * AWS.CredentialProviderChain.defaultProviders = [ + * function () { return new AWS.EnvironmentCredentials('AWS'); }, + * function () { return new AWS.EnvironmentCredentials('AMAZON'); }, + * function () { return new AWS.SsoCredentials(); }, + * function () { return new AWS.SharedIniFileCredentials(); }, + * function () { return new AWS.ECSCredentials(); }, + * function () { return new AWS.ProcessCredentials(); }, + * function () { return new AWS.TokenFileWebIdentityCredentials(); }, + * function () { return new AWS.EC2MetadataCredentials() } + * ] + * ``` + */ +AWS.CredentialProviderChain.defaultProviders = []; -/***/ }), -/* 285 */, -/* 286 */, -/* 287 */, -/* 288 */, -/* 289 */, -/* 290 */, -/* 291 */, -/* 292 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/** + * @api private + */ +AWS.CredentialProviderChain.addPromisesToClass = function addPromisesToClass(PromiseDependency) { + this.prototype.resolvePromise = AWS.util.promisifyMethod('resolve', PromiseDependency); +}; -;(function (sax) { // wrapper for non-node envs - sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } - sax.SAXParser = SAXParser - sax.SAXStream = SAXStream - sax.createStream = createStream +/** + * @api private + */ +AWS.CredentialProviderChain.deletePromisesFromClass = function deletePromisesFromClass() { + delete this.prototype.resolvePromise; +}; - // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. - // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), - // since that's the earliest that a buffer overrun could occur. This way, checks are - // as rare as required, but as often as necessary to ensure never crossing this bound. - // Furthermore, buffers are only tested at most once per write(), so passing a very - // large string into write() might have undesirable effects, but this is manageable by - // the caller, so it is assumed to be safe. 
Thus, a call to write() may, in the extreme - // edge case, result in creating at most one complete copy of the string passed in. - // Set to Infinity to have unlimited buffers. - sax.MAX_BUFFER_LENGTH = 64 * 1024 +AWS.util.addPromises(AWS.CredentialProviderChain); - var buffers = [ - 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', - 'procInstName', 'procInstBody', 'entity', 'attribName', - 'attribValue', 'cdata', 'script' - ] - sax.EVENTS = [ - 'text', - 'processinginstruction', - 'sgmldeclaration', - 'doctype', - 'comment', - 'opentagstart', - 'attribute', - 'opentag', - 'closetag', - 'opencdata', - 'cdata', - 'closecdata', - 'error', - 'end', - 'ready', - 'script', - 'opennamespace', - 'closenamespace' - ] +/***/ }), - function SAXParser (strict, opt) { - if (!(this instanceof SAXParser)) { - return new SAXParser(strict, opt) - } +/***/ 3379: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - var parser = this - clearBuffers(parser) - parser.q = parser.c = '' - parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH - parser.opt = opt || {} - parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags - parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' - parser.tags = [] - parser.closed = parser.closedRoot = parser.sawRoot = false - parser.tag = parser.error = null - parser.strict = !!strict - parser.noscript = !!(strict || parser.opt.noscript) - parser.state = S.BEGIN - parser.strictEntities = parser.opt.strictEntities - parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) - parser.attribList = [] +var AWS = __nccwpck_require__(8437); +__nccwpck_require__(5768); - // namespaces form a prototype chain. - // it always points at the current tag, - // which protos to its parent tag. - if (parser.opt.xmlns) { - parser.ns = Object.create(rootNS) - } +/** + * Represents credentials received from the metadata service on an EC2 instance. + * + * By default, this class will connect to the metadata service using + * {AWS.MetadataService} and attempt to load any available credentials. If it + * can connect, and credentials are available, these will be used with zero + * configuration. + * + * This credentials class will by default timeout after 1 second of inactivity + * and retry 3 times. + * If your requests to the EC2 metadata service are timing out, you can increase + * these values by configuring them directly: + * + * ```javascript + * AWS.config.credentials = new AWS.EC2MetadataCredentials({ + * httpOptions: { timeout: 5000 }, // 5 second timeout + * maxRetries: 10, // retry 10 times + * retryDelayOptions: { base: 200 }, // see AWS.Config for information + * logger: console // see AWS.Config for information + * }); + * ``` + * + * If your requests are timing out in connecting to the metadata service, such + * as when testing on a development machine, you can use the connectTimeout + * option, specified in milliseconds, which also defaults to 1 second. + * + * If the requests failed or returns expired credentials, it will + * extend the expiration of current credential, with a warning message. For more + * information, please go to: + * https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html + * + * @!attribute originalExpiration + * @return [Date] The optional original expiration of the current credential. + * In case of AWS outage, the EC2 metadata will extend expiration of the + * existing credential. 
+ * + * @see AWS.Config.retryDelayOptions + * @see AWS.Config.logger + * + * @!macro nobrowser + */ +AWS.EC2MetadataCredentials = AWS.util.inherit(AWS.Credentials, { + constructor: function EC2MetadataCredentials(options) { + AWS.Credentials.call(this); - // mostly just for error reporting - parser.trackPosition = parser.opt.position !== false - if (parser.trackPosition) { - parser.position = parser.line = parser.column = 0 - } - emit(parser, 'onready') - } + options = options ? AWS.util.copy(options) : {}; + options = AWS.util.merge( + {maxRetries: this.defaultMaxRetries}, options); + if (!options.httpOptions) options.httpOptions = {}; + options.httpOptions = AWS.util.merge( + {timeout: this.defaultTimeout, + connectTimeout: this.defaultConnectTimeout}, + options.httpOptions); - if (!Object.create) { - Object.create = function (o) { - function F () {} - F.prototype = o - var newf = new F() - return newf - } - } + this.metadataService = new AWS.MetadataService(options); + this.logger = options.logger || AWS.config && AWS.config.logger; + }, - if (!Object.keys) { - Object.keys = function (o) { - var a = [] - for (var i in o) if (o.hasOwnProperty(i)) a.push(i) - return a - } - } + /** + * @api private + */ + defaultTimeout: 1000, - function checkBufferLength (parser) { - var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) - var maxActual = 0 - for (var i = 0, l = buffers.length; i < l; i++) { - var len = parser[buffers[i]].length - if (len > maxAllowed) { - // Text/cdata nodes can get big, and since they're buffered, - // we can get here under normal conditions. - // Avoid issues by emitting the text node now, - // so at least it won't get any bigger. - switch (buffers[i]) { - case 'textNode': - closeText(parser) - break + /** + * @api private + */ + defaultConnectTimeout: 1000, - case 'cdata': - emitNode(parser, 'oncdata', parser.cdata) - parser.cdata = '' - break + /** + * @api private + */ + defaultMaxRetries: 3, - case 'script': - emitNode(parser, 'onscript', parser.script) - parser.script = '' - break + /** + * The original expiration of the current credential. In case of AWS + * outage, the EC2 metadata will extend expiration of the existing + * credential. + */ + originalExpiration: undefined, - default: - error(parser, 'Max buffer length exceeded: ' + buffers[i]) + /** + * Loads the credentials from the instance metadata service + * + * @callback callback function(err) + * Called when the instance metadata service responds (or fails). When + * this callback is called with no error, it means that the credentials + * information has been loaded into the object (as the `accessKeyId`, + * `secretAccessKey`, and `sessionToken` properties). + * @param err [Error] if an error occurred, this value will be filled + * @see get + */ + refresh: function refresh(callback) { + this.coalesceRefresh(callback || AWS.util.fn.callback); + }, + + /** + * @api private + * @param callback + */ + load: function load(callback) { + var self = this; + self.metadataService.loadCredentials(function(err, creds) { + if (err) { + if (self.hasLoadedCredentials()) { + self.extendExpirationIfExpired(); + callback(); + } else { + callback(err); } + } else { + self.setCredentials(creds); + self.extendExpirationIfExpired(); + callback(); } - maxActual = Math.max(maxActual, len) - } - // schedule the next check for the earliest possible buffer overrun. 
- var m = sax.MAX_BUFFER_LENGTH - maxActual - parser.bufferCheckPosition = m + parser.position - } + }); + }, - function clearBuffers (parser) { - for (var i = 0, l = buffers.length; i < l; i++) { - parser[buffers[i]] = '' - } - } + /** + * Whether this credential has been loaded. + * @api private + */ + hasLoadedCredentials: function hasLoadedCredentials() { + return this.AccessKeyId && this.secretAccessKey; + }, - function flushBuffers (parser) { - closeText(parser) - if (parser.cdata !== '') { - emitNode(parser, 'oncdata', parser.cdata) - parser.cdata = '' - } - if (parser.script !== '') { - emitNode(parser, 'onscript', parser.script) - parser.script = '' + /** + * if expired, extend the expiration by 15 minutes base plus a jitter of 5 + * minutes range. + * @api private + */ + extendExpirationIfExpired: function extendExpirationIfExpired() { + if (this.needsRefresh()) { + this.originalExpiration = this.originalExpiration || this.expireTime; + this.expired = false; + var nextTimeout = 15 * 60 + Math.floor(Math.random() * 5 * 60); + var currentTime = AWS.util.date.getDate().getTime(); + this.expireTime = new Date(currentTime + nextTimeout * 1000); + // TODO: add doc link; + this.logger.warn('Attempting credential expiration extension due to a ' + + 'credential service availability issue. A refresh of these ' + + 'credentials will be attempted again at ' + this.expireTime + + '\nFor more information, please visit: https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html'); } - } + }, - SAXParser.prototype = { - end: function () { end(this) }, - write: write, - resume: function () { this.error = null; return this }, - close: function () { return this.write(null) }, - flush: function () { flushBuffers(this) } + /** + * Update the credential with new credential responded from EC2 metadata + * service. + * @api private + */ + setCredentials: function setCredentials(creds) { + var currentTime = AWS.util.date.getDate().getTime(); + var expireTime = new Date(creds.Expiration); + this.expired = currentTime >= expireTime ? true : false; + this.metadata = creds; + this.accessKeyId = creds.AccessKeyId; + this.secretAccessKey = creds.SecretAccessKey; + this.sessionToken = creds.Token; + this.expireTime = expireTime; } +}); - var Stream - try { - Stream = __webpack_require__(794).Stream - } catch (ex) { - Stream = function () {} - } - var streamWraps = sax.EVENTS.filter(function (ev) { - return ev !== 'error' && ev !== 'end' - }) +/***/ }), - function createStream (strict, opt) { - return new SAXStream(strict, opt) - } +/***/ 645: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - function SAXStream (strict, opt) { - if (!(this instanceof SAXStream)) { - return new SAXStream(strict, opt) - } +var AWS = __nccwpck_require__(8437); - Stream.apply(this) +/** + * Represents credentials received from relative URI specified in the ECS container. + * + * This class will request refreshable credentials from the relative URI + * specified by the AWS_CONTAINER_CREDENTIALS_RELATIVE_URI or the + * AWS_CONTAINER_CREDENTIALS_FULL_URI environment variable. If valid credentials + * are returned in the response, these will be used with zero configuration. + * + * This credentials class will by default timeout after 1 second of inactivity + * and retry 3 times. 
+ * If your requests to the relative URI are timing out, you can increase + * the value by configuring them directly: + * + * ```javascript + * AWS.config.credentials = new AWS.ECSCredentials({ + * httpOptions: { timeout: 5000 }, // 5 second timeout + * maxRetries: 10, // retry 10 times + * retryDelayOptions: { base: 200 } // see AWS.Config for information + * }); + * ``` + * + * @see AWS.Config.retryDelayOptions + * + * @!macro nobrowser + */ +AWS.ECSCredentials = AWS.RemoteCredentials; - this._parser = new SAXParser(strict, opt) - this.writable = true - this.readable = true - var me = this +/***/ }), - this._parser.onend = function () { - me.emit('end') - } +/***/ 7714: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - this._parser.onerror = function (er) { - me.emit('error', er) +var AWS = __nccwpck_require__(8437); - // if didn't throw, then means error was handled. - // go ahead and clear error, so we can write again. - me._parser.error = null - } +/** + * Represents credentials from the environment. + * + * By default, this class will look for the matching environment variables + * prefixed by a given {envPrefix}. The un-prefixed environment variable names + * for each credential value is listed below: + * + * ```javascript + * accessKeyId: ACCESS_KEY_ID + * secretAccessKey: SECRET_ACCESS_KEY + * sessionToken: SESSION_TOKEN + * ``` + * + * With the default prefix of 'AWS', the environment variables would be: + * + * AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN + * + * @!attribute envPrefix + * @readonly + * @return [String] the prefix for the environment variable names excluding + * the separating underscore ('_'). + */ +AWS.EnvironmentCredentials = AWS.util.inherit(AWS.Credentials, { - this._decoder = null + /** + * Creates a new EnvironmentCredentials class with a given variable + * prefix {envPrefix}. For example, to load credentials using the 'AWS' + * prefix: + * + * ```javascript + * var creds = new AWS.EnvironmentCredentials('AWS'); + * creds.accessKeyId == 'AKID' // from AWS_ACCESS_KEY_ID env var + * ``` + * + * @param envPrefix [String] the prefix to use (e.g., 'AWS') for environment + * variables. Do not include the separating underscore. + */ + constructor: function EnvironmentCredentials(envPrefix) { + AWS.Credentials.call(this); + this.envPrefix = envPrefix; + this.get(function() {}); + }, - streamWraps.forEach(function (ev) { - Object.defineProperty(me, 'on' + ev, { - get: function () { - return me._parser['on' + ev] - }, - set: function (h) { - if (!h) { - me.removeAllListeners(ev) - me._parser['on' + ev] = h - return h - } - me.on(ev, h) - }, - enumerable: true, - configurable: false - }) - }) - } + /** + * Loads credentials from the environment using the prefixed + * environment variables. + * + * @callback callback function(err) + * Called after the (prefixed) ACCESS_KEY_ID, SECRET_ACCESS_KEY, and + * SESSION_TOKEN environment variables are read. When this callback is + * called with no error, it means that the credentials information has + * been loaded into the object (as the `accessKeyId`, `secretAccessKey`, + * and `sessionToken` properties). 
+ * @param err [Error] if an error occurred, this value will be filled + * @see get + */ + refresh: function refresh(callback) { + if (!callback) callback = AWS.util.fn.callback; - SAXStream.prototype = Object.create(Stream.prototype, { - constructor: { - value: SAXStream + if (!process || !process.env) { + callback(AWS.util.error( + new Error('No process info or environment variables available'), + { code: 'EnvironmentCredentialsProviderFailure' } + )); + return; } - }) - SAXStream.prototype.write = function (data) { - if (typeof Buffer === 'function' && - typeof Buffer.isBuffer === 'function' && - Buffer.isBuffer(data)) { - if (!this._decoder) { - var SD = __webpack_require__(304).StringDecoder - this._decoder = new SD('utf8') + var keys = ['ACCESS_KEY_ID', 'SECRET_ACCESS_KEY', 'SESSION_TOKEN']; + var values = []; + + for (var i = 0; i < keys.length; i++) { + var prefix = ''; + if (this.envPrefix) prefix = this.envPrefix + '_'; + values[i] = process.env[prefix + keys[i]]; + if (!values[i] && keys[i] !== 'SESSION_TOKEN') { + callback(AWS.util.error( + new Error('Variable ' + prefix + keys[i] + ' not set.'), + { code: 'EnvironmentCredentialsProviderFailure' } + )); + return; } - data = this._decoder.write(data) } - this._parser.write(data.toString()) - this.emit('data', data) - return true + this.expired = false; + AWS.Credentials.apply(this, values); + callback(); } - SAXStream.prototype.end = function (chunk) { - if (chunk && chunk.length) { - this.write(chunk) - } - this._parser.end() - return true - } +}); - SAXStream.prototype.on = function (ev, handler) { - var me = this - if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { - me._parser['on' + ev] = function () { - var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) - args.splice(0, 0, ev) - me.emit.apply(me, args) - } - } - return Stream.prototype.on.call(me, ev, handler) - } +/***/ }), - // this really needs to be replaced with character classes. - // XML allows all manner of ridiculous numbers and digits. - var CDATA = '[CDATA[' - var DOCTYPE = 'DOCTYPE' - var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' - var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' - var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } +/***/ 7454: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - // http://www.w3.org/TR/REC-xml/#NT-NameStartChar - // This implementation works on strings, a single character at a time - // as such, it cannot ever support astral-plane characters (10000-EFFFF) - // without a significant breaking change to either this parser, or the - // JavaScript language. Implementation of an emoji-capable xml parser - // is left as an exercise for the reader. - var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ +var AWS = __nccwpck_require__(8437); - var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ +/** + * Represents credentials from a JSON file on disk. + * If the credentials expire, the SDK can {refresh} the credentials + * from the file. 
+ * + * The format of the file should be similar to the options passed to + * {AWS.Config}: + * + * ```javascript + * {accessKeyId: 'akid', secretAccessKey: 'secret', sessionToken: 'optional'} + * ``` + * + * @example Loading credentials from disk + * var creds = new AWS.FileSystemCredentials('./configuration.json'); + * creds.accessKeyId == 'AKID' + * + * @!attribute filename + * @readonly + * @return [String] the path to the JSON file on disk containing the + * credentials. + * @!macro nobrowser + */ +AWS.FileSystemCredentials = AWS.util.inherit(AWS.Credentials, { - var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ - var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + /** + * @overload AWS.FileSystemCredentials(filename) + * Creates a new FileSystemCredentials object from a filename + * + * @param filename [String] the path on disk to the JSON file to load. + */ + constructor: function FileSystemCredentials(filename) { + AWS.Credentials.call(this); + this.filename = filename; + this.get(function() {}); + }, - function isWhitespace (c) { - return c === ' ' || c === '\n' || c === '\r' || c === '\t' + /** + * Loads the credentials from the {filename} on disk. + * + * @callback callback function(err) + * Called after the JSON file on disk is read and parsed. When this callback + * is called with no error, it means that the credentials information + * has been loaded into the object (as the `accessKeyId`, `secretAccessKey`, + * and `sessionToken` properties). + * @param err [Error] if an error occurred, this value will be filled + * @see get + */ + refresh: function refresh(callback) { + if (!callback) callback = AWS.util.fn.callback; + try { + var creds = JSON.parse(AWS.util.readFileSync(this.filename)); + AWS.Credentials.call(this, creds); + if (!this.accessKeyId || !this.secretAccessKey) { + throw AWS.util.error( + new Error('Credentials not set in ' + this.filename), + { code: 'FileSystemCredentialsProviderFailure' } + ); + } + this.expired = false; + callback(); + } catch (err) { + callback(err); + } } - function isQuote (c) { - return c === '"' || c === '\'' - } +}); - function isAttribEnd (c) { - return c === '>' || isWhitespace(c) - } - function isMatch (regex, c) { - return regex.test(c) - } +/***/ }), - function notMatch (regex, c) { - return !isMatch(regex, c) - } +/***/ 371: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - var S = 0 - sax.STATE = { - BEGIN: S++, // leading byte order mark or whitespace - BEGIN_WHITESPACE: S++, // leading whitespace - TEXT: S++, // general stuff - TEXT_ENTITY: S++, // & and such. - OPEN_WAKA: S++, // < - SGML_DECL: S++, // - SCRIPT: S++, //