diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index c856d76..aa582d6 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -15,7 +15,6 @@ jobs:
npm install
npm run prepare-deps
npm run all
- git diff --exit-code
test: # make sure the action works on a clean machine without building
runs-on: ubuntu-latest
steps:
@@ -23,5 +22,5 @@ jobs:
- uses: ./
with:
version: v1.26
- args: --issues-exit-code=0 ./...
- working-directory: sample
+ args: --issues-exit-code=0 ./sample/...
+ only-new-issues: true
diff --git a/README.md b/README.md
index 725adc5..0237c90 100644
--- a/README.md
+++ b/README.md
@@ -37,6 +37,9 @@ jobs:
# Optional: golangci-lint command line arguments.
# args: --issues-exit-code=0
+
+ # Optional: show only new issues if it's a pull request. The default value is `false`.
+ # only-new-issues: true
```
We recommend running this action in a job separate from other jobs (`go test`, etc)
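For reference, a minimal workflow using the new option might look like the sketch below. This is illustrative only; the checkout step, the `golangci/golangci-lint-action@v1` reference, and the version pin are assumptions for the example and are not part of this diff.

```yaml
name: golangci-lint
on:
  pull_request:
jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: golangci/golangci-lint-action@v1
        with:
          version: v1.26
          # report only issues introduced by the pull request
          only-new-issues: true
```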
diff --git a/action.yml b/action.yml
index 2f05153..dac4c03 100644
--- a/action.yml
+++ b/action.yml
@@ -13,6 +13,14 @@ inputs:
working-directory:
description: 'golangci-lint working directory, default is project root'
required: false
+ github-token:
+    description: 'the token used to fetch the patch of a pull request so that only new issues can be reported'
+ default: ${{ github.token }}
+ required: true
+ only-new-issues:
+    description: 'if set to true and the action runs on a pull request, only newly found issues are reported'
+ default: false
+ required: true
runs:
using: 'node12'
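A hedged sketch of how these new inputs surface in a caller's workflow (the `golangci/golangci-lint-action@v1` reference is assumed for illustration). Because `github-token` defaults to `${{ github.token }}`, it only needs to be set explicitly when the workflow token should be overridden:

```yaml
- uses: golangci/golangci-lint-action@v1
  with:
    version: v1.26
    only-new-issues: true
    # optional override; the default ${{ github.token }} is normally sufficient
    github-token: ${{ secrets.GITHUB_TOKEN }}
```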
diff --git a/dist/post_run/index.js b/dist/post_run/index.js
index f77fea1..809ad74 100644
--- a/dist/post_run/index.js
+++ b/dist/post_run/index.js
@@ -340,610 +340,565 @@ function copyFile(srcFile, destFile, force) {
/***/ }),
-/***/ 9:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/***/ 2:
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
+const os = __webpack_require__(87);
+const macosRelease = __webpack_require__(118);
+const winRelease = __webpack_require__(49);
+
+const osName = (platform, release) => {
+ if (!platform && release) {
+ throw new Error('You can\'t specify a `release` without specifying `platform`');
+ }
+
+ platform = platform || os.platform();
+
+ let id;
+
+ if (platform === 'darwin') {
+ if (!release && os.platform() === 'darwin') {
+ release = os.release();
+ }
+
+ const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS';
+ id = release ? macosRelease(release).name : '';
+ return prefix + (id ? ' ' + id : '');
+ }
+
+ if (platform === 'linux') {
+ if (!release && os.platform() === 'linux') {
+ release = os.release();
+ }
+
+ id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : '';
+ return 'Linux' + (id ? ' ' + id : '');
+ }
+
+ if (platform === 'win32') {
+ if (!release && os.platform() === 'win32') {
+ release = os.release();
+ }
+
+ id = release ? winRelease(release) : '';
+ return 'Windows' + (id ? ' ' + id : '');
+ }
+
+ return platform;
};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
+
+module.exports = osName;
+
+
+/***/ }),
+
+/***/ 9:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var once = __webpack_require__(969);
+
+var noop = function() {};
+
+var isRequest = function(stream) {
+ return stream.setHeader && typeof stream.abort === 'function';
};
-Object.defineProperty(exports, "__esModule", { value: true });
-const os = __importStar(__webpack_require__(87));
-const events = __importStar(__webpack_require__(614));
-const child = __importStar(__webpack_require__(129));
-const path = __importStar(__webpack_require__(622));
-const io = __importStar(__webpack_require__(1));
-const ioUtil = __importStar(__webpack_require__(672));
-/* eslint-disable @typescript-eslint/unbound-method */
-const IS_WINDOWS = process.platform === 'win32';
-/*
- * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
- */
-class ToolRunner extends events.EventEmitter {
- constructor(toolPath, args, options) {
- super();
- if (!toolPath) {
- throw new Error("Parameter 'toolPath' cannot be null or empty.");
- }
- this.toolPath = toolPath;
- this.args = args || [];
- this.options = options || {};
+
+var isChildProcess = function(stream) {
+ return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
+};
+
+var eos = function(stream, opts, callback) {
+ if (typeof opts === 'function') return eos(stream, null, opts);
+ if (!opts) opts = {};
+
+ callback = once(callback || noop);
+
+ var ws = stream._writableState;
+ var rs = stream._readableState;
+ var readable = opts.readable || (opts.readable !== false && stream.readable);
+ var writable = opts.writable || (opts.writable !== false && stream.writable);
+ var cancelled = false;
+
+ var onlegacyfinish = function() {
+ if (!stream.writable) onfinish();
+ };
+
+ var onfinish = function() {
+ writable = false;
+ if (!readable) callback.call(stream);
+ };
+
+ var onend = function() {
+ readable = false;
+ if (!writable) callback.call(stream);
+ };
+
+ var onexit = function(exitCode) {
+ callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
+ };
+
+ var onerror = function(err) {
+ callback.call(stream, err);
+ };
+
+ var onclose = function() {
+ process.nextTick(onclosenexttick);
+ };
+
+ var onclosenexttick = function() {
+ if (cancelled) return;
+ if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
+ if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
+ };
+
+ var onrequest = function() {
+ stream.req.on('finish', onfinish);
+ };
+
+ if (isRequest(stream)) {
+ stream.on('complete', onfinish);
+ stream.on('abort', onclose);
+ if (stream.req) onrequest();
+ else stream.on('request', onrequest);
+ } else if (writable && !ws) { // legacy streams
+ stream.on('end', onlegacyfinish);
+ stream.on('close', onlegacyfinish);
+ }
+
+ if (isChildProcess(stream)) stream.on('exit', onexit);
+
+ stream.on('end', onend);
+ stream.on('finish', onfinish);
+ if (opts.error !== false) stream.on('error', onerror);
+ stream.on('close', onclose);
+
+ return function() {
+ cancelled = true;
+ stream.removeListener('complete', onfinish);
+ stream.removeListener('abort', onclose);
+ stream.removeListener('request', onrequest);
+ if (stream.req) stream.req.removeListener('finish', onfinish);
+ stream.removeListener('end', onlegacyfinish);
+ stream.removeListener('close', onlegacyfinish);
+ stream.removeListener('finish', onfinish);
+ stream.removeListener('exit', onexit);
+ stream.removeListener('end', onend);
+ stream.removeListener('error', onerror);
+ stream.removeListener('close', onclose);
+ };
+};
+
+module.exports = eos;
+
+
+/***/ }),
+
+/***/ 11:
+/***/ (function(module) {
+
+// Returns a wrapper function that returns a wrapped callback
+// The wrapper function should do some stuff, and return a
+// presumably different callback function.
+// This makes sure that own properties are retained, so that
+// decorations and such are not lost along the way.
+module.exports = wrappy
+function wrappy (fn, cb) {
+ if (fn && cb) return wrappy(fn)(cb)
+
+ if (typeof fn !== 'function')
+ throw new TypeError('need wrapper function')
+
+ Object.keys(fn).forEach(function (k) {
+ wrapper[k] = fn[k]
+ })
+
+ return wrapper
+
+ function wrapper() {
+ var args = new Array(arguments.length)
+ for (var i = 0; i < args.length; i++) {
+ args[i] = arguments[i]
}
- _debug(message) {
- if (this.options.listeners && this.options.listeners.debug) {
- this.options.listeners.debug(message);
- }
- }
- _getCommandString(options, noPrefix) {
- const toolPath = this._getSpawnFileName();
- const args = this._getSpawnArgs(options);
- let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
- if (IS_WINDOWS) {
- // Windows + cmd file
- if (this._isCmdFile()) {
- cmd += toolPath;
- for (const a of args) {
- cmd += ` ${a}`;
- }
- }
- // Windows + verbatim
- else if (options.windowsVerbatimArguments) {
- cmd += `"${toolPath}"`;
- for (const a of args) {
- cmd += ` ${a}`;
- }
- }
- // Windows (regular)
- else {
- cmd += this._windowsQuoteCmdArg(toolPath);
- for (const a of args) {
- cmd += ` ${this._windowsQuoteCmdArg(a)}`;
- }
- }
- }
- else {
- // OSX/Linux - this can likely be improved with some form of quoting.
- // creating processes on Unix is fundamentally different than Windows.
- // on Unix, execvp() takes an arg array.
- cmd += toolPath;
- for (const a of args) {
- cmd += ` ${a}`;
- }
- }
- return cmd;
- }
- _processLineBuffer(data, strBuffer, onLine) {
- try {
- let s = strBuffer + data.toString();
- let n = s.indexOf(os.EOL);
- while (n > -1) {
- const line = s.substring(0, n);
- onLine(line);
- // the rest of the string ...
- s = s.substring(n + os.EOL.length);
- n = s.indexOf(os.EOL);
- }
- strBuffer = s;
- }
- catch (err) {
- // streaming lines to console is best effort. Don't fail a build.
- this._debug(`error processing line. Failed with error ${err}`);
- }
- }
- _getSpawnFileName() {
- if (IS_WINDOWS) {
- if (this._isCmdFile()) {
- return process.env['COMSPEC'] || 'cmd.exe';
- }
- }
- return this.toolPath;
- }
- _getSpawnArgs(options) {
- if (IS_WINDOWS) {
- if (this._isCmdFile()) {
- let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
- for (const a of this.args) {
- argline += ' ';
- argline += options.windowsVerbatimArguments
- ? a
- : this._windowsQuoteCmdArg(a);
- }
- argline += '"';
- return [argline];
- }
- }
- return this.args;
- }
- _endsWith(str, end) {
- return str.endsWith(end);
- }
- _isCmdFile() {
- const upperToolPath = this.toolPath.toUpperCase();
- return (this._endsWith(upperToolPath, '.CMD') ||
- this._endsWith(upperToolPath, '.BAT'));
- }
- _windowsQuoteCmdArg(arg) {
- // for .exe, apply the normal quoting rules that libuv applies
- if (!this._isCmdFile()) {
- return this._uvQuoteCmdArg(arg);
- }
- // otherwise apply quoting rules specific to the cmd.exe command line parser.
- // the libuv rules are generic and are not designed specifically for cmd.exe
- // command line parser.
- //
- // for a detailed description of the cmd.exe command line parser, refer to
- // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
- // need quotes for empty arg
- if (!arg) {
- return '""';
- }
- // determine whether the arg needs to be quoted
- const cmdSpecialChars = [
- ' ',
- '\t',
- '&',
- '(',
- ')',
- '[',
- ']',
- '{',
- '}',
- '^',
- '=',
- ';',
- '!',
- "'",
- '+',
- ',',
- '`',
- '~',
- '|',
- '<',
- '>',
- '"'
- ];
- let needsQuotes = false;
- for (const char of arg) {
- if (cmdSpecialChars.some(x => x === char)) {
- needsQuotes = true;
- break;
- }
- }
- // short-circuit if quotes not needed
- if (!needsQuotes) {
- return arg;
- }
- // the following quoting rules are very similar to the rules that by libuv applies.
- //
- // 1) wrap the string in quotes
- //
- // 2) double-up quotes - i.e. " => ""
- //
- // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
- // doesn't work well with a cmd.exe command line.
- //
- // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
- // for example, the command line:
- // foo.exe "myarg:""my val"""
- // is parsed by a .NET console app into an arg array:
- // [ "myarg:\"my val\"" ]
- // which is the same end result when applying libuv quoting rules. although the actual
- // command line from libuv quoting rules would look like:
- // foo.exe "myarg:\"my val\""
- //
- // 3) double-up slashes that precede a quote,
- // e.g. hello \world => "hello \world"
- // hello\"world => "hello\\""world"
- // hello\\"world => "hello\\\\""world"
- // hello world\ => "hello world\\"
- //
- // technically this is not required for a cmd.exe command line, or the batch argument parser.
- // the reasons for including this as a .cmd quoting rule are:
- //
- // a) this is optimized for the scenario where the argument is passed from the .cmd file to an
- // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
- //
- // b) it's what we've been doing previously (by deferring to node default behavior) and we
- // haven't heard any complaints about that aspect.
- //
- // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
- // escaped when used on the command line directly - even though within a .cmd file % can be escaped
- // by using %%.
- //
- // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
- // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
- //
- // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
- // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
- // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
- // to an external program.
- //
- // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
- // % can be escaped within a .cmd file.
- let reverse = '"';
- let quoteHit = true;
- for (let i = arg.length; i > 0; i--) {
- // walk the string in reverse
- reverse += arg[i - 1];
- if (quoteHit && arg[i - 1] === '\\') {
- reverse += '\\'; // double the slash
- }
- else if (arg[i - 1] === '"') {
- quoteHit = true;
- reverse += '"'; // double the quote
- }
- else {
- quoteHit = false;
- }
- }
- reverse += '"';
- return reverse
- .split('')
- .reverse()
- .join('');
- }
- _uvQuoteCmdArg(arg) {
- // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
- // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
- // is used.
- //
- // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
- // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
- // pasting copyright notice from Node within this function:
- //
- // Copyright Joyent, Inc. and other Node contributors. All rights reserved.
- //
- // Permission is hereby granted, free of charge, to any person obtaining a copy
- // of this software and associated documentation files (the "Software"), to
- // deal in the Software without restriction, including without limitation the
- // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- // sell copies of the Software, and to permit persons to whom the Software is
- // furnished to do so, subject to the following conditions:
- //
- // The above copyright notice and this permission notice shall be included in
- // all copies or substantial portions of the Software.
- //
- // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
- // IN THE SOFTWARE.
- if (!arg) {
- // Need double quotation for empty argument
- return '""';
- }
- if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
- // No quotation needed
- return arg;
- }
- if (!arg.includes('"') && !arg.includes('\\')) {
- // No embedded double quotes or backslashes, so I can just wrap
- // quote marks around the whole thing.
- return `"${arg}"`;
- }
- // Expected input/output:
- // input : hello"world
- // output: "hello\"world"
- // input : hello""world
- // output: "hello\"\"world"
- // input : hello\world
- // output: hello\world
- // input : hello\\world
- // output: hello\\world
- // input : hello\"world
- // output: "hello\\\"world"
- // input : hello\\"world
- // output: "hello\\\\\"world"
- // input : hello world\
- // output: "hello world\\" - note the comment in libuv actually reads "hello world\"
- // but it appears the comment is wrong, it should be "hello world\\"
- let reverse = '"';
- let quoteHit = true;
- for (let i = arg.length; i > 0; i--) {
- // walk the string in reverse
- reverse += arg[i - 1];
- if (quoteHit && arg[i - 1] === '\\') {
- reverse += '\\';
- }
- else if (arg[i - 1] === '"') {
- quoteHit = true;
- reverse += '\\';
- }
- else {
- quoteHit = false;
- }
- }
- reverse += '"';
- return reverse
- .split('')
- .reverse()
- .join('');
- }
- _cloneExecOptions(options) {
- options = options || {};
- const result = {
- cwd: options.cwd || process.cwd(),
- env: options.env || process.env,
- silent: options.silent || false,
- windowsVerbatimArguments: options.windowsVerbatimArguments || false,
- failOnStdErr: options.failOnStdErr || false,
- ignoreReturnCode: options.ignoreReturnCode || false,
- delay: options.delay || 10000
- };
- result.outStream = options.outStream || process.stdout;
- result.errStream = options.errStream || process.stderr;
- return result;
- }
- _getSpawnOptions(options, toolPath) {
- options = options || {};
- const result = {};
- result.cwd = options.cwd;
- result.env = options.env;
- result['windowsVerbatimArguments'] =
- options.windowsVerbatimArguments || this._isCmdFile();
- if (options.windowsVerbatimArguments) {
- result.argv0 = `"${toolPath}"`;
- }
- return result;
- }
- /**
- * Exec a tool.
- * Output will be streamed to the live console.
- * Returns promise with return code
- *
- * @param tool path to tool to exec
- * @param options optional exec options. See ExecOptions
- * @returns number
- */
- exec() {
- return __awaiter(this, void 0, void 0, function* () {
- // root the tool path if it is unrooted and contains relative pathing
- if (!ioUtil.isRooted(this.toolPath) &&
- (this.toolPath.includes('/') ||
- (IS_WINDOWS && this.toolPath.includes('\\')))) {
- // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
- this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
- }
- // if the tool is only a file name, then resolve it from the PATH
- // otherwise verify it exists (add extension on Windows if necessary)
- this.toolPath = yield io.which(this.toolPath, true);
- return new Promise((resolve, reject) => {
- this._debug(`exec tool: ${this.toolPath}`);
- this._debug('arguments:');
- for (const arg of this.args) {
- this._debug(` ${arg}`);
- }
- const optionsNonNull = this._cloneExecOptions(this.options);
- if (!optionsNonNull.silent && optionsNonNull.outStream) {
- optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
- }
- const state = new ExecState(optionsNonNull, this.toolPath);
- state.on('debug', (message) => {
- this._debug(message);
- });
- const fileName = this._getSpawnFileName();
- const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
- const stdbuffer = '';
- if (cp.stdout) {
- cp.stdout.on('data', (data) => {
- if (this.options.listeners && this.options.listeners.stdout) {
- this.options.listeners.stdout(data);
- }
- if (!optionsNonNull.silent && optionsNonNull.outStream) {
- optionsNonNull.outStream.write(data);
- }
- this._processLineBuffer(data, stdbuffer, (line) => {
- if (this.options.listeners && this.options.listeners.stdline) {
- this.options.listeners.stdline(line);
- }
- });
- });
- }
- const errbuffer = '';
- if (cp.stderr) {
- cp.stderr.on('data', (data) => {
- state.processStderr = true;
- if (this.options.listeners && this.options.listeners.stderr) {
- this.options.listeners.stderr(data);
- }
- if (!optionsNonNull.silent &&
- optionsNonNull.errStream &&
- optionsNonNull.outStream) {
- const s = optionsNonNull.failOnStdErr
- ? optionsNonNull.errStream
- : optionsNonNull.outStream;
- s.write(data);
- }
- this._processLineBuffer(data, errbuffer, (line) => {
- if (this.options.listeners && this.options.listeners.errline) {
- this.options.listeners.errline(line);
- }
- });
- });
- }
- cp.on('error', (err) => {
- state.processError = err.message;
- state.processExited = true;
- state.processClosed = true;
- state.CheckComplete();
- });
- cp.on('exit', (code) => {
- state.processExitCode = code;
- state.processExited = true;
- this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
- state.CheckComplete();
- });
- cp.on('close', (code) => {
- state.processExitCode = code;
- state.processExited = true;
- state.processClosed = true;
- this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
- state.CheckComplete();
- });
- state.on('done', (error, exitCode) => {
- if (stdbuffer.length > 0) {
- this.emit('stdline', stdbuffer);
- }
- if (errbuffer.length > 0) {
- this.emit('errline', errbuffer);
- }
- cp.removeAllListeners();
- if (error) {
- reject(error);
- }
- else {
- resolve(exitCode);
- }
- });
- if (this.options.input) {
- if (!cp.stdin) {
- throw new Error('child process missing stdin');
- }
- cp.stdin.end(this.options.input);
- }
- });
- });
+ var ret = fn.apply(this, args)
+ var cb = args[args.length-1]
+ if (typeof ret === 'function' && ret !== cb) {
+ Object.keys(cb).forEach(function (k) {
+ ret[k] = cb[k]
+ })
}
+ return ret
+ }
}
-exports.ToolRunner = ToolRunner;
-/**
- * Convert an arg string to an array of args. Handles escaping
- *
- * @param argString string of arguments
- * @returns string[] array of arguments
- */
-function argStringToArray(argString) {
- const args = [];
- let inQuotes = false;
- let escaped = false;
- let arg = '';
- function append(c) {
- // we only escape double quotes.
- if (escaped && c !== '"') {
- arg += '\\';
- }
- arg += c;
- escaped = false;
- }
- for (let i = 0; i < argString.length; i++) {
- const c = argString.charAt(i);
- if (c === '"') {
- if (!escaped) {
- inQuotes = !inQuotes;
- }
- else {
- append(c);
- }
- continue;
- }
- if (c === '\\' && escaped) {
- append(c);
- continue;
- }
- if (c === '\\' && inQuotes) {
- escaped = true;
- continue;
- }
- if (c === ' ' && !inQuotes) {
- if (arg.length > 0) {
- args.push(arg);
- arg = '';
- }
- continue;
- }
- append(c);
- }
- if (arg.length > 0) {
- args.push(arg.trim());
- }
- return args;
+
+
+/***/ }),
+
+/***/ 13:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const assert = __webpack_require__(357)
+const path = __webpack_require__(622)
+const fs = __webpack_require__(747)
+let glob = undefined
+try {
+ glob = __webpack_require__(402)
+} catch (_err) {
+ // treat glob as optional.
}
-exports.argStringToArray = argStringToArray;
-class ExecState extends events.EventEmitter {
- constructor(options, toolPath) {
- super();
- this.processClosed = false; // tracks whether the process has exited and stdio is closed
- this.processError = '';
- this.processExitCode = 0;
- this.processExited = false; // tracks whether the process has exited
- this.processStderr = false; // tracks whether stderr was written to
- this.delay = 10000; // 10 seconds
- this.done = false;
- this.timeout = null;
- if (!toolPath) {
- throw new Error('toolPath must not be empty');
- }
- this.options = options;
- this.toolPath = toolPath;
- if (options.delay) {
- this.delay = options.delay;
- }
- }
- CheckComplete() {
- if (this.done) {
- return;
- }
- if (this.processClosed) {
- this._setResult();
- }
- else if (this.processExited) {
- this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
- }
- }
- _debug(message) {
- this.emit('debug', message);
- }
- _setResult() {
- // determine whether there is an error
- let error;
- if (this.processExited) {
- if (this.processError) {
- error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
- }
- else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
- error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
- }
- else if (this.processStderr && this.options.failOnStdErr) {
- error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
- }
- }
- // clear the timeout
- if (this.timeout) {
- clearTimeout(this.timeout);
- this.timeout = null;
- }
- this.done = true;
- this.emit('done', error, this.processExitCode);
- }
- static HandleTimeout(state) {
- if (state.done) {
- return;
- }
- if (!state.processClosed && state.processExited) {
- const message = `The STDIO streams did not close within ${state.delay /
- 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
- state._debug(message);
- }
- state._setResult();
- }
+
+const defaultGlobOpts = {
+ nosort: true,
+ silent: true
}
-//# sourceMappingURL=toolrunner.js.map
+
+// for EMFILE handling
+let timeout = 0
+
+const isWindows = (process.platform === "win32")
+
+const defaults = options => {
+ const methods = [
+ 'unlink',
+ 'chmod',
+ 'stat',
+ 'lstat',
+ 'rmdir',
+ 'readdir'
+ ]
+ methods.forEach(m => {
+ options[m] = options[m] || fs[m]
+ m = m + 'Sync'
+ options[m] = options[m] || fs[m]
+ })
+
+ options.maxBusyTries = options.maxBusyTries || 3
+ options.emfileWait = options.emfileWait || 1000
+ if (options.glob === false) {
+ options.disableGlob = true
+ }
+ if (options.disableGlob !== true && glob === undefined) {
+ throw Error('glob dependency not found, set `options.disableGlob = true` if intentional')
+ }
+ options.disableGlob = options.disableGlob || false
+ options.glob = options.glob || defaultGlobOpts
+}
+
+const rimraf = (p, options, cb) => {
+ if (typeof options === 'function') {
+ cb = options
+ options = {}
+ }
+
+ assert(p, 'rimraf: missing path')
+ assert.equal(typeof p, 'string', 'rimraf: path should be a string')
+ assert.equal(typeof cb, 'function', 'rimraf: callback function required')
+ assert(options, 'rimraf: invalid options argument provided')
+ assert.equal(typeof options, 'object', 'rimraf: options should be object')
+
+ defaults(options)
+
+ let busyTries = 0
+ let errState = null
+ let n = 0
+
+ const next = (er) => {
+ errState = errState || er
+ if (--n === 0)
+ cb(errState)
+ }
+
+ const afterGlob = (er, results) => {
+ if (er)
+ return cb(er)
+
+ n = results.length
+ if (n === 0)
+ return cb()
+
+ results.forEach(p => {
+ const CB = (er) => {
+ if (er) {
+ if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
+ busyTries < options.maxBusyTries) {
+ busyTries ++
+ // try again, with the same exact callback as this one.
+ return setTimeout(() => rimraf_(p, options, CB), busyTries * 100)
+ }
+
+ // this one won't happen if graceful-fs is used.
+ if (er.code === "EMFILE" && timeout < options.emfileWait) {
+ return setTimeout(() => rimraf_(p, options, CB), timeout ++)
+ }
+
+ // already gone
+ if (er.code === "ENOENT") er = null
+ }
+
+ timeout = 0
+ next(er)
+ }
+ rimraf_(p, options, CB)
+ })
+ }
+
+ if (options.disableGlob || !glob.hasMagic(p))
+ return afterGlob(null, [p])
+
+ options.lstat(p, (er, stat) => {
+ if (!er)
+ return afterGlob(null, [p])
+
+ glob(p, options.glob, afterGlob)
+ })
+
+}
+
+// Two possible strategies.
+// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
+// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
+//
+// Both result in an extra syscall when you guess wrong. However, there
+// are likely far more normal files in the world than directories. This
+// is based on the assumption that a the average number of files per
+// directory is >= 1.
+//
+// If anyone ever complains about this, then I guess the strategy could
+// be made configurable somehow. But until then, YAGNI.
+const rimraf_ = (p, options, cb) => {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ // sunos lets the root user unlink directories, which is... weird.
+ // so we have to lstat here and make sure it's not a dir.
+ options.lstat(p, (er, st) => {
+ if (er && er.code === "ENOENT")
+ return cb(null)
+
+ // Windows can EPERM on stat. Life is suffering.
+ if (er && er.code === "EPERM" && isWindows)
+ fixWinEPERM(p, options, er, cb)
+
+ if (st && st.isDirectory())
+ return rmdir(p, options, er, cb)
+
+ options.unlink(p, er => {
+ if (er) {
+ if (er.code === "ENOENT")
+ return cb(null)
+ if (er.code === "EPERM")
+ return (isWindows)
+ ? fixWinEPERM(p, options, er, cb)
+ : rmdir(p, options, er, cb)
+ if (er.code === "EISDIR")
+ return rmdir(p, options, er, cb)
+ }
+ return cb(er)
+ })
+ })
+}
+
+const fixWinEPERM = (p, options, er, cb) => {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ options.chmod(p, 0o666, er2 => {
+ if (er2)
+ cb(er2.code === "ENOENT" ? null : er)
+ else
+ options.stat(p, (er3, stats) => {
+ if (er3)
+ cb(er3.code === "ENOENT" ? null : er)
+ else if (stats.isDirectory())
+ rmdir(p, options, er, cb)
+ else
+ options.unlink(p, cb)
+ })
+ })
+}
+
+const fixWinEPERMSync = (p, options, er) => {
+ assert(p)
+ assert(options)
+
+ try {
+ options.chmodSync(p, 0o666)
+ } catch (er2) {
+ if (er2.code === "ENOENT")
+ return
+ else
+ throw er
+ }
+
+ let stats
+ try {
+ stats = options.statSync(p)
+ } catch (er3) {
+ if (er3.code === "ENOENT")
+ return
+ else
+ throw er
+ }
+
+ if (stats.isDirectory())
+ rmdirSync(p, options, er)
+ else
+ options.unlinkSync(p)
+}
+
+const rmdir = (p, options, originalEr, cb) => {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
+ // if we guessed wrong, and it's not a directory, then
+ // raise the original error.
+ options.rmdir(p, er => {
+ if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM"))
+ rmkids(p, options, cb)
+ else if (er && er.code === "ENOTDIR")
+ cb(originalEr)
+ else
+ cb(er)
+ })
+}
+
+const rmkids = (p, options, cb) => {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ options.readdir(p, (er, files) => {
+ if (er)
+ return cb(er)
+ let n = files.length
+ if (n === 0)
+ return options.rmdir(p, cb)
+ let errState
+ files.forEach(f => {
+ rimraf(path.join(p, f), options, er => {
+ if (errState)
+ return
+ if (er)
+ return cb(errState = er)
+ if (--n === 0)
+ options.rmdir(p, cb)
+ })
+ })
+ })
+}
+
+// this looks simpler, and is strictly *faster*, but will
+// tie up the JavaScript thread and fail on excessively
+// deep directory trees.
+const rimrafSync = (p, options) => {
+ options = options || {}
+ defaults(options)
+
+ assert(p, 'rimraf: missing path')
+ assert.equal(typeof p, 'string', 'rimraf: path should be a string')
+ assert(options, 'rimraf: missing options')
+ assert.equal(typeof options, 'object', 'rimraf: options should be object')
+
+ let results
+
+ if (options.disableGlob || !glob.hasMagic(p)) {
+ results = [p]
+ } else {
+ try {
+ options.lstatSync(p)
+ results = [p]
+ } catch (er) {
+ results = glob.sync(p, options.glob)
+ }
+ }
+
+ if (!results.length)
+ return
+
+ for (let i = 0; i < results.length; i++) {
+ const p = results[i]
+
+ let st
+ try {
+ st = options.lstatSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+
+ // Windows can EPERM on stat. Life is suffering.
+ if (er.code === "EPERM" && isWindows)
+ fixWinEPERMSync(p, options, er)
+ }
+
+ try {
+ // sunos lets the root user unlink directories, which is... weird.
+ if (st && st.isDirectory())
+ rmdirSync(p, options, null)
+ else
+ options.unlinkSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+ if (er.code === "EPERM")
+ return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
+ if (er.code !== "EISDIR")
+ throw er
+
+ rmdirSync(p, options, er)
+ }
+ }
+}
+
+const rmdirSync = (p, options, originalEr) => {
+ assert(p)
+ assert(options)
+
+ try {
+ options.rmdirSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+ if (er.code === "ENOTDIR")
+ throw originalEr
+ if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")
+ rmkidsSync(p, options)
+ }
+}
+
+const rmkidsSync = (p, options) => {
+ assert(p)
+ assert(options)
+ options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))
+
+ // We only end up here once we got ENOTEMPTY at least once, and
+ // at this point, we are guaranteed to have removed all the kids.
+ // So, we know that it won't be ENOENT or ENOTDIR or anything else.
+ // try really hard to delete stuff on windows, because it has a
+ // PROFOUNDLY annoying habit of not closing handles promptly when
+ // files are deleted, resulting in spurious ENOTEMPTY errors.
+ const retries = isWindows ? 100 : 1
+ let i = 0
+ do {
+ let threw = true
+ try {
+ const ret = options.rmdirSync(p, options)
+ threw = false
+ return ret
+ } finally {
+ if (++i < retries && threw)
+ continue
+ }
+ } while (true)
+}
+
+module.exports = rimraf
+rimraf.sync = rimrafSync
+
/***/ }),
@@ -952,6 +907,1745 @@ class ExecState extends events.EventEmitter {
module.exports = require("tls");
+/***/ }),
+
+/***/ 18:
+/***/ (function() {
+
+eval("require")("encoding");
+
+
+/***/ }),
+
+/***/ 20:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const cp = __webpack_require__(129);
+const parse = __webpack_require__(568);
+const enoent = __webpack_require__(881);
+
+function spawn(command, args, options) {
+ // Parse the arguments
+ const parsed = parse(command, args, options);
+
+ // Spawn the child process
+ const spawned = cp.spawn(parsed.command, parsed.args, parsed.options);
+
+ // Hook into child process "exit" event to emit an error if the command
+ // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
+ enoent.hookChildProcess(spawned, parsed);
+
+ return spawned;
+}
+
+function spawnSync(command, args, options) {
+ // Parse the arguments
+ const parsed = parse(command, args, options);
+
+ // Spawn the child process
+ const result = cp.spawnSync(parsed.command, parsed.args, parsed.options);
+
+ // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
+ result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);
+
+ return result;
+}
+
+module.exports = spawn;
+module.exports.spawn = spawn;
+module.exports.sync = spawnSync;
+
+module.exports._parse = parse;
+module.exports._enoent = enoent;
+
+
+/***/ }),
+
+/***/ 34:
+/***/ (function(module) {
+
+module.exports = require("https");
+
+/***/ }),
+
+/***/ 36:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = parseOptions;
+
+const { Deprecation } = __webpack_require__(692);
+const { getUserAgent } = __webpack_require__(796);
+const once = __webpack_require__(969);
+
+const pkg = __webpack_require__(919);
+
+const deprecateOptionsTimeout = once((log, deprecation) =>
+ log.warn(deprecation)
+);
+const deprecateOptionsAgent = once((log, deprecation) => log.warn(deprecation));
+const deprecateOptionsHeaders = once((log, deprecation) =>
+ log.warn(deprecation)
+);
+
+function parseOptions(options, log, hook) {
+ if (options.headers) {
+ options.headers = Object.keys(options.headers).reduce((newObj, key) => {
+ newObj[key.toLowerCase()] = options.headers[key];
+ return newObj;
+ }, {});
+ }
+
+ const clientDefaults = {
+ headers: options.headers || {},
+ request: options.request || {},
+ mediaType: {
+ previews: [],
+ format: ""
+ }
+ };
+
+ if (options.baseUrl) {
+ clientDefaults.baseUrl = options.baseUrl;
+ }
+
+ if (options.userAgent) {
+ clientDefaults.headers["user-agent"] = options.userAgent;
+ }
+
+ if (options.previews) {
+ clientDefaults.mediaType.previews = options.previews;
+ }
+
+ if (options.timeZone) {
+ clientDefaults.headers["time-zone"] = options.timeZone;
+ }
+
+ if (options.timeout) {
+ deprecateOptionsTimeout(
+ log,
+ new Deprecation(
+ "[@octokit/rest] new Octokit({timeout}) is deprecated. Use {request: {timeout}} instead. See https://github.com/octokit/request.js#request"
+ )
+ );
+ clientDefaults.request.timeout = options.timeout;
+ }
+
+ if (options.agent) {
+ deprecateOptionsAgent(
+ log,
+ new Deprecation(
+ "[@octokit/rest] new Octokit({agent}) is deprecated. Use {request: {agent}} instead. See https://github.com/octokit/request.js#request"
+ )
+ );
+ clientDefaults.request.agent = options.agent;
+ }
+
+ if (options.headers) {
+ deprecateOptionsHeaders(
+ log,
+ new Deprecation(
+ "[@octokit/rest] new Octokit({headers}) is deprecated. Use {userAgent, previews} instead. See https://github.com/octokit/request.js#request"
+ )
+ );
+ }
+
+ const userAgentOption = clientDefaults.headers["user-agent"];
+ const defaultUserAgent = `octokit.js/${pkg.version} ${getUserAgent()}`;
+
+ clientDefaults.headers["user-agent"] = [userAgentOption, defaultUserAgent]
+ .filter(Boolean)
+ .join(" ");
+
+ clientDefaults.request.hook = hook.bind(null, "request");
+
+ return clientDefaults;
+}
+
+
+/***/ }),
+
+/***/ 37:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const factory = __webpack_require__(513);
+
+module.exports = factory();
+
+
+/***/ }),
+
+/***/ 39:
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = opts => {
+ opts = opts || {};
+
+ const env = opts.env || process.env;
+ const platform = opts.platform || process.platform;
+
+ if (platform !== 'win32') {
+ return 'PATH';
+ }
+
+ return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path';
+};
+
+
+/***/ }),
+
+/***/ 48:
+/***/ (function(module, exports) {
+
+exports = module.exports = SemVer
+
+var debug
+/* istanbul ignore next */
+if (typeof process === 'object' &&
+ process.env &&
+ process.env.NODE_DEBUG &&
+ /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
+ debug = function () {
+ var args = Array.prototype.slice.call(arguments, 0)
+ args.unshift('SEMVER')
+ console.log.apply(console, args)
+ }
+} else {
+ debug = function () {}
+}
+
+// Note: this is the semver.org version of the spec that it implements
+// Not necessarily the package version of this code.
+exports.SEMVER_SPEC_VERSION = '2.0.0'
+
+var MAX_LENGTH = 256
+var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
+ /* istanbul ignore next */ 9007199254740991
+
+// Max safe segment length for coercion.
+var MAX_SAFE_COMPONENT_LENGTH = 16
+
+// The actual regexps go on exports.re
+var re = exports.re = []
+var src = exports.src = []
+var R = 0
+
+// The following Regular Expressions can be used for tokenizing,
+// validating, and parsing SemVer version strings.
+
+// ## Numeric Identifier
+// A single `0`, or a non-zero digit followed by zero or more digits.
+
+var NUMERICIDENTIFIER = R++
+src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
+var NUMERICIDENTIFIERLOOSE = R++
+src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'
+
+// ## Non-numeric Identifier
+// Zero or more digits, followed by a letter or hyphen, and then zero or
+// more letters, digits, or hyphens.
+
+var NONNUMERICIDENTIFIER = R++
+src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
+
+// ## Main Version
+// Three dot-separated numeric identifiers.
+
+var MAINVERSION = R++
+src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')'
+
+var MAINVERSIONLOOSE = R++
+src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')'
+
+// ## Pre-release Version Identifier
+// A numeric identifier, or a non-numeric identifier.
+
+var PRERELEASEIDENTIFIER = R++
+src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')'
+
+var PRERELEASEIDENTIFIERLOOSE = R++
+src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')'
+
+// ## Pre-release Version
+// Hyphen, followed by one or more dot-separated pre-release version
+// identifiers.
+
+var PRERELEASE = R++
+src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'
+
+var PRERELEASELOOSE = R++
+src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'
+
+// ## Build Metadata Identifier
+// Any combination of digits, letters, or hyphens.
+
+var BUILDIDENTIFIER = R++
+src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
+
+// ## Build Metadata
+// Plus sign, followed by one or more period-separated build metadata
+// identifiers.
+
+var BUILD = R++
+src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
+ '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'
+
+// ## Full Version String
+// A main version, followed optionally by a pre-release version and
+// build metadata.
+
+// Note that the only major, minor, patch, and pre-release sections of
+// the version string are capturing groups. The build metadata is not a
+// capturing group, because it should not ever be used in version
+// comparison.
+
+var FULL = R++
+var FULLPLAIN = 'v?' + src[MAINVERSION] +
+ src[PRERELEASE] + '?' +
+ src[BUILD] + '?'
+
+src[FULL] = '^' + FULLPLAIN + '$'
+
+// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
+// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
+// common in the npm registry.
+var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
+ src[PRERELEASELOOSE] + '?' +
+ src[BUILD] + '?'
+
+var LOOSE = R++
+src[LOOSE] = '^' + LOOSEPLAIN + '$'
+
+var GTLT = R++
+src[GTLT] = '((?:<|>)?=?)'
+
+// Something like "2.*" or "1.2.x".
+// Note that "x.x" is a valid xRange identifer, meaning "any version"
+// Only the first item is strictly required.
+var XRANGEIDENTIFIERLOOSE = R++
+src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
+var XRANGEIDENTIFIER = R++
+src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'
+
+var XRANGEPLAIN = R++
+src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:' + src[PRERELEASE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?'
+
+var XRANGEPLAINLOOSE = R++
+src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:' + src[PRERELEASELOOSE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?'
+
+var XRANGE = R++
+src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
+var XRANGELOOSE = R++
+src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'
+
+// Coercion.
+// Extract anything that could conceivably be a part of a valid semver
+var COERCE = R++
+src[COERCE] = '(?:^|[^\\d])' +
+ '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
+ '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
+ '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
+ '(?:$|[^\\d])'
+
+// Tilde ranges.
+// Meaning is "reasonably at or greater than"
+var LONETILDE = R++
+src[LONETILDE] = '(?:~>?)'
+
+var TILDETRIM = R++
+src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
+re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
+var tildeTrimReplace = '$1~'
+
+var TILDE = R++
+src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
+var TILDELOOSE = R++
+src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'
+
+// Caret ranges.
+// Meaning is "at least and backwards compatible with"
+var LONECARET = R++
+src[LONECARET] = '(?:\\^)'
+
+var CARETTRIM = R++
+src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
+re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
+var caretTrimReplace = '$1^'
+
+var CARET = R++
+src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
+var CARETLOOSE = R++
+src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'
+
+// A simple gt/lt/eq thing, or just "" to indicate "any version"
+var COMPARATORLOOSE = R++
+src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
+var COMPARATOR = R++
+src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'
+
+// An expression to strip any whitespace between the gtlt and the thing
+// it modifies, so that `> 1.2.3` ==> `>1.2.3`
+var COMPARATORTRIM = R++
+src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
+ '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'
+
+// this one has to use the /g flag
+re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
+var comparatorTrimReplace = '$1$2$3'
+
+// Something like `1.2.3 - 1.2.4`
+// Note that these all use the loose form, because they'll be
+// checked against either the strict or loose comparator form
+// later.
+var HYPHENRANGE = R++
+src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAIN] + ')' +
+ '\\s*$'
+
+var HYPHENRANGELOOSE = R++
+src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s*$'
+
+// Star ranges basically just allow anything at all.
+var STAR = R++
+src[STAR] = '(<|>)?=?\\s*\\*'
+
+// Compile to actual regexp objects.
+// All are flag-free, unless they were created above with a flag.
+for (var i = 0; i < R; i++) {
+ debug(i, src[i])
+ if (!re[i]) {
+ re[i] = new RegExp(src[i])
+ }
+}
+
+exports.parse = parse
+function parse (version, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
+
+ if (version instanceof SemVer) {
+ return version
+ }
+
+ if (typeof version !== 'string') {
+ return null
+ }
+
+ if (version.length > MAX_LENGTH) {
+ return null
+ }
+
+ var r = options.loose ? re[LOOSE] : re[FULL]
+ if (!r.test(version)) {
+ return null
+ }
+
+ try {
+ return new SemVer(version, options)
+ } catch (er) {
+ return null
+ }
+}
+
+exports.valid = valid
+function valid (version, options) {
+ var v = parse(version, options)
+ return v ? v.version : null
+}
+
+exports.clean = clean
+function clean (version, options) {
+ var s = parse(version.trim().replace(/^[=v]+/, ''), options)
+ return s ? s.version : null
+}
+
+exports.SemVer = SemVer
+
+function SemVer (version, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
+ if (version instanceof SemVer) {
+ if (version.loose === options.loose) {
+ return version
+ } else {
+ version = version.version
+ }
+ } else if (typeof version !== 'string') {
+ throw new TypeError('Invalid Version: ' + version)
+ }
+
+ if (version.length > MAX_LENGTH) {
+ throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
+ }
+
+ if (!(this instanceof SemVer)) {
+ return new SemVer(version, options)
+ }
+
+ debug('SemVer', version, options)
+ this.options = options
+ this.loose = !!options.loose
+
+ var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])
+
+ if (!m) {
+ throw new TypeError('Invalid Version: ' + version)
+ }
+
+ this.raw = version
+
+ // these are actually numbers
+ this.major = +m[1]
+ this.minor = +m[2]
+ this.patch = +m[3]
+
+ if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
+ throw new TypeError('Invalid major version')
+ }
+
+ if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
+ throw new TypeError('Invalid minor version')
+ }
+
+ if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
+ throw new TypeError('Invalid patch version')
+ }
+
+ // numberify any prerelease numeric ids
+ if (!m[4]) {
+ this.prerelease = []
+ } else {
+ this.prerelease = m[4].split('.').map(function (id) {
+ if (/^[0-9]+$/.test(id)) {
+ var num = +id
+ if (num >= 0 && num < MAX_SAFE_INTEGER) {
+ return num
+ }
+ }
+ return id
+ })
+ }
+
+ this.build = m[5] ? m[5].split('.') : []
+ this.format()
+}
+
+SemVer.prototype.format = function () {
+ this.version = this.major + '.' + this.minor + '.' + this.patch
+ if (this.prerelease.length) {
+ this.version += '-' + this.prerelease.join('.')
+ }
+ return this.version
+}
+
+SemVer.prototype.toString = function () {
+ return this.version
+}
+
+SemVer.prototype.compare = function (other) {
+ debug('SemVer.compare', this.version, this.options, other)
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
+ }
+
+ return this.compareMain(other) || this.comparePre(other)
+}
+
+SemVer.prototype.compareMain = function (other) {
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
+ }
+
+ return compareIdentifiers(this.major, other.major) ||
+ compareIdentifiers(this.minor, other.minor) ||
+ compareIdentifiers(this.patch, other.patch)
+}
+
+SemVer.prototype.comparePre = function (other) {
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
+ }
+
+ // NOT having a prerelease is > having one
+ if (this.prerelease.length && !other.prerelease.length) {
+ return -1
+ } else if (!this.prerelease.length && other.prerelease.length) {
+ return 1
+ } else if (!this.prerelease.length && !other.prerelease.length) {
+ return 0
+ }
+
+ var i = 0
+ do {
+ var a = this.prerelease[i]
+ var b = other.prerelease[i]
+ debug('prerelease compare', i, a, b)
+ if (a === undefined && b === undefined) {
+ return 0
+ } else if (b === undefined) {
+ return 1
+ } else if (a === undefined) {
+ return -1
+ } else if (a === b) {
+ continue
+ } else {
+ return compareIdentifiers(a, b)
+ }
+ } while (++i)
+}
+
+// preminor will bump the version up to the next minor release, and immediately
+// down to pre-release. premajor and prepatch work the same way.
+SemVer.prototype.inc = function (release, identifier) {
+ switch (release) {
+ case 'premajor':
+ this.prerelease.length = 0
+ this.patch = 0
+ this.minor = 0
+ this.major++
+ this.inc('pre', identifier)
+ break
+ case 'preminor':
+ this.prerelease.length = 0
+ this.patch = 0
+ this.minor++
+ this.inc('pre', identifier)
+ break
+ case 'prepatch':
+ // If this is already a prerelease, it will bump to the next version
+ // drop any prereleases that might already exist, since they are not
+ // relevant at this point.
+ this.prerelease.length = 0
+ this.inc('patch', identifier)
+ this.inc('pre', identifier)
+ break
+ // If the input is a non-prerelease version, this acts the same as
+ // prepatch.
+ case 'prerelease':
+ if (this.prerelease.length === 0) {
+ this.inc('patch', identifier)
+ }
+ this.inc('pre', identifier)
+ break
+
+ case 'major':
+ // If this is a pre-major version, bump up to the same major version.
+ // Otherwise increment major.
+ // 1.0.0-5 bumps to 1.0.0
+ // 1.1.0 bumps to 2.0.0
+ if (this.minor !== 0 ||
+ this.patch !== 0 ||
+ this.prerelease.length === 0) {
+ this.major++
+ }
+ this.minor = 0
+ this.patch = 0
+ this.prerelease = []
+ break
+ case 'minor':
+ // If this is a pre-minor version, bump up to the same minor version.
+ // Otherwise increment minor.
+ // 1.2.0-5 bumps to 1.2.0
+ // 1.2.1 bumps to 1.3.0
+ if (this.patch !== 0 || this.prerelease.length === 0) {
+ this.minor++
+ }
+ this.patch = 0
+ this.prerelease = []
+ break
+ case 'patch':
+ // If this is not a pre-release version, it will increment the patch.
+ // If it is a pre-release it will bump up to the same patch version.
+ // 1.2.0-5 patches to 1.2.0
+ // 1.2.0 patches to 1.2.1
+ if (this.prerelease.length === 0) {
+ this.patch++
+ }
+ this.prerelease = []
+ break
+ // This probably shouldn't be used publicly.
+ // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
+ case 'pre':
+ if (this.prerelease.length === 0) {
+ this.prerelease = [0]
+ } else {
+ var i = this.prerelease.length
+ while (--i >= 0) {
+ if (typeof this.prerelease[i] === 'number') {
+ this.prerelease[i]++
+ i = -2
+ }
+ }
+ if (i === -1) {
+ // didn't increment anything
+ this.prerelease.push(0)
+ }
+ }
+ if (identifier) {
+ // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
+ // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
+ if (this.prerelease[0] === identifier) {
+ if (isNaN(this.prerelease[1])) {
+ this.prerelease = [identifier, 0]
+ }
+ } else {
+ this.prerelease = [identifier, 0]
+ }
+ }
+ break
+
+ default:
+ throw new Error('invalid increment argument: ' + release)
+ }
+ this.format()
+ this.raw = this.version
+ return this
+}
+
+exports.inc = inc
+function inc (version, release, loose, identifier) {
+ if (typeof (loose) === 'string') {
+ identifier = loose
+ loose = undefined
+ }
+
+ try {
+ return new SemVer(version, loose).inc(release, identifier).version
+ } catch (er) {
+ return null
+ }
+}
+
+exports.diff = diff
+function diff (version1, version2) {
+ if (eq(version1, version2)) {
+ return null
+ } else {
+ var v1 = parse(version1)
+ var v2 = parse(version2)
+ var prefix = ''
+ if (v1.prerelease.length || v2.prerelease.length) {
+ prefix = 'pre'
+ var defaultResult = 'prerelease'
+ }
+ for (var key in v1) {
+ if (key === 'major' || key === 'minor' || key === 'patch') {
+ if (v1[key] !== v2[key]) {
+ return prefix + key
+ }
+ }
+ }
+ return defaultResult // may be undefined
+ }
+}
+
+exports.compareIdentifiers = compareIdentifiers
+
+var numeric = /^[0-9]+$/
+function compareIdentifiers (a, b) {
+ var anum = numeric.test(a)
+ var bnum = numeric.test(b)
+
+ if (anum && bnum) {
+ a = +a
+ b = +b
+ }
+
+ return a === b ? 0
+ : (anum && !bnum) ? -1
+ : (bnum && !anum) ? 1
+ : a < b ? -1
+ : 1
+}
+
+exports.rcompareIdentifiers = rcompareIdentifiers
+function rcompareIdentifiers (a, b) {
+ return compareIdentifiers(b, a)
+}
+
+exports.major = major
+function major (a, loose) {
+ return new SemVer(a, loose).major
+}
+
+exports.minor = minor
+function minor (a, loose) {
+ return new SemVer(a, loose).minor
+}
+
+exports.patch = patch
+function patch (a, loose) {
+ return new SemVer(a, loose).patch
+}
+
+exports.compare = compare
+function compare (a, b, loose) {
+ return new SemVer(a, loose).compare(new SemVer(b, loose))
+}
+
+exports.compareLoose = compareLoose
+function compareLoose (a, b) {
+ return compare(a, b, true)
+}
+
+exports.rcompare = rcompare
+function rcompare (a, b, loose) {
+ return compare(b, a, loose)
+}
+
+exports.sort = sort
+function sort (list, loose) {
+ return list.sort(function (a, b) {
+ return exports.compare(a, b, loose)
+ })
+}
+
+exports.rsort = rsort
+function rsort (list, loose) {
+ return list.sort(function (a, b) {
+ return exports.rcompare(a, b, loose)
+ })
+}
+
+exports.gt = gt
+function gt (a, b, loose) {
+ return compare(a, b, loose) > 0
+}
+
+exports.lt = lt
+function lt (a, b, loose) {
+ return compare(a, b, loose) < 0
+}
+
+exports.eq = eq
+function eq (a, b, loose) {
+ return compare(a, b, loose) === 0
+}
+
+exports.neq = neq
+function neq (a, b, loose) {
+ return compare(a, b, loose) !== 0
+}
+
+exports.gte = gte
+function gte (a, b, loose) {
+ return compare(a, b, loose) >= 0
+}
+
+exports.lte = lte
+function lte (a, b, loose) {
+ return compare(a, b, loose) <= 0
+}
+
+exports.cmp = cmp
+function cmp (a, op, b, loose) {
+ switch (op) {
+ case '===':
+ if (typeof a === 'object')
+ a = a.version
+ if (typeof b === 'object')
+ b = b.version
+ return a === b
+
+ case '!==':
+ if (typeof a === 'object')
+ a = a.version
+ if (typeof b === 'object')
+ b = b.version
+ return a !== b
+
+ case '':
+ case '=':
+ case '==':
+ return eq(a, b, loose)
+
+ case '!=':
+ return neq(a, b, loose)
+
+ case '>':
+ return gt(a, b, loose)
+
+ case '>=':
+ return gte(a, b, loose)
+
+ case '<':
+ return lt(a, b, loose)
+
+ case '<=':
+ return lte(a, b, loose)
+
+ default:
+ throw new TypeError('Invalid operator: ' + op)
+ }
+}
+
+exports.Comparator = Comparator
+function Comparator (comp, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
+
+ if (comp instanceof Comparator) {
+ if (comp.loose === !!options.loose) {
+ return comp
+ } else {
+ comp = comp.value
+ }
+ }
+
+ if (!(this instanceof Comparator)) {
+ return new Comparator(comp, options)
+ }
+
+ debug('comparator', comp, options)
+ this.options = options
+ this.loose = !!options.loose
+ this.parse(comp)
+
+ if (this.semver === ANY) {
+ this.value = ''
+ } else {
+ this.value = this.operator + this.semver.version
+ }
+
+ debug('comp', this)
+}
+
+var ANY = {}
+Comparator.prototype.parse = function (comp) {
+ var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
+ var m = comp.match(r)
+
+ if (!m) {
+ throw new TypeError('Invalid comparator: ' + comp)
+ }
+
+ this.operator = m[1]
+ if (this.operator === '=') {
+ this.operator = ''
+ }
+
+ // if it literally is just '>' or '' then allow anything.
+ if (!m[2]) {
+ this.semver = ANY
+ } else {
+ this.semver = new SemVer(m[2], this.options.loose)
+ }
+}
+
+Comparator.prototype.toString = function () {
+ return this.value
+}
+
+Comparator.prototype.test = function (version) {
+ debug('Comparator.test', version, this.options.loose)
+
+ if (this.semver === ANY) {
+ return true
+ }
+
+ if (typeof version === 'string') {
+ version = new SemVer(version, this.options)
+ }
+
+ return cmp(version, this.operator, this.semver, this.options)
+}
+
+Comparator.prototype.intersects = function (comp, options) {
+ if (!(comp instanceof Comparator)) {
+ throw new TypeError('a Comparator is required')
+ }
+
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
+
+ var rangeTmp
+
+ if (this.operator === '') {
+ rangeTmp = new Range(comp.value, options)
+ return satisfies(this.value, rangeTmp, options)
+ } else if (comp.operator === '') {
+ rangeTmp = new Range(this.value, options)
+ return satisfies(comp.semver, rangeTmp, options)
+ }
+
+ var sameDirectionIncreasing =
+ (this.operator === '>=' || this.operator === '>') &&
+ (comp.operator === '>=' || comp.operator === '>')
+ var sameDirectionDecreasing =
+ (this.operator === '<=' || this.operator === '<') &&
+ (comp.operator === '<=' || comp.operator === '<')
+ var sameSemVer = this.semver.version === comp.semver.version
+ var differentDirectionsInclusive =
+ (this.operator === '>=' || this.operator === '<=') &&
+ (comp.operator === '>=' || comp.operator === '<=')
+ var oppositeDirectionsLessThan =
+ cmp(this.semver, '<', comp.semver, options) &&
+ ((this.operator === '>=' || this.operator === '>') &&
+ (comp.operator === '<=' || comp.operator === '<'))
+ var oppositeDirectionsGreaterThan =
+ cmp(this.semver, '>', comp.semver, options) &&
+ ((this.operator === '<=' || this.operator === '<') &&
+ (comp.operator === '>=' || comp.operator === '>'))
+
+ return sameDirectionIncreasing || sameDirectionDecreasing ||
+ (sameSemVer && differentDirectionsInclusive) ||
+ oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
+}
+
+exports.Range = Range
+function Range (range, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
+
+ if (range instanceof Range) {
+ if (range.loose === !!options.loose &&
+ range.includePrerelease === !!options.includePrerelease) {
+ return range
+ } else {
+ return new Range(range.raw, options)
+ }
+ }
+
+ if (range instanceof Comparator) {
+ return new Range(range.value, options)
+ }
+
+ if (!(this instanceof Range)) {
+ return new Range(range, options)
+ }
+
+ this.options = options
+ this.loose = !!options.loose
+ this.includePrerelease = !!options.includePrerelease
+
+ // First, split based on boolean or ||
+ this.raw = range
+ this.set = range.split(/\s*\|\|\s*/).map(function (range) {
+ return this.parseRange(range.trim())
+ }, this).filter(function (c) {
+ // throw out any that are not relevant for whatever reason
+ return c.length
+ })
+
+ if (!this.set.length) {
+ throw new TypeError('Invalid SemVer Range: ' + range)
+ }
+
+ this.format()
+}
+
+Range.prototype.format = function () {
+ this.range = this.set.map(function (comps) {
+ return comps.join(' ').trim()
+ }).join('||').trim()
+ return this.range
+}
+
+Range.prototype.toString = function () {
+ return this.range
+}
+
+Range.prototype.parseRange = function (range) {
+ var loose = this.options.loose
+ range = range.trim()
+ // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
+ var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
+ range = range.replace(hr, hyphenReplace)
+ debug('hyphen replace', range)
+ // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
+ range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
+ debug('comparator trim', range, re[COMPARATORTRIM])
+
+ // `~ 1.2.3` => `~1.2.3`
+ range = range.replace(re[TILDETRIM], tildeTrimReplace)
+
+ // `^ 1.2.3` => `^1.2.3`
+ range = range.replace(re[CARETTRIM], caretTrimReplace)
+
+ // normalize spaces
+ range = range.split(/\s+/).join(' ')
+
+ // At this point, the range is completely trimmed and
+ // ready to be split into comparators.
+
+ var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
+ var set = range.split(' ').map(function (comp) {
+ return parseComparator(comp, this.options)
+ }, this).join(' ').split(/\s+/)
+ if (this.options.loose) {
+ // in loose mode, throw out any that are not valid comparators
+ set = set.filter(function (comp) {
+ return !!comp.match(compRe)
+ })
+ }
+ set = set.map(function (comp) {
+ return new Comparator(comp, this.options)
+ }, this)
+
+ return set
+}
+
+Range.prototype.intersects = function (range, options) {
+ if (!(range instanceof Range)) {
+ throw new TypeError('a Range is required')
+ }
+
+ return this.set.some(function (thisComparators) {
+ return thisComparators.every(function (thisComparator) {
+ return range.set.some(function (rangeComparators) {
+ return rangeComparators.every(function (rangeComparator) {
+ return thisComparator.intersects(rangeComparator, options)
+ })
+ })
+ })
+ })
+}
+
+// Mostly just for testing and legacy API reasons
+exports.toComparators = toComparators
+function toComparators (range, options) {
+ return new Range(range, options).set.map(function (comp) {
+ return comp.map(function (c) {
+ return c.value
+ }).join(' ').trim().split(' ')
+ })
+}
+
+// comprised of xranges, tildes, stars, and gtlt's at this point.
+// already replaced the hyphen ranges
+// turn into a set of JUST comparators.
+function parseComparator (comp, options) {
+ debug('comp', comp, options)
+ comp = replaceCarets(comp, options)
+ debug('caret', comp)
+ comp = replaceTildes(comp, options)
+ debug('tildes', comp)
+ comp = replaceXRanges(comp, options)
+ debug('xrange', comp)
+ comp = replaceStars(comp, options)
+ debug('stars', comp)
+ return comp
+}
+
+function isX (id) {
+ return !id || id.toLowerCase() === 'x' || id === '*'
+}
+
+// ~, ~> --> * (any, kinda silly)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
+function replaceTildes (comp, options) {
+ return comp.trim().split(/\s+/).map(function (comp) {
+ return replaceTilde(comp, options)
+ }).join(' ')
+}
+
+function replaceTilde (comp, options) {
+ var r = options.loose ? re[TILDELOOSE] : re[TILDE]
+ return comp.replace(r, function (_, M, m, p, pr) {
+ debug('tilde', comp, _, M, m, p, pr)
+ var ret
+
+ if (isX(M)) {
+ ret = ''
+ } else if (isX(m)) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+ } else if (isX(p)) {
+ // ~1.2 == >=1.2.0 <1.3.0
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+ } else if (pr) {
+ debug('replaceTilde pr', pr)
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ } else {
+ // ~1.2.3 == >=1.2.3 <1.3.0
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ }
+
+ debug('tilde return', ret)
+ return ret
+ })
+}
+
+// ^ --> * (any, kinda silly)
+// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
+// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
+// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
+// ^1.2.3 --> >=1.2.3 <2.0.0
+// ^1.2.0 --> >=1.2.0 <2.0.0
+function replaceCarets (comp, options) {
+ return comp.trim().split(/\s+/).map(function (comp) {
+ return replaceCaret(comp, options)
+ }).join(' ')
+}
+
+function replaceCaret (comp, options) {
+ debug('caret', comp, options)
+ var r = options.loose ? re[CARETLOOSE] : re[CARET]
+ return comp.replace(r, function (_, M, m, p, pr) {
+ debug('caret', comp, _, M, m, p, pr)
+ var ret
+
+ if (isX(M)) {
+ ret = ''
+ } else if (isX(m)) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+ } else if (isX(p)) {
+ if (M === '0') {
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+ } else {
+ ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
+ }
+ } else if (pr) {
+ debug('replaceCaret pr', pr)
+ if (M === '0') {
+ if (m === '0') {
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + M + '.' + m + '.' + (+p + 1)
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ }
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + (+M + 1) + '.0.0'
+ }
+ } else {
+ debug('no pr')
+ if (M === '0') {
+ if (m === '0') {
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + m + '.' + (+p + 1)
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ }
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + (+M + 1) + '.0.0'
+ }
+ }
+
+ debug('caret return', ret)
+ return ret
+ })
+}
+
+function replaceXRanges (comp, options) {
+ debug('replaceXRanges', comp, options)
+ return comp.split(/\s+/).map(function (comp) {
+ return replaceXRange(comp, options)
+ }).join(' ')
+}
+
+function replaceXRange (comp, options) {
+ comp = comp.trim()
+ var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
+ return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
+ debug('xRange', comp, ret, gtlt, M, m, p, pr)
+ var xM = isX(M)
+ var xm = xM || isX(m)
+ var xp = xm || isX(p)
+ var anyX = xp
+
+ if (gtlt === '=' && anyX) {
+ gtlt = ''
+ }
+
+ if (xM) {
+ if (gtlt === '>' || gtlt === '<') {
+ // nothing is allowed
+ ret = '<0.0.0'
+ } else {
+ // nothing is forbidden
+ ret = '*'
+ }
+ } else if (gtlt && anyX) {
+ // we know patch is an x, because we have any x at all.
+ // replace X with 0
+ if (xm) {
+ m = 0
+ }
+ p = 0
+
+ if (gtlt === '>') {
+ // >1 => >=2.0.0
+ // >1.2 => >=1.3.0
+ // >1.2.3 => >= 1.2.4
+ gtlt = '>='
+ if (xm) {
+ M = +M + 1
+ m = 0
+ p = 0
+ } else {
+ m = +m + 1
+ p = 0
+ }
+ } else if (gtlt === '<=') {
+ // <=0.7.x is actually <0.8.0, since any 0.7.x should
+ // pass. Similarly, <=7.x is actually <8.0.0, etc.
+ gtlt = '<'
+ if (xm) {
+ M = +M + 1
+ } else {
+ m = +m + 1
+ }
+ }
+
+ ret = gtlt + M + '.' + m + '.' + p
+ } else if (xm) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+ } else if (xp) {
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+ }
+
+ debug('xRange return', ret)
+
+ return ret
+ })
+}
+
+// Because * is AND-ed with everything else in the comparator,
+// and '' means "any version", just remove the *s entirely.
+function replaceStars (comp, options) {
+ debug('replaceStars', comp, options)
+ // Looseness is ignored here. star is always as loose as it gets!
+ return comp.trim().replace(re[STAR], '')
+}
+
+// This function is passed to string.replace(re[HYPHENRANGE])
+// M, m, patch, prerelease, build
+// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
+// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
+// 1.2 - 3.4 => >=1.2.0 <3.5.0
+function hyphenReplace ($0,
+ from, fM, fm, fp, fpr, fb,
+ to, tM, tm, tp, tpr, tb) {
+ if (isX(fM)) {
+ from = ''
+ } else if (isX(fm)) {
+ from = '>=' + fM + '.0.0'
+ } else if (isX(fp)) {
+ from = '>=' + fM + '.' + fm + '.0'
+ } else {
+ from = '>=' + from
+ }
+
+ if (isX(tM)) {
+ to = ''
+ } else if (isX(tm)) {
+ to = '<' + (+tM + 1) + '.0.0'
+ } else if (isX(tp)) {
+ to = '<' + tM + '.' + (+tm + 1) + '.0'
+ } else if (tpr) {
+ to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
+ } else {
+ to = '<=' + to
+ }
+
+ return (from + ' ' + to).trim()
+}
+
+// if the version satisfies ALL comparators in ANY of the sets, then pass
+Range.prototype.test = function (version) {
+ if (!version) {
+ return false
+ }
+
+ if (typeof version === 'string') {
+ version = new SemVer(version, this.options)
+ }
+
+ for (var i = 0; i < this.set.length; i++) {
+ if (testSet(this.set[i], version, this.options)) {
+ return true
+ }
+ }
+ return false
+}
+
+function testSet (set, version, options) {
+ for (var i = 0; i < set.length; i++) {
+ if (!set[i].test(version)) {
+ return false
+ }
+ }
+
+ if (version.prerelease.length && !options.includePrerelease) {
+ // Find the set of versions that are allowed to have prereleases
+ // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
+ // That should allow `1.2.3-pr.2` to pass.
+ // However, `1.2.4-alpha.notready` should NOT be allowed,
+ // even though it's within the range set by the comparators.
+ for (i = 0; i < set.length; i++) {
+ debug(set[i].semver)
+ if (set[i].semver === ANY) {
+ continue
+ }
+
+ if (set[i].semver.prerelease.length > 0) {
+ var allowed = set[i].semver
+ if (allowed.major === version.major &&
+ allowed.minor === version.minor &&
+ allowed.patch === version.patch) {
+ return true
+ }
+ }
+ }
+
+ // Version has a -pre, but it's not one of the ones we like.
+ return false
+ }
+
+ return true
+}
+
+exports.satisfies = satisfies
+function satisfies (version, range, options) {
+ try {
+ range = new Range(range, options)
+ } catch (er) {
+ return false
+ }
+ return range.test(version)
+}
+
+exports.maxSatisfying = maxSatisfying
+function maxSatisfying (versions, range, options) {
+ var max = null
+ var maxSV = null
+ try {
+ var rangeObj = new Range(range, options)
+ } catch (er) {
+ return null
+ }
+ versions.forEach(function (v) {
+ if (rangeObj.test(v)) {
+ // satisfies(v, range, options)
+ if (!max || maxSV.compare(v) === -1) {
+ // compare(max, v, true)
+ max = v
+ maxSV = new SemVer(max, options)
+ }
+ }
+ })
+ return max
+}
+
+exports.minSatisfying = minSatisfying
+function minSatisfying (versions, range, options) {
+ var min = null
+ var minSV = null
+ try {
+ var rangeObj = new Range(range, options)
+ } catch (er) {
+ return null
+ }
+ versions.forEach(function (v) {
+ if (rangeObj.test(v)) {
+ // satisfies(v, range, options)
+ if (!min || minSV.compare(v) === 1) {
+ // compare(min, v, true)
+ min = v
+ minSV = new SemVer(min, options)
+ }
+ }
+ })
+ return min
+}
+
+exports.minVersion = minVersion
+function minVersion (range, loose) {
+ range = new Range(range, loose)
+
+ var minver = new SemVer('0.0.0')
+ if (range.test(minver)) {
+ return minver
+ }
+
+ minver = new SemVer('0.0.0-0')
+ if (range.test(minver)) {
+ return minver
+ }
+
+ minver = null
+ for (var i = 0; i < range.set.length; ++i) {
+ var comparators = range.set[i]
+
+ comparators.forEach(function (comparator) {
+ // Clone to avoid manipulating the comparator's semver object.
+ var compver = new SemVer(comparator.semver.version)
+ switch (comparator.operator) {
+ case '>':
+ if (compver.prerelease.length === 0) {
+ compver.patch++
+ } else {
+ compver.prerelease.push(0)
+ }
+ compver.raw = compver.format()
+ /* fallthrough */
+ case '':
+ case '>=':
+ if (!minver || gt(minver, compver)) {
+ minver = compver
+ }
+ break
+ case '<':
+ case '<=':
+ /* Ignore maximum versions */
+ break
+ /* istanbul ignore next */
+ default:
+ throw new Error('Unexpected operation: ' + comparator.operator)
+ }
+ })
+ }
+
+ if (minver && range.test(minver)) {
+ return minver
+ }
+
+ return null
+}
+
+exports.validRange = validRange
+function validRange (range, options) {
+ try {
+ // Return '*' instead of '' so that truthiness works.
+ // This will throw if it's invalid anyway
+ return new Range(range, options).range || '*'
+ } catch (er) {
+ return null
+ }
+}
+
+// Determine if version is less than all the versions possible in the range
+exports.ltr = ltr
+function ltr (version, range, options) {
+ return outside(version, range, '<', options)
+}
+
+// Determine if version is greater than all the versions possible in the range.
+exports.gtr = gtr
+function gtr (version, range, options) {
+ return outside(version, range, '>', options)
+}
+
+exports.outside = outside
+function outside (version, range, hilo, options) {
+ version = new SemVer(version, options)
+ range = new Range(range, options)
+
+ var gtfn, ltefn, ltfn, comp, ecomp
+ switch (hilo) {
+ case '>':
+ gtfn = gt
+ ltefn = lte
+ ltfn = lt
+ comp = '>'
+ ecomp = '>='
+ break
+ case '<':
+ gtfn = lt
+ ltefn = gte
+ ltfn = gt
+ comp = '<'
+ ecomp = '<='
+ break
+ default:
+ throw new TypeError('Must provide a hilo val of "<" or ">"')
+ }
+
+  // If it satisfies the range it is not outside
+ if (satisfies(version, range, options)) {
+ return false
+ }
+
+  // From now on, variable terms are as if we're in "gtr" mode,
+  // but note that everything is flipped for the "ltr" function.
+
+ for (var i = 0; i < range.set.length; ++i) {
+ var comparators = range.set[i]
+
+ var high = null
+ var low = null
+
+ comparators.forEach(function (comparator) {
+ if (comparator.semver === ANY) {
+ comparator = new Comparator('>=0.0.0')
+ }
+ high = high || comparator
+ low = low || comparator
+ if (gtfn(comparator.semver, high.semver, options)) {
+ high = comparator
+ } else if (ltfn(comparator.semver, low.semver, options)) {
+ low = comparator
+ }
+ })
+
+    // If the edge version comparator has an operator then our version
+    // isn't outside it
+ if (high.operator === comp || high.operator === ecomp) {
+ return false
+ }
+
+ // If the lowest version comparator has an operator and our version
+ // is less than it then it isn't higher than the range
+ if ((!low.operator || low.operator === comp) &&
+ ltefn(version, low.semver)) {
+ return false
+ } else if (low.operator === ecomp && ltfn(version, low.semver)) {
+ return false
+ }
+ }
+ return true
+}
+
+exports.prerelease = prerelease
+function prerelease (version, options) {
+ var parsed = parse(version, options)
+ return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
+}
+
+exports.intersects = intersects
+function intersects (r1, r2, options) {
+ r1 = new Range(r1, options)
+ r2 = new Range(r2, options)
+ return r1.intersects(r2)
+}
+
+exports.coerce = coerce
+function coerce (version) {
+ if (version instanceof SemVer) {
+ return version
+ }
+
+ if (typeof version !== 'string') {
+ return null
+ }
+
+ var match = version.match(re[COERCE])
+
+ if (match == null) {
+ return null
+ }
+
+ return parse(match[1] +
+ '.' + (match[2] || '0') +
+ '.' + (match[3] || '0'))
+}
+
+
+/***/ }),
+
+/***/ 49:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const os = __webpack_require__(87);
+const execa = __webpack_require__(955);
+
+// Reference: https://www.gaijin.at/en/lstwinver.php
+const names = new Map([
+ ['10.0', '10'],
+ ['6.3', '8.1'],
+ ['6.2', '8'],
+ ['6.1', '7'],
+ ['6.0', 'Vista'],
+ ['5.2', 'Server 2003'],
+ ['5.1', 'XP'],
+ ['5.0', '2000'],
+ ['4.9', 'ME'],
+ ['4.1', '98'],
+ ['4.0', '95']
+]);
+
+const windowsRelease = release => {
+ const version = /\d+\.\d/.exec(release || os.release());
+
+ if (release && !version) {
+ throw new Error('`release` argument doesn\'t match `n.n`');
+ }
+
+ const ver = (version || [])[0];
+
+ // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime.
+ // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version
+ // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx
+  // If `wmic` is obsolete (later versions of Windows 10), use PowerShell instead.
+ // If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name.
+ if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) {
+ let stdout;
+ try {
+ stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || '';
+ } catch (_) {
+ stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || '';
+ }
+
+ const year = (stdout.match(/2008|2012|2016|2019/) || [])[0];
+
+ if (year) {
+ return `Server ${year}`;
+ }
+ }
+
+ return names.get(ver);
+};
+
+module.exports = windowsRelease;
+
+
/***/ }),
/***/ 52:
@@ -1062,6 +2756,195 @@ function findLintVersion() {
exports.findLintVersion = findLintVersion;
+/***/ }),
+
+/***/ 63:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+module.exports = validate;
+
+const { RequestError } = __webpack_require__(102);
+const get = __webpack_require__(854);
+const set = __webpack_require__(883);
+
+function validate(octokit, options) {
+ if (!options.request.validate) {
+ return;
+ }
+ const { validate: params } = options.request;
+
+ Object.keys(params).forEach(parameterName => {
+ const parameter = get(params, parameterName);
+
+ const expectedType = parameter.type;
+ let parentParameterName;
+ let parentValue;
+ let parentParamIsPresent = true;
+ let parentParameterIsArray = false;
+
+ if (/\./.test(parameterName)) {
+ parentParameterName = parameterName.replace(/\.[^.]+$/, "");
+ parentParameterIsArray = parentParameterName.slice(-2) === "[]";
+ if (parentParameterIsArray) {
+ parentParameterName = parentParameterName.slice(0, -2);
+ }
+ parentValue = get(options, parentParameterName);
+ parentParamIsPresent =
+ parentParameterName === "headers" ||
+ (typeof parentValue === "object" && parentValue !== null);
+ }
+
+ const values = parentParameterIsArray
+ ? (get(options, parentParameterName) || []).map(
+ value => value[parameterName.split(/\./).pop()]
+ )
+ : [get(options, parameterName)];
+
+ values.forEach((value, i) => {
+ const valueIsPresent = typeof value !== "undefined";
+ const valueIsNull = value === null;
+ const currentParameterName = parentParameterIsArray
+ ? parameterName.replace(/\[\]/, `[${i}]`)
+ : parameterName;
+
+ if (!parameter.required && !valueIsPresent) {
+ return;
+ }
+
+ // if the parent parameter is of type object but allows null
+ // then the child parameters can be ignored
+ if (!parentParamIsPresent) {
+ return;
+ }
+
+ if (parameter.allowNull && valueIsNull) {
+ return;
+ }
+
+ if (!parameter.allowNull && valueIsNull) {
+ throw new RequestError(
+ `'${currentParameterName}' cannot be null`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+
+ if (parameter.required && !valueIsPresent) {
+ throw new RequestError(
+ `Empty value for parameter '${currentParameterName}': ${JSON.stringify(
+ value
+ )}`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+
+ // parse to integer before checking for enum
+ // so that string "1" will match enum with number 1
+ if (expectedType === "integer") {
+ const unparsedValue = value;
+ value = parseInt(value, 10);
+ if (isNaN(value)) {
+ throw new RequestError(
+ `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
+ unparsedValue
+ )} is NaN`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+ }
+
+ if (parameter.enum && parameter.enum.indexOf(String(value)) === -1) {
+ throw new RequestError(
+ `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
+ value
+ )}`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+
+ if (parameter.validation) {
+ const regex = new RegExp(parameter.validation);
+ if (!regex.test(value)) {
+ throw new RequestError(
+ `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
+ value
+ )}`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+ }
+
+ if (expectedType === "object" && typeof value === "string") {
+ try {
+ value = JSON.parse(value);
+ } catch (exception) {
+ throw new RequestError(
+ `JSON parse error of value for parameter '${currentParameterName}': ${JSON.stringify(
+ value
+ )}`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+ }
+
+ set(options, parameter.mapTo || currentParameterName, value);
+ });
+ });
+
+ return options;
+}
+
+
+/***/ }),
+
+/***/ 85:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = withAuthorizationPrefix;
+
+const atob = __webpack_require__(368);
+
+const REGEX_IS_BASIC_AUTH = /^[\w-]+:/;
+
+function withAuthorizationPrefix(authorization) {
+ if (/^(basic|bearer|token) /i.test(authorization)) {
+ return authorization;
+ }
+
+ try {
+ if (REGEX_IS_BASIC_AUTH.test(atob(authorization))) {
+ return `basic ${authorization}`;
+ }
+ } catch (error) {}
+
+ if (authorization.split(/\./).length === 3) {
+ return `bearer ${authorization}`;
+ }
+
+ return `token ${authorization}`;
+}
+
+
/***/ }),
/***/ 87:
@@ -1999,6 +3882,69 @@ function regExpEscape (s) {
}
+/***/ }),
+
+/***/ 102:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var deprecation = __webpack_require__(692);
+var once = _interopDefault(__webpack_require__(969));
+
+const logOnce = once(deprecation => console.warn(deprecation));
+/**
+ * Error with extra properties to help with debugging
+ */
+
+class RequestError extends Error {
+ constructor(message, statusCode, options) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = "HttpError";
+ this.status = statusCode;
+ Object.defineProperty(this, "code", {
+ get() {
+ logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
+ return statusCode;
+ }
+
+ });
+ this.headers = options.headers || {}; // redact request credentials without mutating original request options
+
+ const requestCopy = Object.assign({}, options.request);
+
+ if (options.request.headers.authorization) {
+ requestCopy.headers = Object.assign({}, options.request.headers, {
+ authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
+ });
+ }
+
+ requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
+ // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
+ .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
+ // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
+ .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
+ this.request = requestCopy;
+ }
+
+}
+
+exports.RequestError = RequestError;
+//# sourceMappingURL=index.js.map
+
+
/***/ }),
/***/ 105:
@@ -2032,6 +3978,1259 @@ function bytesToUuid(buf, offset) {
module.exports = bytesToUuid;
+/***/ }),
+
+/***/ 117:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+var pathModule = __webpack_require__(622);
+var isWindows = process.platform === 'win32';
+var fs = __webpack_require__(747);
+
+// JavaScript implementation of realpath, ported from node pre-v6
+
+var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
+
+function rethrow() {
+ // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
+ // is fairly slow to generate.
+ var callback;
+ if (DEBUG) {
+ var backtrace = new Error;
+ callback = debugCallback;
+ } else
+ callback = missingCallback;
+
+ return callback;
+
+ function debugCallback(err) {
+ if (err) {
+ backtrace.message = err.message;
+ err = backtrace;
+ missingCallback(err);
+ }
+ }
+
+ function missingCallback(err) {
+ if (err) {
+ if (process.throwDeprecation)
+ throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
+ else if (!process.noDeprecation) {
+ var msg = 'fs: missing callback ' + (err.stack || err.message);
+ if (process.traceDeprecation)
+ console.trace(msg);
+ else
+ console.error(msg);
+ }
+ }
+ }
+}
+
+function maybeCallback(cb) {
+ return typeof cb === 'function' ? cb : rethrow();
+}
+
+var normalize = pathModule.normalize;
+
+// Regexp that finds the next portion of a (partial) path
+// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
+if (isWindows) {
+ var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
+} else {
+ var nextPartRe = /(.*?)(?:[\/]+|$)/g;
+}
+
+// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
+if (isWindows) {
+ var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
+} else {
+ var splitRootRe = /^[\/]*/;
+}
+
+exports.realpathSync = function realpathSync(p, cache) {
+  // make p absolute
+ p = pathModule.resolve(p);
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+ return cache[p];
+ }
+
+ var original = p,
+ seenLinks = {},
+ knownHard = {};
+
+ // current character position in p
+ var pos;
+ // the partial path so far, including a trailing slash if any
+ var current;
+ // the partial path without a trailing slash (except when pointing at a root)
+ var base;
+ // the partial path scanned in the previous round, with slash
+ var previous;
+
+ start();
+
+ function start() {
+ // Skip over roots
+ var m = splitRootRe.exec(p);
+ pos = m[0].length;
+ current = m[0];
+ base = m[0];
+ previous = '';
+
+ // On windows, check that the root exists. On unix there is no need.
+ if (isWindows && !knownHard[base]) {
+ fs.lstatSync(base);
+ knownHard[base] = true;
+ }
+ }
+
+ // walk down the path, swapping out linked pathparts for their real
+ // values
+ // NB: p.length changes.
+ while (pos < p.length) {
+ // find the next part
+ nextPartRe.lastIndex = pos;
+ var result = nextPartRe.exec(p);
+ previous = current;
+ current += result[0];
+ base = previous + result[1];
+ pos = nextPartRe.lastIndex;
+
+ // continue if not a symlink
+ if (knownHard[base] || (cache && cache[base] === base)) {
+ continue;
+ }
+
+ var resolvedLink;
+ if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+ // some known symbolic link. no need to stat again.
+ resolvedLink = cache[base];
+ } else {
+ var stat = fs.lstatSync(base);
+ if (!stat.isSymbolicLink()) {
+ knownHard[base] = true;
+ if (cache) cache[base] = base;
+ continue;
+ }
+
+ // read the link if it wasn't read before
+ // dev/ino always return 0 on windows, so skip the check.
+ var linkTarget = null;
+ if (!isWindows) {
+ var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+ if (seenLinks.hasOwnProperty(id)) {
+ linkTarget = seenLinks[id];
+ }
+ }
+ if (linkTarget === null) {
+ fs.statSync(base);
+ linkTarget = fs.readlinkSync(base);
+ }
+ resolvedLink = pathModule.resolve(previous, linkTarget);
+ // track this, if given a cache.
+ if (cache) cache[base] = resolvedLink;
+ if (!isWindows) seenLinks[id] = linkTarget;
+ }
+
+ // resolve the link, then start over
+ p = pathModule.resolve(resolvedLink, p.slice(pos));
+ start();
+ }
+
+ if (cache) cache[original] = p;
+
+ return p;
+};
+
+
+exports.realpath = function realpath(p, cache, cb) {
+ if (typeof cb !== 'function') {
+ cb = maybeCallback(cache);
+ cache = null;
+ }
+
+  // make p absolute
+ p = pathModule.resolve(p);
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+ return process.nextTick(cb.bind(null, null, cache[p]));
+ }
+
+ var original = p,
+ seenLinks = {},
+ knownHard = {};
+
+ // current character position in p
+ var pos;
+ // the partial path so far, including a trailing slash if any
+ var current;
+ // the partial path without a trailing slash (except when pointing at a root)
+ var base;
+ // the partial path scanned in the previous round, with slash
+ var previous;
+
+ start();
+
+ function start() {
+ // Skip over roots
+ var m = splitRootRe.exec(p);
+ pos = m[0].length;
+ current = m[0];
+ base = m[0];
+ previous = '';
+
+ // On windows, check that the root exists. On unix there is no need.
+ if (isWindows && !knownHard[base]) {
+ fs.lstat(base, function(err) {
+ if (err) return cb(err);
+ knownHard[base] = true;
+ LOOP();
+ });
+ } else {
+ process.nextTick(LOOP);
+ }
+ }
+
+ // walk down the path, swapping out linked pathparts for their real
+ // values
+ function LOOP() {
+ // stop if scanned past end of path
+ if (pos >= p.length) {
+ if (cache) cache[original] = p;
+ return cb(null, p);
+ }
+
+ // find the next part
+ nextPartRe.lastIndex = pos;
+ var result = nextPartRe.exec(p);
+ previous = current;
+ current += result[0];
+ base = previous + result[1];
+ pos = nextPartRe.lastIndex;
+
+ // continue if not a symlink
+ if (knownHard[base] || (cache && cache[base] === base)) {
+ return process.nextTick(LOOP);
+ }
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+ // known symbolic link. no need to stat again.
+ return gotResolvedLink(cache[base]);
+ }
+
+ return fs.lstat(base, gotStat);
+ }
+
+ function gotStat(err, stat) {
+ if (err) return cb(err);
+
+ // if not a symlink, skip to the next path part
+ if (!stat.isSymbolicLink()) {
+ knownHard[base] = true;
+ if (cache) cache[base] = base;
+ return process.nextTick(LOOP);
+ }
+
+ // stat & read the link if not read before
+ // call gotTarget as soon as the link target is known
+ // dev/ino always return 0 on windows, so skip the check.
+ if (!isWindows) {
+ var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+ if (seenLinks.hasOwnProperty(id)) {
+ return gotTarget(null, seenLinks[id], base);
+ }
+ }
+ fs.stat(base, function(err) {
+ if (err) return cb(err);
+
+ fs.readlink(base, function(err, target) {
+ if (!isWindows) seenLinks[id] = target;
+ gotTarget(err, target);
+ });
+ });
+ }
+
+ function gotTarget(err, target, base) {
+ if (err) return cb(err);
+
+ var resolvedLink = pathModule.resolve(previous, target);
+ if (cache) cache[base] = resolvedLink;
+ gotResolvedLink(resolvedLink);
+ }
+
+ function gotResolvedLink(resolvedLink) {
+ // resolve the link, then start over
+ p = pathModule.resolve(resolvedLink, p.slice(pos));
+ start();
+ }
+};
+
+
+/***/ }),
+
+/***/ 118:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const os = __webpack_require__(87);
+
+const nameMap = new Map([
+ [19, 'Catalina'],
+ [18, 'Mojave'],
+ [17, 'High Sierra'],
+ [16, 'Sierra'],
+ [15, 'El Capitan'],
+ [14, 'Yosemite'],
+ [13, 'Mavericks'],
+ [12, 'Mountain Lion'],
+ [11, 'Lion'],
+ [10, 'Snow Leopard'],
+ [9, 'Leopard'],
+ [8, 'Tiger'],
+ [7, 'Panther'],
+ [6, 'Jaguar'],
+ [5, 'Puma']
+]);
+
+const macosRelease = release => {
+ release = Number((release || os.release()).split('.')[0]);
+ return {
+ name: nameMap.get(release),
+ version: '10.' + (release - 4)
+ };
+};
+
+module.exports = macosRelease;
+// TODO: remove this in the next major version
+module.exports.default = macosRelease;
+
+
+/***/ }),
+
+/***/ 126:
+/***/ (function(module) {
+
+/**
+ * lodash (Custom Build)
+ * Build: `lodash modularize exports="npm" -o ./`
+ * Copyright jQuery Foundation and other contributors
+ * Released under MIT license
+ * Based on Underscore.js 1.8.3
+ * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+ */
+
+/** Used as the size to enable large array optimizations. */
+var LARGE_ARRAY_SIZE = 200;
+
+/** Used to stand-in for `undefined` hash values. */
+var HASH_UNDEFINED = '__lodash_hash_undefined__';
+
+/** Used as references for various `Number` constants. */
+var INFINITY = 1 / 0;
+
+/** `Object#toString` result references. */
+var funcTag = '[object Function]',
+ genTag = '[object GeneratorFunction]';
+
+/**
+ * Used to match `RegExp`
+ * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
+ */
+var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
+
+/** Used to detect host constructors (Safari). */
+var reIsHostCtor = /^\[object .+?Constructor\]$/;
+
+/** Detect free variable `global` from Node.js. */
+var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
+
+/** Detect free variable `self`. */
+var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
+
+/** Used as a reference to the global object. */
+var root = freeGlobal || freeSelf || Function('return this')();
+
+/**
+ * A specialized version of `_.includes` for arrays without support for
+ * specifying an index to search from.
+ *
+ * @private
+ * @param {Array} [array] The array to inspect.
+ * @param {*} value The value to search for.
+ * @returns {boolean} Returns `true` if `target` is found, else `false`.
+ */
+function arrayIncludes(array, value) {
+ var length = array ? array.length : 0;
+ return !!length && baseIndexOf(array, value, 0) > -1;
+}
+
+/**
+ * This function is like `arrayIncludes` except that it accepts a comparator.
+ *
+ * @private
+ * @param {Array} [array] The array to inspect.
+ * @param {*} value The value to search for.
+ * @param {Function} comparator The comparator invoked per element.
+ * @returns {boolean} Returns `true` if `target` is found, else `false`.
+ */
+function arrayIncludesWith(array, value, comparator) {
+ var index = -1,
+ length = array ? array.length : 0;
+
+ while (++index < length) {
+ if (comparator(value, array[index])) {
+ return true;
+ }
+ }
+ return false;
+}
+
+/**
+ * The base implementation of `_.findIndex` and `_.findLastIndex` without
+ * support for iteratee shorthands.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {Function} predicate The function invoked per iteration.
+ * @param {number} fromIndex The index to search from.
+ * @param {boolean} [fromRight] Specify iterating from right to left.
+ * @returns {number} Returns the index of the matched value, else `-1`.
+ */
+function baseFindIndex(array, predicate, fromIndex, fromRight) {
+ var length = array.length,
+ index = fromIndex + (fromRight ? 1 : -1);
+
+ while ((fromRight ? index-- : ++index < length)) {
+ if (predicate(array[index], index, array)) {
+ return index;
+ }
+ }
+ return -1;
+}
+
+/**
+ * The base implementation of `_.indexOf` without `fromIndex` bounds checks.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {*} value The value to search for.
+ * @param {number} fromIndex The index to search from.
+ * @returns {number} Returns the index of the matched value, else `-1`.
+ */
+function baseIndexOf(array, value, fromIndex) {
+ if (value !== value) {
+ return baseFindIndex(array, baseIsNaN, fromIndex);
+ }
+ var index = fromIndex - 1,
+ length = array.length;
+
+ while (++index < length) {
+ if (array[index] === value) {
+ return index;
+ }
+ }
+ return -1;
+}
+
+/**
+ * The base implementation of `_.isNaN` without support for number objects.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`.
+ */
+function baseIsNaN(value) {
+ return value !== value;
+}
+
+/**
+ * Checks if a cache value for `key` exists.
+ *
+ * @private
+ * @param {Object} cache The cache to query.
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function cacheHas(cache, key) {
+ return cache.has(key);
+}
+
+/**
+ * Gets the value at `key` of `object`.
+ *
+ * @private
+ * @param {Object} [object] The object to query.
+ * @param {string} key The key of the property to get.
+ * @returns {*} Returns the property value.
+ */
+function getValue(object, key) {
+ return object == null ? undefined : object[key];
+}
+
+/**
+ * Checks if `value` is a host object in IE < 9.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a host object, else `false`.
+ */
+function isHostObject(value) {
+ // Many host objects are `Object` objects that can coerce to strings
+ // despite having improperly defined `toString` methods.
+ var result = false;
+ if (value != null && typeof value.toString != 'function') {
+ try {
+ result = !!(value + '');
+ } catch (e) {}
+ }
+ return result;
+}
+
+/**
+ * Converts `set` to an array of its values.
+ *
+ * @private
+ * @param {Object} set The set to convert.
+ * @returns {Array} Returns the values.
+ */
+function setToArray(set) {
+ var index = -1,
+ result = Array(set.size);
+
+ set.forEach(function(value) {
+ result[++index] = value;
+ });
+ return result;
+}
+
+/** Used for built-in method references. */
+var arrayProto = Array.prototype,
+ funcProto = Function.prototype,
+ objectProto = Object.prototype;
+
+/** Used to detect overreaching core-js shims. */
+var coreJsData = root['__core-js_shared__'];
+
+/** Used to detect methods masquerading as native. */
+var maskSrcKey = (function() {
+ var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
+ return uid ? ('Symbol(src)_1.' + uid) : '';
+}());
+
+/** Used to resolve the decompiled source of functions. */
+var funcToString = funcProto.toString;
+
+/** Used to check objects for own properties. */
+var hasOwnProperty = objectProto.hasOwnProperty;
+
+/**
+ * Used to resolve the
+ * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
+ * of values.
+ */
+var objectToString = objectProto.toString;
+
+/** Used to detect if a method is native. */
+var reIsNative = RegExp('^' +
+ funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
+ .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
+);
+
+/** Built-in value references. */
+var splice = arrayProto.splice;
+
+/* Built-in method references that are verified to be native. */
+var Map = getNative(root, 'Map'),
+ Set = getNative(root, 'Set'),
+ nativeCreate = getNative(Object, 'create');
+
+/**
+ * Creates a hash object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function Hash(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the hash.
+ *
+ * @private
+ * @name clear
+ * @memberOf Hash
+ */
+function hashClear() {
+ this.__data__ = nativeCreate ? nativeCreate(null) : {};
+}
+
+/**
+ * Removes `key` and its value from the hash.
+ *
+ * @private
+ * @name delete
+ * @memberOf Hash
+ * @param {Object} hash The hash to modify.
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function hashDelete(key) {
+ return this.has(key) && delete this.__data__[key];
+}
+
+/**
+ * Gets the hash value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf Hash
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function hashGet(key) {
+ var data = this.__data__;
+ if (nativeCreate) {
+ var result = data[key];
+ return result === HASH_UNDEFINED ? undefined : result;
+ }
+ return hasOwnProperty.call(data, key) ? data[key] : undefined;
+}
+
+/**
+ * Checks if a hash value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf Hash
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function hashHas(key) {
+ var data = this.__data__;
+ return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key);
+}
+
+/**
+ * Sets the hash `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf Hash
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the hash instance.
+ */
+function hashSet(key, value) {
+ var data = this.__data__;
+ data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
+ return this;
+}
+
+// Add methods to `Hash`.
+Hash.prototype.clear = hashClear;
+Hash.prototype['delete'] = hashDelete;
+Hash.prototype.get = hashGet;
+Hash.prototype.has = hashHas;
+Hash.prototype.set = hashSet;
+
+/**
+ * Creates a list cache object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function ListCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the list cache.
+ *
+ * @private
+ * @name clear
+ * @memberOf ListCache
+ */
+function listCacheClear() {
+ this.__data__ = [];
+}
+
+/**
+ * Removes `key` and its value from the list cache.
+ *
+ * @private
+ * @name delete
+ * @memberOf ListCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function listCacheDelete(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ return false;
+ }
+ var lastIndex = data.length - 1;
+ if (index == lastIndex) {
+ data.pop();
+ } else {
+ splice.call(data, index, 1);
+ }
+ return true;
+}
+
+/**
+ * Gets the list cache value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf ListCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function listCacheGet(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ return index < 0 ? undefined : data[index][1];
+}
+
+/**
+ * Checks if a list cache value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf ListCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function listCacheHas(key) {
+ return assocIndexOf(this.__data__, key) > -1;
+}
+
+/**
+ * Sets the list cache `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf ListCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the list cache instance.
+ */
+function listCacheSet(key, value) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ data.push([key, value]);
+ } else {
+ data[index][1] = value;
+ }
+ return this;
+}
+
+// Add methods to `ListCache`.
+ListCache.prototype.clear = listCacheClear;
+ListCache.prototype['delete'] = listCacheDelete;
+ListCache.prototype.get = listCacheGet;
+ListCache.prototype.has = listCacheHas;
+ListCache.prototype.set = listCacheSet;
+
+/**
+ * Creates a map cache object to store key-value pairs.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function MapCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the map.
+ *
+ * @private
+ * @name clear
+ * @memberOf MapCache
+ */
+function mapCacheClear() {
+ this.__data__ = {
+ 'hash': new Hash,
+ 'map': new (Map || ListCache),
+ 'string': new Hash
+ };
+}
+
+/**
+ * Removes `key` and its value from the map.
+ *
+ * @private
+ * @name delete
+ * @memberOf MapCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function mapCacheDelete(key) {
+ return getMapData(this, key)['delete'](key);
+}
+
+/**
+ * Gets the map value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf MapCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function mapCacheGet(key) {
+ return getMapData(this, key).get(key);
+}
+
+/**
+ * Checks if a map value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf MapCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function mapCacheHas(key) {
+ return getMapData(this, key).has(key);
+}
+
+/**
+ * Sets the map `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf MapCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the map cache instance.
+ */
+function mapCacheSet(key, value) {
+ getMapData(this, key).set(key, value);
+ return this;
+}
+
+// Add methods to `MapCache`.
+MapCache.prototype.clear = mapCacheClear;
+MapCache.prototype['delete'] = mapCacheDelete;
+MapCache.prototype.get = mapCacheGet;
+MapCache.prototype.has = mapCacheHas;
+MapCache.prototype.set = mapCacheSet;
+
+/**
+ *
+ * Creates an array cache object to store unique values.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [values] The values to cache.
+ */
+function SetCache(values) {
+ var index = -1,
+ length = values ? values.length : 0;
+
+ this.__data__ = new MapCache;
+ while (++index < length) {
+ this.add(values[index]);
+ }
+}
+
+/**
+ * Adds `value` to the array cache.
+ *
+ * @private
+ * @name add
+ * @memberOf SetCache
+ * @alias push
+ * @param {*} value The value to cache.
+ * @returns {Object} Returns the cache instance.
+ */
+function setCacheAdd(value) {
+ this.__data__.set(value, HASH_UNDEFINED);
+ return this;
+}
+
+/**
+ * Checks if `value` is in the array cache.
+ *
+ * @private
+ * @name has
+ * @memberOf SetCache
+ * @param {*} value The value to search for.
+ * @returns {boolean} Returns `true` if `value` is found, else `false`.
+ */
+function setCacheHas(value) {
+ return this.__data__.has(value);
+}
+
+// Add methods to `SetCache`.
+SetCache.prototype.add = SetCache.prototype.push = setCacheAdd;
+SetCache.prototype.has = setCacheHas;
+
+/**
+ * Gets the index at which the `key` is found in `array` of key-value pairs.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {*} key The key to search for.
+ * @returns {number} Returns the index of the matched value, else `-1`.
+ */
+function assocIndexOf(array, key) {
+ var length = array.length;
+ while (length--) {
+ if (eq(array[length][0], key)) {
+ return length;
+ }
+ }
+ return -1;
+}
+
+/**
+ * The base implementation of `_.isNative` without bad shim checks.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a native function,
+ * else `false`.
+ */
+function baseIsNative(value) {
+ if (!isObject(value) || isMasked(value)) {
+ return false;
+ }
+ var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor;
+ return pattern.test(toSource(value));
+}
+
+/**
+ * The base implementation of `_.uniqBy` without support for iteratee shorthands.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {Function} [iteratee] The iteratee invoked per element.
+ * @param {Function} [comparator] The comparator invoked per element.
+ * @returns {Array} Returns the new duplicate free array.
+ */
+function baseUniq(array, iteratee, comparator) {
+ var index = -1,
+ includes = arrayIncludes,
+ length = array.length,
+ isCommon = true,
+ result = [],
+ seen = result;
+
+ if (comparator) {
+ isCommon = false;
+ includes = arrayIncludesWith;
+ }
+ else if (length >= LARGE_ARRAY_SIZE) {
+ var set = iteratee ? null : createSet(array);
+ if (set) {
+ return setToArray(set);
+ }
+ isCommon = false;
+ includes = cacheHas;
+ seen = new SetCache;
+ }
+ else {
+ seen = iteratee ? [] : result;
+ }
+ outer:
+ while (++index < length) {
+ var value = array[index],
+ computed = iteratee ? iteratee(value) : value;
+
+ value = (comparator || value !== 0) ? value : 0;
+ if (isCommon && computed === computed) {
+ var seenIndex = seen.length;
+ while (seenIndex--) {
+ if (seen[seenIndex] === computed) {
+ continue outer;
+ }
+ }
+ if (iteratee) {
+ seen.push(computed);
+ }
+ result.push(value);
+ }
+ else if (!includes(seen, computed, comparator)) {
+ if (seen !== result) {
+ seen.push(computed);
+ }
+ result.push(value);
+ }
+ }
+ return result;
+}
+
+/**
+ * Creates a set object of `values`.
+ *
+ * @private
+ * @param {Array} values The values to add to the set.
+ * @returns {Object} Returns the new set.
+ */
+var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) {
+ return new Set(values);
+};
+
+/**
+ * Gets the data for `map`.
+ *
+ * @private
+ * @param {Object} map The map to query.
+ * @param {string} key The reference key.
+ * @returns {*} Returns the map data.
+ */
+function getMapData(map, key) {
+ var data = map.__data__;
+ return isKeyable(key)
+ ? data[typeof key == 'string' ? 'string' : 'hash']
+ : data.map;
+}
+
+/**
+ * Gets the native function at `key` of `object`.
+ *
+ * @private
+ * @param {Object} object The object to query.
+ * @param {string} key The key of the method to get.
+ * @returns {*} Returns the function if it's native, else `undefined`.
+ */
+function getNative(object, key) {
+ var value = getValue(object, key);
+ return baseIsNative(value) ? value : undefined;
+}
+
+/**
+ * Checks if `value` is suitable for use as unique object key.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is suitable, else `false`.
+ */
+function isKeyable(value) {
+ var type = typeof value;
+ return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
+ ? (value !== '__proto__')
+ : (value === null);
+}
+
+/**
+ * Checks if `func` has its source masked.
+ *
+ * @private
+ * @param {Function} func The function to check.
+ * @returns {boolean} Returns `true` if `func` is masked, else `false`.
+ */
+function isMasked(func) {
+ return !!maskSrcKey && (maskSrcKey in func);
+}
+
+/**
+ * Converts `func` to its source code.
+ *
+ * @private
+ * @param {Function} func The function to process.
+ * @returns {string} Returns the source code.
+ */
+function toSource(func) {
+ if (func != null) {
+ try {
+ return funcToString.call(func);
+ } catch (e) {}
+ try {
+ return (func + '');
+ } catch (e) {}
+ }
+ return '';
+}
+
+/**
+ * Creates a duplicate-free version of an array, using
+ * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * for equality comparisons, in which only the first occurrence of each
+ * element is kept.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Array
+ * @param {Array} array The array to inspect.
+ * @returns {Array} Returns the new duplicate free array.
+ * @example
+ *
+ * _.uniq([2, 1, 2]);
+ * // => [2, 1]
+ */
+function uniq(array) {
+ return (array && array.length)
+ ? baseUniq(array)
+ : [];
+}
+
+/**
+ * Performs a
+ * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * comparison between two values to determine if they are equivalent.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to compare.
+ * @param {*} other The other value to compare.
+ * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
+ * @example
+ *
+ * var object = { 'a': 1 };
+ * var other = { 'a': 1 };
+ *
+ * _.eq(object, object);
+ * // => true
+ *
+ * _.eq(object, other);
+ * // => false
+ *
+ * _.eq('a', 'a');
+ * // => true
+ *
+ * _.eq('a', Object('a'));
+ * // => false
+ *
+ * _.eq(NaN, NaN);
+ * // => true
+ */
+function eq(value, other) {
+ return value === other || (value !== value && other !== other);
+}
+
+/**
+ * Checks if `value` is classified as a `Function` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a function, else `false`.
+ * @example
+ *
+ * _.isFunction(_);
+ * // => true
+ *
+ * _.isFunction(/abc/);
+ * // => false
+ */
+function isFunction(value) {
+ // The use of `Object#toString` avoids issues with the `typeof` operator
+  // in Safari 8-9, which returns 'object' for typed arrays and other constructors.
+ var tag = isObject(value) ? objectToString.call(value) : '';
+ return tag == funcTag || tag == genTag;
+}
+
+/**
+ * Checks if `value` is the
+ * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
+ * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an object, else `false`.
+ * @example
+ *
+ * _.isObject({});
+ * // => true
+ *
+ * _.isObject([1, 2, 3]);
+ * // => true
+ *
+ * _.isObject(_.noop);
+ * // => true
+ *
+ * _.isObject(null);
+ * // => false
+ */
+function isObject(value) {
+ var type = typeof value;
+ return !!value && (type == 'object' || type == 'function');
+}
+
+/**
+ * This method returns `undefined`.
+ *
+ * @static
+ * @memberOf _
+ * @since 2.3.0
+ * @category Util
+ * @example
+ *
+ * _.times(2, _.noop);
+ * // => [undefined, undefined]
+ */
+function noop() {
+ // No operation performed.
+}
+
+module.exports = uniq;
+
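+// A minimal usage sketch (illustrative input, not from the upstream lodash source):
+// this build only exports `uniq`, which is used elsewhere in the bundle to
+// de-duplicate HTTP accept headers.
+//
+//   uniq(['application/vnd.github.v3+json', 'application/vnd.github.v3+json', 'application/json']);
+//   // => ['application/vnd.github.v3+json', 'application/json']
+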
+
/***/ }),
/***/ 129:
@@ -2039,6 +5238,42 @@ module.exports = bytesToUuid;
module.exports = require("child_process");
+/***/ }),
+
+/***/ 135:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = Octokit;
+
+const { request } = __webpack_require__(753);
+const Hook = __webpack_require__(523);
+
+const parseClientOptions = __webpack_require__(36);
+
+function Octokit(plugins, options) {
+ options = options || {};
+ const hook = new Hook.Collection();
+ const log = Object.assign(
+ {
+ debug: () => {},
+ info: () => {},
+ warn: console.warn,
+ error: console.error
+ },
+ options && options.log
+ );
+ const api = {
+ hook,
+ log,
+ request: request.defaults(parseClientOptions(options, log, hook))
+ };
+
+ plugins.forEach(pluginFunction => pluginFunction(api, options));
+
+ return api;
+}
+
+
/***/ }),
/***/ 139:
@@ -2054,6 +5289,56 @@ module.exports = function nodeRNG() {
};
+/***/ }),
+
+/***/ 140:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationBeforeRequest;
+
+const btoa = __webpack_require__(675);
+const uniq = __webpack_require__(126);
+
+function authenticationBeforeRequest(state, options) {
+ if (!state.auth.type) {
+ return;
+ }
+
+ if (state.auth.type === "basic") {
+ const hash = btoa(`${state.auth.username}:${state.auth.password}`);
+ options.headers.authorization = `Basic ${hash}`;
+ return;
+ }
+
+ if (state.auth.type === "token") {
+ options.headers.authorization = `token ${state.auth.token}`;
+ return;
+ }
+
+ if (state.auth.type === "app") {
+ options.headers.authorization = `Bearer ${state.auth.token}`;
+ const acceptHeaders = options.headers.accept
+ .split(",")
+ .concat("application/vnd.github.machine-man-preview+json");
+ options.headers.accept = uniq(acceptHeaders)
+ .filter(Boolean)
+ .join(",");
+ return;
+ }
+
+ options.url += options.url.indexOf("?") === -1 ? "?" : "&";
+
+ if (state.auth.token) {
+ options.url += `access_token=${encodeURIComponent(state.auth.token)}`;
+ return;
+ }
+
+ const key = encodeURIComponent(state.auth.key);
+ const secret = encodeURIComponent(state.auth.secret);
+ options.url += `client_id=${key}&client_secret=${secret}`;
+}
+
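+// Illustrative (assumed state, not from the upstream source): with
+// state.auth = { type: "token", token: "abc123" } this hook sets
+// options.headers.authorization to "token abc123" and returns early; the
+// query-string branch at the bottom handles the legacy access_token and
+// client_id/client_secret forms.
+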
+
/***/ }),
/***/ 141:
@@ -2065,7 +5350,7 @@ module.exports = function nodeRNG() {
var net = __webpack_require__(631);
var tls = __webpack_require__(16);
var http = __webpack_require__(605);
-var https = __webpack_require__(211);
+var https = __webpack_require__(34);
var events = __webpack_require__(614);
var assert = __webpack_require__(357);
var util = __webpack_require__(669);
@@ -2326,6 +5611,900 @@ if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
exports.debug = debug; // for test
+/***/ }),
+
+/***/ 145:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const pump = __webpack_require__(453);
+const bufferStream = __webpack_require__(966);
+
+class MaxBufferError extends Error {
+ constructor() {
+ super('maxBuffer exceeded');
+ this.name = 'MaxBufferError';
+ }
+}
+
+function getStream(inputStream, options) {
+ if (!inputStream) {
+ return Promise.reject(new Error('Expected a stream'));
+ }
+
+ options = Object.assign({maxBuffer: Infinity}, options);
+
+ const {maxBuffer} = options;
+
+ let stream;
+ return new Promise((resolve, reject) => {
+ const rejectPromise = error => {
+ if (error) { // A null check
+ error.bufferedData = stream.getBufferedValue();
+ }
+ reject(error);
+ };
+
+ stream = pump(inputStream, bufferStream(options), error => {
+ if (error) {
+ rejectPromise(error);
+ return;
+ }
+
+ resolve();
+ });
+
+ stream.on('data', () => {
+ if (stream.getBufferedLength() > maxBuffer) {
+ rejectPromise(new MaxBufferError());
+ }
+ });
+ }).then(() => stream.getBufferedValue());
+}
+
+module.exports = getStream;
+module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'}));
+module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true}));
+module.exports.MaxBufferError = MaxBufferError;
+
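+// A minimal usage sketch (assumed stream, not from the upstream source): getStream
+// buffers a readable stream into a promise; once more than `maxBuffer` bytes are
+// buffered the promise rejects with MaxBufferError, and the partial data is
+// available on error.bufferedData.
+//
+//   getStream(someReadableStream, { maxBuffer: 1024 * 1024 })
+//     .then(contents => console.log(contents.length))
+//     .catch(error => { if (error instanceof MaxBufferError) { /* partial data in error.bufferedData */ } });
+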
+
+/***/ }),
+
+/***/ 150:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+/*!
+ * Tmp
+ *
+ * Copyright (c) 2011-2017 KARASZI Istvan
+ *
+ * MIT Licensed
+ */
+
+/*
+ * Module dependencies.
+ */
+const fs = __webpack_require__(747);
+const os = __webpack_require__(87);
+const path = __webpack_require__(622);
+const crypto = __webpack_require__(417);
+const _c = { fs: fs.constants, os: os.constants };
+const rimraf = __webpack_require__(13);
+
+/*
+ * The working inner variables.
+ */
+const
+ // the random characters to choose from
+ RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',
+
+ TEMPLATE_PATTERN = /XXXXXX/,
+
+ DEFAULT_TRIES = 3,
+
+ CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR),
+
+ // constants are off on the windows platform and will not match the actual errno codes
+ IS_WIN32 = os.platform() === 'win32',
+ EBADF = _c.EBADF || _c.os.errno.EBADF,
+ ENOENT = _c.ENOENT || _c.os.errno.ENOENT,
+
+ DIR_MODE = 0o700 /* 448 */,
+ FILE_MODE = 0o600 /* 384 */,
+
+ EXIT = 'exit',
+
+ // this will hold the objects that need to be removed on exit
+ _removeObjects = [],
+
+ // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback
+ FN_RMDIR_SYNC = fs.rmdirSync.bind(fs),
+ FN_RIMRAF_SYNC = rimraf.sync;
+
+let
+ _gracefulCleanup = false;
+
+/**
+ * Gets a temporary file name.
+ *
+ * @param {(Options|tmpNameCallback)} options options or callback
+ * @param {?tmpNameCallback} callback the callback function
+ */
+function tmpName(options, callback) {
+ const
+ args = _parseArguments(options, callback),
+ opts = args[0],
+ cb = args[1];
+
+ try {
+ _assertAndSanitizeOptions(opts);
+ } catch (err) {
+ return cb(err);
+ }
+
+ let tries = opts.tries;
+ (function _getUniqueName() {
+ try {
+ const name = _generateTmpName(opts);
+
+ // check whether the path exists then retry if needed
+ fs.stat(name, function (err) {
+ /* istanbul ignore else */
+ if (!err) {
+ /* istanbul ignore else */
+ if (tries-- > 0) return _getUniqueName();
+
+ return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name));
+ }
+
+ cb(null, name);
+ });
+ } catch (err) {
+ cb(err);
+ }
+ }());
+}
+
+/**
+ * Synchronous version of tmpName.
+ *
+ * @param {Object} options
+ * @returns {string} the generated random name
+ * @throws {Error} if the options are invalid or could not generate a filename
+ */
+function tmpNameSync(options) {
+ const
+ args = _parseArguments(options),
+ opts = args[0];
+
+ _assertAndSanitizeOptions(opts);
+
+ let tries = opts.tries;
+ do {
+ const name = _generateTmpName(opts);
+ try {
+ fs.statSync(name);
+ } catch (e) {
+ return name;
+ }
+ } while (tries-- > 0);
+
+ throw new Error('Could not get a unique tmp filename, max tries reached');
+}
+
+/**
+ * Creates and opens a temporary file.
+ *
+ * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined
+ * @param {?fileCallback} callback
+ */
+function file(options, callback) {
+ const
+ args = _parseArguments(options, callback),
+ opts = args[0],
+ cb = args[1];
+
+ // gets a temporary filename
+ tmpName(opts, function _tmpNameCreated(err, name) {
+ /* istanbul ignore else */
+ if (err) return cb(err);
+
+ // create and open the file
+ fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) {
+ /* istanbul ignore else */
+ if (err) return cb(err);
+
+ if (opts.discardDescriptor) {
+ return fs.close(fd, function _discardCallback(possibleErr) {
+ // the chance of getting an error on close here is rather low; it would occur only in the most extreme edge cases
+ return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false));
+ });
+ } else {
+ // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care
+ // about the descriptor
+ const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
+ cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false));
+ }
+ });
+ });
+}
+
+/**
+ * Synchronous version of file.
+ *
+ * @param {Options} options
+ * @returns {FileSyncObject} object consists of name, fd and removeCallback
+ * @throws {Error} if cannot create a file
+ */
+function fileSync(options) {
+ const
+ args = _parseArguments(options),
+ opts = args[0];
+
+ const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
+ const name = tmpNameSync(opts);
+ var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);
+ /* istanbul ignore else */
+ if (opts.discardDescriptor) {
+ fs.closeSync(fd);
+ fd = undefined;
+ }
+
+ return {
+ name: name,
+ fd: fd,
+ removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true)
+ };
+}
+
+/**
+ * Creates a temporary directory.
+ *
+ * @param {(Options|dirCallback)} options the options or the callback function
+ * @param {?dirCallback} callback
+ */
+function dir(options, callback) {
+ const
+ args = _parseArguments(options, callback),
+ opts = args[0],
+ cb = args[1];
+
+ // gets a temporary filename
+ tmpName(opts, function _tmpNameCreated(err, name) {
+ /* istanbul ignore else */
+ if (err) return cb(err);
+
+ // create the directory
+ fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) {
+ /* istanbul ignore else */
+ if (err) return cb(err);
+
+ cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false));
+ });
+ });
+}
+
+/**
+ * Synchronous version of dir.
+ *
+ * @param {Options} options
+ * @returns {DirSyncObject} object consists of name and removeCallback
+ * @throws {Error} if it cannot create a directory
+ */
+function dirSync(options) {
+ const
+ args = _parseArguments(options),
+ opts = args[0];
+
+ const name = tmpNameSync(opts);
+ fs.mkdirSync(name, opts.mode || DIR_MODE);
+
+ return {
+ name: name,
+ removeCallback: _prepareTmpDirRemoveCallback(name, opts, true)
+ };
+}
+
+/**
+ * Removes files asynchronously.
+ *
+ * @param {Object} fdPath
+ * @param {Function} next
+ * @private
+ */
+function _removeFileAsync(fdPath, next) {
+ const _handler = function (err) {
+ if (err && !_isENOENT(err)) {
+ // reraise any unanticipated error
+ return next(err);
+ }
+ next();
+ };
+
+ if (0 <= fdPath[0])
+ fs.close(fdPath[0], function () {
+ fs.unlink(fdPath[1], _handler);
+ });
+ else fs.unlink(fdPath[1], _handler);
+}
+
+/**
+ * Removes files synchronously.
+ *
+ * @param {Object} fdPath
+ * @private
+ */
+function _removeFileSync(fdPath) {
+ let rethrownException = null;
+ try {
+ if (0 <= fdPath[0]) fs.closeSync(fdPath[0]);
+ } catch (e) {
+ // reraise any unanticipated error
+ if (!_isEBADF(e) && !_isENOENT(e)) throw e;
+ } finally {
+ try {
+ fs.unlinkSync(fdPath[1]);
+ }
+ catch (e) {
+ // reraise any unanticipated error
+ if (!_isENOENT(e)) rethrownException = e;
+ }
+ }
+ if (rethrownException !== null) {
+ throw rethrownException;
+ }
+}
+
+/**
+ * Prepares the callback for removal of the temporary file.
+ *
+ * Returns either a sync callback or an async callback depending on whether
+ * fileSync or file was called, which is expressed by the sync parameter.
+ *
+ * @param {string} name the path of the file
+ * @param {number} fd file descriptor
+ * @param {Object} opts
+ * @param {boolean} sync
+ * @returns {fileCallback | fileCallbackSync}
+ * @private
+ */
+function _prepareTmpFileRemoveCallback(name, fd, opts, sync) {
+ const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync);
+ const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync);
+
+ if (!opts.keep) _removeObjects.unshift(removeCallbackSync);
+
+ return sync ? removeCallbackSync : removeCallback;
+}
+
+/**
+ * Prepares the callback for removal of the temporary directory.
+ *
+ * Returns either a sync callback or an async callback depending on whether
+ * dirSync or dir was called, which is expressed by the sync parameter.
+ *
+ * @param {string} name
+ * @param {Object} opts
+ * @param {boolean} sync
+ * @returns {Function} the callback
+ * @private
+ */
+function _prepareTmpDirRemoveCallback(name, opts, sync) {
+ const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs);
+ const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC;
+ const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync);
+ const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync);
+ if (!opts.keep) _removeObjects.unshift(removeCallbackSync);
+
+ return sync ? removeCallbackSync : removeCallback;
+}
+
+/**
+ * Creates a guarded function wrapping the removeFunction call.
+ *
+ * The cleanup callback is safe to be called multiple times.
+ * Subsequent invocations will be ignored.
+ *
+ * @param {Function} removeFunction
+ * @param {string} fileOrDirName
+ * @param {boolean} sync
+ * @param {cleanupCallbackSync?} cleanupCallbackSync
+ * @returns {cleanupCallback | cleanupCallbackSync}
+ * @private
+ */
+function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) {
+ let called = false;
+
+ // if sync is true, the next parameter will be ignored
+ return function _cleanupCallback(next) {
+
+ /* istanbul ignore else */
+ if (!called) {
+ // remove cleanupCallback from cache
+ const toRemove = cleanupCallbackSync || _cleanupCallback;
+ const index = _removeObjects.indexOf(toRemove);
+ /* istanbul ignore else */
+ if (index >= 0) _removeObjects.splice(index, 1);
+
+ called = true;
+ if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) {
+ return removeFunction(fileOrDirName);
+ } else {
+ return removeFunction(fileOrDirName, next || function() {});
+ }
+ }
+ };
+}
+
+/**
+ * The garbage collector.
+ *
+ * @private
+ */
+function _garbageCollector() {
+ /* istanbul ignore else */
+ if (!_gracefulCleanup) return;
+
+ // the function being called removes itself from _removeObjects,
+ // loop until _removeObjects is empty
+ while (_removeObjects.length) {
+ try {
+ _removeObjects[0]();
+ } catch (e) {
+ // already removed?
+ }
+ }
+}
+
+/**
+ * Random name generator based on crypto.
+ * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
+ *
+ * @param {number} howMany
+ * @returns {string} the generated random name
+ * @private
+ */
+function _randomChars(howMany) {
+ let
+ value = [],
+ rnd = null;
+
+ // make sure that we do not fail because we ran out of entropy
+ try {
+ rnd = crypto.randomBytes(howMany);
+ } catch (e) {
+ rnd = crypto.pseudoRandomBytes(howMany);
+ }
+
+ for (var i = 0; i < howMany; i++) {
+ value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]);
+ }
+
+ return value.join('');
+}
+
+/**
+ * Helper which determines whether a string s is blank, that is, undefined, null, empty, or whitespace-only.
+ *
+ * @private
+ * @param {string} s
+ * @returns {Boolean} true if the string s is blank, false otherwise
+ */
+function _isBlank(s) {
+ return s === null || _isUndefined(s) || !s.trim();
+}
+
+/**
+ * Checks whether the `obj` parameter is defined or not.
+ *
+ * @param {Object} obj
+ * @returns {boolean} true if the object is undefined
+ * @private
+ */
+function _isUndefined(obj) {
+ return typeof obj === 'undefined';
+}
+
+/**
+ * Parses the function arguments.
+ *
+ * This function helps to have optional arguments.
+ *
+ * @param {(Options|null|undefined|Function)} options
+ * @param {?Function} callback
+ * @returns {Array} parsed arguments
+ * @private
+ */
+function _parseArguments(options, callback) {
+ /* istanbul ignore else */
+ if (typeof options === 'function') {
+ return [{}, options];
+ }
+
+ /* istanbul ignore else */
+ if (_isUndefined(options)) {
+ return [{}, callback];
+ }
+
+ // copy options so we do not leak the changes we make internally
+ const actualOptions = {};
+ for (const key of Object.getOwnPropertyNames(options)) {
+ actualOptions[key] = options[key];
+ }
+
+ return [actualOptions, callback];
+}
+
+/**
+ * Generates a new temporary name.
+ *
+ * @param {Object} opts
+ * @returns {string} the new random name according to opts
+ * @private
+ */
+function _generateTmpName(opts) {
+
+ const tmpDir = opts.tmpdir;
+
+ /* istanbul ignore else */
+ if (!_isUndefined(opts.name))
+ return path.join(tmpDir, opts.dir, opts.name);
+
+ /* istanbul ignore else */
+ if (!_isUndefined(opts.template))
+ return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6));
+
+ // prefix and postfix
+ const name = [
+ opts.prefix ? opts.prefix : 'tmp',
+ '-',
+ process.pid,
+ '-',
+ _randomChars(12),
+ opts.postfix ? '-' + opts.postfix : ''
+ ].join('');
+
+ return path.join(tmpDir, opts.dir, name);
+}
+
+/**
+ * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing
+ * options.
+ *
+ * @param {Options} options
+ * @private
+ */
+function _assertAndSanitizeOptions(options) {
+
+ options.tmpdir = _getTmpDir(options);
+
+ const tmpDir = options.tmpdir;
+
+ /* istanbul ignore else */
+ if (!_isUndefined(options.name))
+ _assertIsRelative(options.name, 'name', tmpDir);
+ /* istanbul ignore else */
+ if (!_isUndefined(options.dir))
+ _assertIsRelative(options.dir, 'dir', tmpDir);
+ /* istanbul ignore else */
+ if (!_isUndefined(options.template)) {
+ _assertIsRelative(options.template, 'template', tmpDir);
+ if (!options.template.match(TEMPLATE_PATTERN))
+ throw new Error(`Invalid template, found "${options.template}".`);
+ }
+ /* istanbul ignore else */
+ if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0)
+ throw new Error(`Invalid tries, found "${options.tries}".`);
+
+ // if a name was specified we will try once
+ options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1;
+ options.keep = !!options.keep;
+ options.detachDescriptor = !!options.detachDescriptor;
+ options.discardDescriptor = !!options.discardDescriptor;
+ options.unsafeCleanup = !!options.unsafeCleanup;
+
+ // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to
+ options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir));
+ options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir));
+ // sanitize further if template is relative to options.dir
+ options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template);
+
+ // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to
+ options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name);
+ options.prefix = _isUndefined(options.prefix) ? '' : options.prefix;
+ options.postfix = _isUndefined(options.postfix) ? '' : options.postfix;
+}
+
+/**
+ * Resolve the specified path name in respect to tmpDir.
+ *
+ * The specified name might include relative path components, e.g. ../
+ * so we need to resolve in order to be sure that it is located inside tmpDir
+ *
+ * @param name
+ * @param tmpDir
+ * @returns {string}
+ * @private
+ */
+function _resolvePath(name, tmpDir) {
+ const sanitizedName = _sanitizeName(name);
+ if (sanitizedName.startsWith(tmpDir)) {
+ return path.resolve(sanitizedName);
+ } else {
+ return path.resolve(path.join(tmpDir, sanitizedName));
+ }
+}
+
+/**
+ * Sanitize the specified path name by removing all quote characters.
+ *
+ * @param name
+ * @returns {string}
+ * @private
+ */
+function _sanitizeName(name) {
+ if (_isBlank(name)) {
+ return name;
+ }
+ return name.replace(/["']/g, '');
+}
+
+/**
+ * Asserts whether the specified name is relative to the specified tmpDir.
+ *
+ * @param {string} name
+ * @param {string} option
+ * @param {string} tmpDir
+ * @throws {Error}
+ * @private
+ */
+function _assertIsRelative(name, option, tmpDir) {
+ if (option === 'name') {
+ // assert that name is not absolute and does not contain a path
+ if (path.isAbsolute(name))
+ throw new Error(`${option} option must not contain an absolute path, found "${name}".`);
+ // must not fail on valid . or .. or similar constructs
+ let basename = path.basename(name);
+ if (basename === '..' || basename === '.' || basename !== name)
+ throw new Error(`${option} option must not contain a path, found "${name}".`);
+ }
+ else { // if (option === 'dir' || option === 'template') {
+ // assert that dir or template are relative to tmpDir
+ if (path.isAbsolute(name) && !name.startsWith(tmpDir)) {
+ throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`);
+ }
+ let resolvedPath = _resolvePath(name, tmpDir);
+ if (!resolvedPath.startsWith(tmpDir))
+ throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`);
+ }
+}
+
+/**
+ * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows.
+ *
+ * @private
+ */
+function _isEBADF(error) {
+ return _isExpectedError(error, -EBADF, 'EBADF');
+}
+
+/**
+ * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows.
+ *
+ * @private
+ */
+function _isENOENT(error) {
+ return _isExpectedError(error, -ENOENT, 'ENOENT');
+}
+
+/**
+ * Helper to determine whether the expected error code matches the actual code and errno,
+ * which will differ between the supported node versions.
+ *
+ * - Node >= 7.0:
+ * error.code {string}
+ * error.errno {number} any numerical value will be negated
+ *
+ * CAVEAT
+ *
+ * On Windows, the errno for EBADF is -4083, but os.constants.errno.EBADF differs, and we must assume that ENOENT
+ * is no different here.
+ *
+ * @param {SystemError} error
+ * @param {number} errno
+ * @param {string} code
+ * @private
+ */
+function _isExpectedError(error, errno, code) {
+ return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno;
+}
+
+/**
+ * Sets the graceful cleanup.
+ *
+ * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the
+ * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary
+ * object removals.
+ */
+function setGracefulCleanup() {
+ _gracefulCleanup = true;
+}
+
+/**
+ * Returns the currently configured tmp dir from os.tmpdir().
+ *
+ * @private
+ * @param {?Options} options
+ * @returns {string} the currently configured tmp dir
+ */
+function _getTmpDir(options) {
+ return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir()));
+}
+
+// Install process exit listener
+process.addListener(EXIT, _garbageCollector);
+
+/**
+ * Configuration options.
+ *
+ * @typedef {Object} Options
+ * @property {?boolean} keep the temporary object (file or dir) will not be garbage collected
+ * @property {?number} tries the number of tries before giving up on name generation
+ * @property {?number} mode the access mode, defaults are 0o700 for directories and 0o600 for files
+ * @property {?string} template the "mkstemp" like filename template
+ * @property {?string} name fixed name relative to tmpdir or the specified dir option
+ * @property {?string} dir tmp directory relative to the root tmp directory in use
+ * @property {?string} prefix prefix for the generated name
+ * @property {?string} postfix postfix for the generated name
+ * @property {?string} tmpdir the root tmp directory which overrides the os tmpdir
+ * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty
+ * @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection
+ * @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection
+ */
+
+/**
+ * @typedef {Object} FileSyncObject
+ * @property {string} name the name of the file
+ * @property {string} fd the file descriptor or -1 if the fd has been discarded
+ * @property {fileCallback} removeCallback the callback function to remove the file
+ */
+
+/**
+ * @typedef {Object} DirSyncObject
+ * @property {string} name the name of the directory
+ * @property {fileCallback} removeCallback the callback function to remove the directory
+ */
+
+/**
+ * @callback tmpNameCallback
+ * @param {?Error} err the error object if anything goes wrong
+ * @param {string} name the temporary file name
+ */
+
+/**
+ * @callback fileCallback
+ * @param {?Error} err the error object if anything goes wrong
+ * @param {string} name the temporary file name
+ * @param {number} fd the file descriptor or -1 if the fd had been discarded
+ * @param {cleanupCallback} fn the cleanup callback function
+ */
+
+/**
+ * @callback fileCallbackSync
+ * @param {?Error} err the error object if anything goes wrong
+ * @param {string} name the temporary file name
+ * @param {number} fd the file descriptor or -1 if the fd had been discarded
+ * @param {cleanupCallbackSync} fn the cleanup callback function
+ */
+
+/**
+ * @callback dirCallback
+ * @param {?Error} err the error object if anything goes wrong
+ * @param {string} name the temporary file name
+ * @param {cleanupCallback} fn the cleanup callback function
+ */
+
+/**
+ * @callback dirCallbackSync
+ * @param {?Error} err the error object if anything goes wrong
+ * @param {string} name the temporary file name
+ * @param {cleanupCallbackSync} fn the cleanup callback function
+ */
+
+/**
+ * Removes the created temporary file or directory.
+ *
+ * @callback cleanupCallback
+ * @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed
+ */
+
+/**
+ * Removes the created temporary file or directory.
+ *
+ * @callback cleanupCallbackSync
+ */
+
+/**
+ * Callback function for function composition.
+ * @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
+ *
+ * @callback simpleCallback
+ */
+
+// exporting all the needed methods
+
+// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will
+// allow users to reconfigure the temporary directory
+Object.defineProperty(module.exports, 'tmpdir', {
+ enumerable: true,
+ configurable: false,
+ get: function () {
+ return _getTmpDir();
+ }
+});
+
+module.exports.dir = dir;
+module.exports.dirSync = dirSync;
+
+module.exports.file = file;
+module.exports.fileSync = fileSync;
+
+module.exports.tmpName = tmpName;
+module.exports.tmpNameSync = tmpNameSync;
+
+module.exports.setGracefulCleanup = setGracefulCleanup;
+
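+// A minimal usage sketch (illustrative, not part of the upstream tmp source): the
+// action code later in this bundle promisifies `dir` to obtain a scratch directory
+// for the pull request patch.
+//
+//   const { promisify } = require('util');
+//   promisify(dir)().then(name => {
+//     // name looks like <tmpdir>/tmp-<pid>-<12 random chars>
+//   });
+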
+
+/***/ }),
+
+/***/ 168:
+/***/ (function(module) {
+
+"use strict";
+
+const alias = ['stdin', 'stdout', 'stderr'];
+
+const hasAlias = opts => alias.some(x => Boolean(opts[x]));
+
+module.exports = opts => {
+ if (!opts) {
+ return null;
+ }
+
+ if (opts.stdio && hasAlias(opts)) {
+ throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`);
+ }
+
+ if (typeof opts.stdio === 'string') {
+ return opts.stdio;
+ }
+
+ const stdio = opts.stdio || [];
+
+ if (!Array.isArray(stdio)) {
+ throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
+ }
+
+ const result = [];
+ const len = Math.max(stdio.length, alias.length);
+
+ for (let i = 0; i < len; i++) {
+ let value = null;
+
+ if (stdio[i] !== undefined) {
+ value = stdio[i];
+ } else if (opts[alias[i]] !== undefined) {
+ value = opts[alias[i]];
+ }
+
+ result[i] = value;
+ }
+
+ return result;
+};
+
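+// A minimal sketch of the normalization above (assumed inputs, not from the
+// upstream source): a string `stdio` passes through untouched, while the
+// stdin/stdout/stderr aliases are folded into a positional array.
+//
+//   module.exports({ stdio: 'inherit' });               // => 'inherit'
+//   module.exports({ stdin: 'pipe', stderr: 'pipe' });  // => ['pipe', null, 'pipe']
+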
+
/***/ }),
/***/ 180:
@@ -2351,20 +6530,78 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
+const github = __importStar(__webpack_require__(469));
const child_process_1 = __webpack_require__(129);
const fs = __importStar(__webpack_require__(747));
const path = __importStar(__webpack_require__(622));
+const tmp_1 = __webpack_require__(150);
const util_1 = __webpack_require__(669);
const cache_1 = __webpack_require__(722);
const install_1 = __webpack_require__(655);
const version_1 = __webpack_require__(52);
const execShellCommand = util_1.promisify(child_process_1.exec);
+const writeFile = util_1.promisify(fs.writeFile);
+const createTempDir = util_1.promisify(tmp_1.dir);
function prepareLint() {
return __awaiter(this, void 0, void 0, function* () {
const versionConfig = yield version_1.findLintVersion();
return yield install_1.installLint(versionConfig);
});
}
+function fetchPatch() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const onlyNewIssues = core.getInput(`only-new-issues`, { required: true }).trim();
+ if (onlyNewIssues !== `false` && onlyNewIssues !== `true`) {
+ throw new Error(`invalid value of "only-new-issues": "${onlyNewIssues}", expected "true" or "false"`);
+ }
+ if (onlyNewIssues === `false`) {
+ return ``;
+ }
+ const ctx = github.context;
+ if (ctx.eventName !== `pull_request`) {
+ core.info(`Not fetching patch for showing only new issues because it's not a pull request context: event name is ${ctx.eventName}`);
+ return ``;
+ }
+ const pull = ctx.payload.pull_request;
+ if (!pull) {
+ core.warning(`No pull request in context`);
+ return ``;
+ }
+ const octokit = new github.GitHub(core.getInput(`github-token`, { required: true }));
+ let patch;
+ try {
+ const patchResp = yield octokit.pulls.get({
+ owner: ctx.repo.owner,
+ repo: ctx.repo.repo,
+ [`pull_number`]: pull.number,
+ mediaType: {
+ format: `diff`,
+ },
+ });
+ if (patchResp.status !== 200) {
+ core.warning(`failed to fetch pull request patch: response status is ${patchResp.status}`);
+ return ``; // don't fail the action, but analyze without patch
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ patch = patchResp.data;
+ }
+ catch (err) {
+ console.warn(`failed to fetch pull request patch:`, err);
+ return ``; // don't fail the action, but analyze without patch
+ }
+ try {
+ const tempDir = yield createTempDir();
+ const patchPath = path.join(tempDir, "pull.patch");
+ core.info(`Writing patch to ${patchPath}`);
+ yield writeFile(patchPath, patch);
+ return patchPath;
+ }
+ catch (err) {
+ console.warn(`failed to save pull request patch:`, err);
+ return ``; // don't fail the action, but analyze without patch
+ }
+ });
+}
function prepareEnv() {
return __awaiter(this, void 0, void 0, function* () {
const startedAt = Date.now();
@@ -2372,11 +6609,13 @@ function prepareEnv() {
const restoreCachePromise = cache_1.restoreCache();
const prepareLintPromise = prepareLint();
const installGoPromise = install_1.installGo();
+ const patchPromise = fetchPatch();
const lintPath = yield prepareLintPromise;
yield installGoPromise;
yield restoreCachePromise;
+ const patchPath = yield patchPromise;
core.info(`Prepared env in ${Date.now() - startedAt}ms`);
- return lintPath;
+ return { lintPath, patchPath };
});
}
const printOutput = (res) => {
@@ -2387,26 +6626,47 @@ const printOutput = (res) => {
core.info(res.stderr);
}
};
-function runLint(lintPath) {
+function runLint(lintPath, patchPath) {
return __awaiter(this, void 0, void 0, function* () {
const debug = core.getInput(`debug`);
if (debug.split(`,`).includes(`cache`)) {
const res = yield execShellCommand(`${lintPath} cache status`);
printOutput(res);
}
- const args = core.getInput(`args`);
- if (args.includes(`-out-format`)) {
+ const userArgs = core.getInput(`args`);
+ const addedArgs = [];
+ const userArgNames = new Set();
+ userArgs
+ .split(/\s/)
+ .map(arg => arg.split(`=`)[0])
+ .filter(arg => arg.startsWith(`-`))
+ .forEach(arg => {
+ userArgNames.add(arg.replace(`-`, ``));
+ });
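+ // Illustrative example (assumed input): user args `-E=gofmt ./...` put "E"
+ // into userArgNames; non-flag items such as `./...` are filtered out above.
+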
+ if (userArgNames.has(`out-format`)) {
+ throw new Error(`please, don't change out-format for golangci-lint: it can break in the future`);
}
+ addedArgs.push(`--out-format=github-actions`);
+ if (patchPath && (userArgNames.has(`new`) || userArgNames.has(`new-from-rev`) || userArgNames.has(`new-from-patch`))) {
+ throw new Error(`please, don't manually specify --new* args when requesting only new issues`);
+ }
+ addedArgs.push(`--new-from-patch=${patchPath}`);
+ // Override config values.
+ addedArgs.push(`--new=false`);
+ addedArgs.push(`--new-from-rev=`);
const workingDirectory = core.getInput(`working-directory`);
const cmdArgs = {};
- if (workingDirectory != ``) {
+ if (workingDirectory) {
+ if (patchPath) {
+ // TODO: make them compatible
+ throw new Error(`options working-directory and only-new-issues aren't compatible`);
+ }
if (!fs.existsSync(workingDirectory) || !fs.lstatSync(workingDirectory).isDirectory()) {
+ throw new Error(`working-directory (${workingDirectory}) does not exist or is not a directory`);
}
cmdArgs.cwd = path.resolve(workingDirectory);
}
- const cmd = `${lintPath} run --out-format=github-actions ${args}`.trimRight();
+ const cmd = `${lintPath} run ${addedArgs.join(` `)} ${userArgs}`.trimRight();
core.info(`Running [${cmd}] in [${cmdArgs.cwd}] ...`);
const startedAt = Date.now();
try {
@@ -2431,8 +6691,8 @@ function runLint(lintPath) {
function run() {
return __awaiter(this, void 0, void 0, function* () {
try {
- const lintPath = yield core.group(`prepare environment`, prepareEnv);
- yield core.group(`run golangci-lint`, () => runLint(lintPath));
+ const { lintPath, patchPath } = yield core.group(`prepare environment`, prepareEnv);
+ yield core.group(`run golangci-lint`, () => runLint(lintPath, patchPath));
}
catch (error) {
core.error(`Failed to run: ${error}, ${error.stack}`);
@@ -2455,6 +6715,54 @@ function postRun() {
exports.postRun = postRun;
+/***/ }),
+
+/***/ 197:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = isexe
+isexe.sync = sync
+
+var fs = __webpack_require__(747)
+
+function isexe (path, options, cb) {
+ fs.stat(path, function (er, stat) {
+ cb(er, er ? false : checkStat(stat, options))
+ })
+}
+
+function sync (path, options) {
+ return checkStat(fs.statSync(path), options)
+}
+
+function checkStat (stat, options) {
+ return stat.isFile() && checkMode(stat, options)
+}
+
+function checkMode (stat, options) {
+ var mod = stat.mode
+ var uid = stat.uid
+ var gid = stat.gid
+
+ var myUid = options.uid !== undefined ?
+ options.uid : process.getuid && process.getuid()
+ var myGid = options.gid !== undefined ?
+ options.gid : process.getgid && process.getgid()
+
+ var u = parseInt('100', 8)
+ var g = parseInt('010', 8)
+ var o = parseInt('001', 8)
+ var ug = u | g
+
+ var ret = (mod & o) ||
+ (mod & g) && gid === myGid ||
+ (mod & u) && uid === myUid ||
+ (mod & ug) && myUid === 0
+
+ return ret
+}
+
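+// A minimal sketch (assumed path, not from the upstream source): checkMode treats
+// a file as executable when an execute bit matching the current uid/gid (or any
+// execute bit, for root) is set.
+//
+//   isexe('/usr/bin/env', {}, (err, ok) => { /* ok is truthy on typical systems */ })
+//   isexe.sync('/usr/bin/env', {})  // => truthy on typical systems
+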
+
/***/ }),
/***/ 206:
@@ -2482,7 +6790,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
const path = __importStar(__webpack_require__(622));
const cacheHttpClient = __importStar(__webpack_require__(342));
-const constants_1 = __webpack_require__(929);
+const constants_1 = __webpack_require__(573);
const tar_1 = __webpack_require__(570);
const utils = __importStar(__webpack_require__(867));
function run() {
@@ -2565,9 +6873,32 @@ exports.default = run;
/***/ }),
/***/ 211:
-/***/ (function(module) {
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var osName = _interopDefault(__webpack_require__(2));
+
+function getUserAgent() {
+ try {
+ return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
+ } catch (error) {
+ if (/wmic os get Caption/.test(error.message)) {
+ return "Windows ";
+ }
+
+ return "";
+ }
+}
+
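+// Illustrative output (values depend on the runner, not from the upstream source):
+// getUserAgent() returns a string such as "Node.js/12.16.1 (Linux 5.4; x64)".
+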
+exports.getUserAgent = getUserAgent;
+//# sourceMappingURL=index.js.map
-module.exports = require("https");
/***/ }),
@@ -2635,9 +6966,24549 @@ class PersonalAccessTokenCredentialHandler {
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
+/***/ }),
+
+/***/ 240:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationRequestError;
+
+const { RequestError } = __webpack_require__(102);
+
+function authenticationRequestError(state, error, options) {
+ /* istanbul ignore next */
+ if (!error.headers) throw error;
+
+ const otpRequired = /required/.test(error.headers["x-github-otp"] || "");
+ // handle "2FA required" error only
+ if (error.status !== 401 || !otpRequired) {
+ throw error;
+ }
+
+ if (
+ error.status === 401 &&
+ otpRequired &&
+ error.request &&
+ error.request.headers["x-github-otp"]
+ ) {
+ throw new RequestError(
+ "Invalid one-time password for two-factor authentication",
+ 401,
+ {
+ headers: error.headers,
+ request: options
+ }
+ );
+ }
+
+ if (typeof state.auth.on2fa !== "function") {
+ throw new RequestError(
+ "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication",
+ 401,
+ {
+ headers: error.headers,
+ request: options
+ }
+ );
+ }
+
+ return Promise.resolve()
+ .then(() => {
+ return state.auth.on2fa();
+ })
+ .then(oneTimePassword => {
+ const newOptions = Object.assign(options, {
+ headers: Object.assign(
+ { "x-github-otp": oneTimePassword },
+ options.headers
+ )
+ });
+ return state.octokit.request(newOptions);
+ });
+}
+
+
+/***/ }),
+
+/***/ 245:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = globSync
+globSync.GlobSync = GlobSync
+
+var fs = __webpack_require__(747)
+var rp = __webpack_require__(302)
+var minimatch = __webpack_require__(93)
+var Minimatch = minimatch.Minimatch
+var Glob = __webpack_require__(402).Glob
+var util = __webpack_require__(669)
+var path = __webpack_require__(622)
+var assert = __webpack_require__(357)
+var isAbsolute = __webpack_require__(681)
+var common = __webpack_require__(644)
+var alphasort = common.alphasort
+var alphasorti = common.alphasorti
+var setopts = common.setopts
+var ownProp = common.ownProp
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+function globSync (pattern, options) {
+ if (typeof options === 'function' || arguments.length === 3)
+ throw new TypeError('callback provided to sync glob\n'+
+ 'See: https://github.com/isaacs/node-glob/issues/167')
+
+ return new GlobSync(pattern, options).found
+}
+
+function GlobSync (pattern, options) {
+ if (!pattern)
+ throw new Error('must provide pattern')
+
+ if (typeof options === 'function' || arguments.length === 3)
+ throw new TypeError('callback provided to sync glob\n'+
+ 'See: https://github.com/isaacs/node-glob/issues/167')
+
+ if (!(this instanceof GlobSync))
+ return new GlobSync(pattern, options)
+
+ setopts(this, pattern, options)
+
+ if (this.noprocess)
+ return this
+
+ var n = this.minimatch.set.length
+ this.matches = new Array(n)
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false)
+ }
+ this._finish()
+}
+
+GlobSync.prototype._finish = function () {
+ assert(this instanceof GlobSync)
+ if (this.realpath) {
+ var self = this
+ this.matches.forEach(function (matchset, index) {
+ var set = self.matches[index] = Object.create(null)
+ for (var p in matchset) {
+ try {
+ p = self._makeAbs(p)
+ var real = rp.realpathSync(p, self.realpathCache)
+ set[real] = true
+ } catch (er) {
+ if (er.syscall === 'stat')
+ set[self._makeAbs(p)] = true
+ else
+ throw er
+ }
+ }
+ })
+ }
+ common.finish(this)
+}
+
+
+GlobSync.prototype._process = function (pattern, index, inGlobStar) {
+ assert(this instanceof GlobSync)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // See if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ //if ignored, skip processing
+ if (childrenIgnored(this, read))
+ return
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
+}
+
+
+GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
+ var entries = this._readdir(abs, inGlobStar)
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix.slice(-1) !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ var newPattern
+ if (prefix)
+ newPattern = [prefix, e]
+ else
+ newPattern = [e]
+ this._process(newPattern.concat(remain), index, inGlobStar)
+ }
+}
+
+
+GlobSync.prototype._emitMatch = function (index, e) {
+ if (isIgnored(this, e))
+ return
+
+ var abs = this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute) {
+ e = abs
+ }
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ if (this.stat)
+ this._stat(e)
+}
+
+
+GlobSync.prototype._readdirInGlobStar = function (abs) {
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false)
+
+ var entries
+ var lstat
+ var stat
+ try {
+ lstat = fs.lstatSync(abs)
+ } catch (er) {
+ if (er.code === 'ENOENT') {
+ // lstat failed, doesn't exist
+ return null
+ }
+ }
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ this.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory())
+ this.cache[abs] = 'FILE'
+ else
+ entries = this._readdir(abs, false)
+
+ return entries
+}
+
+GlobSync.prototype._readdir = function (abs, inGlobStar) {
+ var entries
+
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return null
+
+ if (Array.isArray(c))
+ return c
+ }
+
+ try {
+ return this._readdirEntries(abs, fs.readdirSync(abs))
+ } catch (er) {
+ this._readdirError(abs, er)
+ return null
+ }
+}
+
+GlobSync.prototype._readdirEntries = function (abs, entries) {
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+
+ // mark and cache dir-ness
+ return entries
+}
+
+GlobSync.prototype._readdirError = function (f, er) {
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ throw error
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict)
+ throw er
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+}
+
+GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
+
+ var entries = this._readdir(abs, inGlobStar)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false)
+
+ var len = entries.length
+ var isSym = this.symlinks[abs]
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true)
+ }
+}
+
+GlobSync.prototype._processSimple = function (prefix, index) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var exists = this._stat(prefix)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+}
+
+// Returns either 'DIR', 'FILE', or false
+GlobSync.prototype._stat = function (f) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return false
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return c
+
+ if (needDir && c === 'FILE')
+ return false
+
+ // otherwise we have to stat, because maybe c=true
+ // if we know it exists, but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (!stat) {
+ var lstat
+ try {
+ lstat = fs.lstatSync(abs)
+ } catch (er) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return false
+ }
+ }
+
+ if (lstat && lstat.isSymbolicLink()) {
+ try {
+ stat = fs.statSync(abs)
+ } catch (er) {
+ stat = lstat
+ }
+ } else {
+ stat = lstat
+ }
+ }
+
+ this.statCache[abs] = stat
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return false
+
+ return c
+}
+
+GlobSync.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+GlobSync.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
+
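+// A minimal usage sketch (assumed pattern, not from the upstream source):
+//
+//   globSync('**/*.go', { nodir: true })  // => array of matching paths
+//
+// Passing a callback throws, as enforced at the top of globSync above.
+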
+
+/***/ }),
+
+/***/ 260:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// Note: since nyc uses this module to output coverage, any lines
+// that are in the direct sync flow of nyc's outputCoverage are
+// ignored, since we can never get coverage for them.
+var assert = __webpack_require__(357)
+var signals = __webpack_require__(654)
+var isWin = /^win/i.test(process.platform)
+
+var EE = __webpack_require__(614)
+/* istanbul ignore if */
+if (typeof EE !== 'function') {
+ EE = EE.EventEmitter
+}
+
+var emitter
+if (process.__signal_exit_emitter__) {
+ emitter = process.__signal_exit_emitter__
+} else {
+ emitter = process.__signal_exit_emitter__ = new EE()
+ emitter.count = 0
+ emitter.emitted = {}
+}
+
+// Because this emitter is a global, we have to check to see if a
+// previous version of this library failed to enable infinite listeners.
+// I know what you're about to say. But literally everything about
+// signal-exit is a compromise with evil. Get used to it.
+if (!emitter.infinite) {
+ emitter.setMaxListeners(Infinity)
+ emitter.infinite = true
+}
+
+module.exports = function (cb, opts) {
+ assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler')
+
+ if (loaded === false) {
+ load()
+ }
+
+ var ev = 'exit'
+ if (opts && opts.alwaysLast) {
+ ev = 'afterexit'
+ }
+
+ var remove = function () {
+ emitter.removeListener(ev, cb)
+ if (emitter.listeners('exit').length === 0 &&
+ emitter.listeners('afterexit').length === 0) {
+ unload()
+ }
+ }
+ emitter.on(ev, cb)
+
+ return remove
+}
+
+module.exports.unload = unload
+function unload () {
+ if (!loaded) {
+ return
+ }
+ loaded = false
+
+ signals.forEach(function (sig) {
+ try {
+ process.removeListener(sig, sigListeners[sig])
+ } catch (er) {}
+ })
+ process.emit = originalProcessEmit
+ process.reallyExit = originalProcessReallyExit
+ emitter.count -= 1
+}
+
+function emit (event, code, signal) {
+ if (emitter.emitted[event]) {
+ return
+ }
+ emitter.emitted[event] = true
+ emitter.emit(event, code, signal)
+}
+
+// { <signal>: <listener fn>, ... }
+var sigListeners = {}
+signals.forEach(function (sig) {
+ sigListeners[sig] = function listener () {
+ // If there are no other listeners, an exit is coming!
+ // Simplest way: remove us and then re-send the signal.
+ // We know that this will kill the process, so we can
+ // safely emit now.
+ var listeners = process.listeners(sig)
+ if (listeners.length === emitter.count) {
+ unload()
+ emit('exit', null, sig)
+ /* istanbul ignore next */
+ emit('afterexit', null, sig)
+ /* istanbul ignore next */
+ if (isWin && sig === 'SIGHUP') {
+ // "SIGHUP" throws an `ENOSYS` error on Windows,
+ // so use a supported signal instead
+ sig = 'SIGINT'
+ }
+ process.kill(process.pid, sig)
+ }
+ }
+})
+
+module.exports.signals = function () {
+ return signals
+}
+
+module.exports.load = load
+
+var loaded = false
+
+function load () {
+ if (loaded) {
+ return
+ }
+ loaded = true
+
+ // This is the number of onSignalExit's that are in play.
+ // It's important so that we can count the correct number of
+ // listeners on signals, and don't wait for the other one to
+ // handle it instead of us.
+ emitter.count += 1
+
+ signals = signals.filter(function (sig) {
+ try {
+ process.on(sig, sigListeners[sig])
+ return true
+ } catch (er) {
+ return false
+ }
+ })
+
+ process.emit = processEmit
+ process.reallyExit = processReallyExit
+}
+
+var originalProcessReallyExit = process.reallyExit
+function processReallyExit (code) {
+ process.exitCode = code || 0
+ emit('exit', process.exitCode, null)
+ /* istanbul ignore next */
+ emit('afterexit', process.exitCode, null)
+ /* istanbul ignore next */
+ originalProcessReallyExit.call(process, process.exitCode)
+}
+
+var originalProcessEmit = process.emit
+function processEmit (ev, arg) {
+ if (ev === 'exit') {
+ if (arg !== undefined) {
+ process.exitCode = arg
+ }
+ var ret = originalProcessEmit.apply(this, arguments)
+ emit('exit', process.exitCode, null)
+ /* istanbul ignore next */
+ emit('afterexit', process.exitCode, null)
+ return ret
+ } else {
+ return originalProcessEmit.apply(this, arguments)
+ }
+}
+
+
+/***/ }),
+
+/***/ 262:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const fs_1 = __webpack_require__(747);
+const os_1 = __webpack_require__(87);
+class Context {
+ /**
+ * Hydrate the context from the environment
+ */
+ constructor() {
+ this.payload = {};
+ if (process.env.GITHUB_EVENT_PATH) {
+ if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
+ this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
+ }
+ else {
+ const path = process.env.GITHUB_EVENT_PATH;
+ process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
+ }
+ }
+ this.eventName = process.env.GITHUB_EVENT_NAME;
+ this.sha = process.env.GITHUB_SHA;
+ this.ref = process.env.GITHUB_REF;
+ this.workflow = process.env.GITHUB_WORKFLOW;
+ this.action = process.env.GITHUB_ACTION;
+ this.actor = process.env.GITHUB_ACTOR;
+ }
+ get issue() {
+ const payload = this.payload;
+ return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
+ }
+ get repo() {
+ if (process.env.GITHUB_REPOSITORY) {
+ const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
+ return { owner, repo };
+ }
+ if (this.payload.repository) {
+ return {
+ owner: this.payload.repository.owner.login,
+ repo: this.payload.repository.name
+ };
+ }
+ throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
+ }
+}
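+// Illustrative (assumed environment, not from the upstream source): with
+// GITHUB_REPOSITORY="octo-org/octo-repo" the `repo` getter returns
+// { owner: 'octo-org', repo: 'octo-repo' }, and `issue` merges in the number
+// from payload.issue, payload.pull_request, or the payload itself.
+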
+exports.Context = Context;
+//# sourceMappingURL=context.js.map
+
+/***/ }),
+
+/***/ 265:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = getPage
+
+const deprecate = __webpack_require__(370)
+const getPageLinks = __webpack_require__(577)
+const HttpError = __webpack_require__(297)
+
+function getPage (octokit, link, which, headers) {
+ deprecate(`octokit.get${which.charAt(0).toUpperCase() + which.slice(1)}Page() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
+ const url = getPageLinks(link)[which]
+
+ if (!url) {
+ const urlError = new HttpError(`No ${which} page found`, 404)
+ return Promise.reject(urlError)
+ }
+
+ const requestOptions = {
+ url,
+ headers: applyAcceptHeader(link, headers)
+ }
+
+ const promise = octokit.request(requestOptions)
+
+ return promise
+}
+
+function applyAcceptHeader (res, headers) {
+ const previous = res.headers && res.headers['x-github-media-type']
+
+ if (!previous || (headers && headers.accept)) {
+ return headers
+ }
+ headers = headers || {}
+ headers.accept = 'application/vnd.' + previous
+ .replace('; param=', '.')
+ .replace('; format=', '+')
+
+ return headers
+}
+
+
/***/ }),
/***/ 280:
+/***/ (function(module) {
+
+module.exports = register
+
+function register (state, name, method, options) {
+ if (typeof method !== 'function') {
+ throw new Error('method for before hook must be a function')
+ }
+
+ if (!options) {
+ options = {}
+ }
+
+ if (Array.isArray(name)) {
+ return name.reverse().reduce(function (callback, name) {
+ return register.bind(null, state, name, callback, options)
+ }, method)()
+ }
+
+ return Promise.resolve()
+ .then(function () {
+ if (!state.registry[name]) {
+ return method(options)
+ }
+
+ return (state.registry[name]).reduce(function (method, registered) {
+ return registered.hook.bind(null, method, options)
+ }, method)()
+ })
+}
+
+
+/***/ }),
+
+/***/ 281:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const internal_globber_1 = __webpack_require__(760);
+/**
+ * Constructs a globber
+ *
+ * @param patterns Patterns separated by newlines
+ * @param options Glob options
+ */
+function create(patterns, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return yield internal_globber_1.DefaultGlobber.create(patterns, options);
+ });
+}
+exports.create = create;
+//# sourceMappingURL=glob.js.map
+
+/***/ }),
+
+/***/ 284:
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = balanced;
+function balanced(a, b, str) {
+ if (a instanceof RegExp) a = maybeMatch(a, str);
+ if (b instanceof RegExp) b = maybeMatch(b, str);
+
+ var r = range(a, b, str);
+
+ return r && {
+ start: r[0],
+ end: r[1],
+ pre: str.slice(0, r[0]),
+ body: str.slice(r[0] + a.length, r[1]),
+ post: str.slice(r[1] + b.length)
+ };
+}
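+
+// Illustrative sketch (added for clarity; not invoked by the bundle): `exampleBalancedUsage`
+// is a hypothetical helper showing the shape of the result for a nested input. For
+// 'pre{in{nest}}post' the outermost pair starts at index 3 and ends at index 12, giving
+// { start: 3, end: 12, pre: 'pre', body: 'in{nest}', post: 'post' }.
+function exampleBalancedUsage() {
+  return balanced('{', '}', 'pre{in{nest}}post');
+}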
+
+function maybeMatch(reg, str) {
+ var m = str.match(reg);
+ return m ? m[0] : null;
+}
+
+balanced.range = range;
+function range(a, b, str) {
+ var begs, beg, left, right, result;
+ var ai = str.indexOf(a);
+ var bi = str.indexOf(b, ai + 1);
+ var i = ai;
+
+ if (ai >= 0 && bi > 0) {
+ begs = [];
+ left = str.length;
+
+ while (i >= 0 && !result) {
+ if (i == ai) {
+ begs.push(i);
+ ai = str.indexOf(a, i + 1);
+ } else if (begs.length == 1) {
+ result = [ begs.pop(), bi ];
+ } else {
+ beg = begs.pop();
+ if (beg < left) {
+ left = beg;
+ right = bi;
+ }
+
+ bi = str.indexOf(b, i + 1);
+ }
+
+ i = ai < bi && ai >= 0 ? ai : bi;
+ }
+
+ if (begs.length) {
+ result = [ left, right ];
+ }
+ }
+
+ return result;
+}
+
+
+/***/ }),
+
+/***/ 297:
+/***/ (function(module) {
+
+module.exports = class HttpError extends Error {
+ constructor (message, code, headers) {
+ super(message)
+
+ // Maintains proper stack trace (only available on V8)
+ /* istanbul ignore next */
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor)
+ }
+
+ this.name = 'HttpError'
+ this.code = code
+ this.headers = headers
+ }
+}
+
+
+/***/ }),
+
+/***/ 302:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = realpath
+realpath.realpath = realpath
+realpath.sync = realpathSync
+realpath.realpathSync = realpathSync
+realpath.monkeypatch = monkeypatch
+realpath.unmonkeypatch = unmonkeypatch
+
+var fs = __webpack_require__(747)
+var origRealpath = fs.realpath
+var origRealpathSync = fs.realpathSync
+
+var version = process.version
+var ok = /^v[0-5]\./.test(version)
+var old = __webpack_require__(117)
+
+function newError (er) {
+ return er && er.syscall === 'realpath' && (
+ er.code === 'ELOOP' ||
+ er.code === 'ENOMEM' ||
+ er.code === 'ENAMETOOLONG'
+ )
+}
+
+function realpath (p, cache, cb) {
+ if (ok) {
+ return origRealpath(p, cache, cb)
+ }
+
+ if (typeof cache === 'function') {
+ cb = cache
+ cache = null
+ }
+ origRealpath(p, cache, function (er, result) {
+ if (newError(er)) {
+ old.realpath(p, cache, cb)
+ } else {
+ cb(er, result)
+ }
+ })
+}
+
+function realpathSync (p, cache) {
+ if (ok) {
+ return origRealpathSync(p, cache)
+ }
+
+ try {
+ return origRealpathSync(p, cache)
+ } catch (er) {
+ if (newError(er)) {
+ return old.realpathSync(p, cache)
+ } else {
+ throw er
+ }
+ }
+}
+
+function monkeypatch () {
+ fs.realpath = realpath
+ fs.realpathSync = realpathSync
+}
+
+function unmonkeypatch () {
+ fs.realpath = origRealpath
+ fs.realpathSync = origRealpathSync
+}
+
+
+/***/ }),
+
+/***/ 306:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var concatMap = __webpack_require__(896);
+var balanced = __webpack_require__(284);
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
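+
+// Illustrative sketch (added for clarity; not invoked by the bundle): `exampleExpandTopUsage`
+// is a hypothetical helper listing typical expansions handled above.
+function exampleExpandTopUsage() {
+  return [
+    expandTop('a{b,c}d'),     // ['abd', 'acd']
+    expandTop('file{1..3}'),  // ['file1', 'file2', 'file3']
+    expandTop('{a,{b,c},d}')  // ['a', 'b', 'c', 'd']
+  ];
+}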
+
+function identity(e) {
+ return e;
+}
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m || /\$$/.test(m.pre)) return [str];
+
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
+ }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
+ }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+ var width = Math.max(n[0].length, n[1].length)
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
+ }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
+ }
+ }
+ N.push(c);
+ }
+ } else {
+ N = concatMap(n, function(el) { return expand(el, false) });
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
+ }
+ }
+
+ return expansions;
+}
+
+
+
+/***/ }),
+
+/***/ 315:
+/***/ (function(module) {
+
+if (typeof Object.create === 'function') {
+ // implementation from standard node.js 'util' module
+ module.exports = function inherits(ctor, superCtor) {
+ if (superCtor) {
+ ctor.super_ = superCtor
+ ctor.prototype = Object.create(superCtor.prototype, {
+ constructor: {
+ value: ctor,
+ enumerable: false,
+ writable: true,
+ configurable: true
+ }
+ })
+ }
+ };
+} else {
+ // old school shim for old browsers
+ module.exports = function inherits(ctor, superCtor) {
+ if (superCtor) {
+ ctor.super_ = superCtor
+ var TempCtor = function () {}
+ TempCtor.prototype = superCtor.prototype
+ ctor.prototype = new TempCtor()
+ ctor.prototype.constructor = ctor
+ }
+ }
+}
+
+
+/***/ }),
+
+/***/ 322:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+const VERSION = "1.1.2";
+
+/**
+ * Some “list” responses that can be paginated have a different response structure
+ *
+ * They have a `total_count` key in the response (search also has `incomplete_results`,
+ * /installation/repositories also has `repository_selection`), as well as a key with
+ * the list of items, whose key name varies from endpoint to endpoint:
+ *
+ * - https://developer.github.com/v3/search/#example (key `items`)
+ * - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`)
+ * - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`)
+ * - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`)
+ * - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`)
+ *
+ * Octokit normalizes these responses so that paginated results are always returned following
+ * the same structure. One challenge is that if the list response has only one page, no Link
+ * header is provided, so this header alone is not sufficient to check whether a response is
+ * paginated or not. For the exceptions with the namespace, a fallback check for the route
+ * paths has to be added in order to normalize the response. We cannot check for the total_count
+ * property because it also exists in the response of Get the combined status for a specific ref.
+ */
+const REGEX = [/^\/search\//, /^\/repos\/[^/]+\/[^/]+\/commits\/[^/]+\/(check-runs|check-suites)([^/]|$)/, /^\/installation\/repositories([^/]|$)/, /^\/user\/installations([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/secrets([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/workflows(\/[^/]+\/runs)?([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/runs(\/[^/]+\/(artifacts|jobs))?([^/]|$)/];
+function normalizePaginatedListResponse(octokit, url, response) {
+ const path = url.replace(octokit.request.endpoint.DEFAULTS.baseUrl, "");
+ const responseNeedsNormalization = REGEX.find(regex => regex.test(path));
+ if (!responseNeedsNormalization) return; // keep the additional properties intact as there is currently no other way
+ // to retrieve the same information.
+
+ const incompleteResults = response.data.incomplete_results;
+ const repositorySelection = response.data.repository_selection;
+ const totalCount = response.data.total_count;
+ delete response.data.incomplete_results;
+ delete response.data.repository_selection;
+ delete response.data.total_count;
+ const namespaceKey = Object.keys(response.data)[0];
+ const data = response.data[namespaceKey];
+ response.data = data;
+
+ if (typeof incompleteResults !== "undefined") {
+ response.data.incomplete_results = incompleteResults;
+ }
+
+ if (typeof repositorySelection !== "undefined") {
+ response.data.repository_selection = repositorySelection;
+ }
+
+ response.data.total_count = totalCount;
+ Object.defineProperty(response.data, namespaceKey, {
+ get() {
+ octokit.log.warn(`[@octokit/paginate-rest] "response.data.${namespaceKey}" is deprecated for "GET ${path}". Get the results directly from "response.data"`);
+ return Array.from(data);
+ }
+
+ });
+}
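+
+// Illustrative sketch (added for clarity; not invoked by the bundle): `fakeOctokit` and the
+// response literal are hypothetical stand-ins showing how a search response is reshaped so
+// that response.data becomes the plain array of items, with total_count re-attached to it.
+function exampleNormalizeSearchResponse() {
+  const fakeOctokit = {
+    request: { endpoint: { DEFAULTS: { baseUrl: "https://api.github.com" } } },
+    log: { warn: () => {} }
+  };
+  const response = {
+    data: { total_count: 2, incomplete_results: false, items: ["a", "b"] }
+  };
+  normalizePaginatedListResponse(fakeOctokit, "https://api.github.com/search/code?q=x", response);
+  return response.data; // ["a", "b"] with total_count and incomplete_results re-attached
+}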
+
+function iterator(octokit, route, parameters) {
+ const options = octokit.request.endpoint(route, parameters);
+ const method = options.method;
+ const headers = options.headers;
+ let url = options.url;
+ return {
+ [Symbol.asyncIterator]: () => ({
+ next() {
+ if (!url) {
+ return Promise.resolve({
+ done: true
+ });
+ }
+
+ return octokit.request({
+ method,
+ url,
+ headers
+ }).then(response => {
+ normalizePaginatedListResponse(octokit, url, response); // `response.headers.link` format:
+        // '<url>; rel="next", <url>; rel="last"'
+ // sets `url` to undefined if "next" URL is not present or `link` header is not set
+
+ url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
+ return {
+ value: response
+ };
+ });
+ }
+
+ })
+ };
+}
+
+function paginate(octokit, route, parameters, mapFn) {
+ if (typeof parameters === "function") {
+ mapFn = parameters;
+ parameters = undefined;
+ }
+
+ return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
+}
+
+function gather(octokit, results, iterator, mapFn) {
+ return iterator.next().then(result => {
+ if (result.done) {
+ return results;
+ }
+
+ let earlyExit = false;
+
+ function done() {
+ earlyExit = true;
+ }
+
+ results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);
+
+ if (earlyExit) {
+ return results;
+ }
+
+ return gather(octokit, results, iterator, mapFn);
+ });
+}
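+
+// Illustrative sketch (added for clarity; not invoked by the bundle): `octokitInstance` is a
+// hypothetical authenticated client; paginate() walks every page through the async iterator
+// above and concatenates each page's data, and the optional map function can stop early by
+// calling done().
+function examplePaginateUsage(octokitInstance) {
+  return paginate(octokitInstance, "GET /repos/{owner}/{repo}/issues", { owner: "o", repo: "r" }, (response, done) => {
+    if (response.data.length === 0) done(); // stop requesting further pages
+    return response.data;
+  });
+}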
+
+/**
+ * @param octokit Octokit instance
+ * @param options Options passed to Octokit constructor
+ */
+
+function paginateRest(octokit) {
+ return {
+ paginate: Object.assign(paginate.bind(null, octokit), {
+ iterator: iterator.bind(null, octokit)
+ })
+ };
+}
+paginateRest.VERSION = VERSION;
+
+exports.paginateRest = paginateRest;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 323:
+/***/ (function(module) {
+
+"use strict";
+
+
+var isStream = module.exports = function (stream) {
+ return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function';
+};
+
+isStream.writable = function (stream) {
+ return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object';
+};
+
+isStream.readable = function (stream) {
+ return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object';
+};
+
+isStream.duplex = function (stream) {
+ return isStream.writable(stream) && isStream.readable(stream);
+};
+
+isStream.transform = function (stream) {
+ return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object';
+};
+
+
+/***/ }),
+
+/***/ 327:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+/**
+ * Indicates whether a pattern matches a path
+ */
+var MatchKind;
+(function (MatchKind) {
+ /** Not matched */
+ MatchKind[MatchKind["None"] = 0] = "None";
+ /** Matched if the path is a directory */
+ MatchKind[MatchKind["Directory"] = 1] = "Directory";
+ /** Matched if the path is a regular file */
+ MatchKind[MatchKind["File"] = 2] = "File";
+ /** Matched */
+ MatchKind[MatchKind["All"] = 3] = "All";
+})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
+//# sourceMappingURL=internal-match-kind.js.map
+
+/***/ }),
+
+/***/ 336:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = hasLastPage
+
+const deprecate = __webpack_require__(370)
+const getPageLinks = __webpack_require__(577)
+
+function hasLastPage (link) {
+ deprecate(`octokit.hasLastPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
+ return getPageLinks(link).last
+}
+
+
+/***/ }),
+
+/***/ 341:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = octokitValidate;
+
+const validate = __webpack_require__(63);
+
+function octokitValidate(octokit) {
+ octokit.hook.before("request", validate.bind(null, octokit));
+}
+
+
+/***/ }),
+
+/***/ 342:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const http_client_1 = __webpack_require__(539);
+const auth_1 = __webpack_require__(226);
+const crypto = __importStar(__webpack_require__(417));
+const fs = __importStar(__webpack_require__(747));
+const stream = __importStar(__webpack_require__(413));
+const util = __importStar(__webpack_require__(669));
+const constants_1 = __webpack_require__(573);
+const utils = __importStar(__webpack_require__(867));
+const versionSalt = "1.0";
+function isSuccessStatusCode(statusCode) {
+ if (!statusCode) {
+ return false;
+ }
+ return statusCode >= 200 && statusCode < 300;
+}
+function isRetryableStatusCode(statusCode) {
+ if (!statusCode) {
+ return false;
+ }
+ const retryableStatusCodes = [
+ http_client_1.HttpCodes.BadGateway,
+ http_client_1.HttpCodes.ServiceUnavailable,
+ http_client_1.HttpCodes.GatewayTimeout
+ ];
+ return retryableStatusCodes.includes(statusCode);
+}
+function getCacheApiUrl(resource) {
+ // Ideally we just use ACTIONS_CACHE_URL
+ const baseUrl = (process.env["ACTIONS_CACHE_URL"] ||
+ process.env["ACTIONS_RUNTIME_URL"] ||
+ "").replace("pipelines", "artifactcache");
+ if (!baseUrl) {
+ throw new Error("Cache Service Url not found, unable to restore cache.");
+ }
+ const url = `${baseUrl}_apis/artifactcache/${resource}`;
+ core.debug(`Resource Url: ${url}`);
+ return url;
+}
+function createAcceptHeader(type, apiVersion) {
+ return `${type};api-version=${apiVersion}`;
+}
+function getRequestOptions() {
+ const requestOptions = {
+ headers: {
+ Accept: createAcceptHeader("application/json", "6.0-preview.1")
+ }
+ };
+ return requestOptions;
+}
+function createHttpClient() {
+ const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
+ const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
+ return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions());
+}
+function getCacheVersion() {
+ // Add salt to cache version to support breaking changes in cache entry
+ const components = [
+ core.getInput(constants_1.Inputs.Path, { required: true }),
+ versionSalt
+ ];
+ return crypto
+ .createHash("sha256")
+ .update(components.join("|"))
+ .digest("hex");
+}
+exports.getCacheVersion = getCacheVersion;
+function getCacheEntry(keys) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpClient = createHttpClient();
+ const version = getCacheVersion();
+ const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`;
+ const response = yield httpClient.getJson(getCacheApiUrl(resource));
+ if (response.statusCode === 204) {
+ return null;
+ }
+ if (!isSuccessStatusCode(response.statusCode)) {
+ throw new Error(`Cache service responded with ${response.statusCode}`);
+ }
+ const cacheResult = response.result;
+ const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
+ if (!cacheDownloadUrl) {
+ throw new Error("Cache not found.");
+ }
+ core.setSecret(cacheDownloadUrl);
+ core.debug(`Cache Result:`);
+ core.debug(JSON.stringify(cacheResult));
+ return cacheResult;
+ });
+}
+exports.getCacheEntry = getCacheEntry;
+function pipeResponseToStream(response, output) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const pipeline = util.promisify(stream.pipeline);
+ yield pipeline(response.message, output);
+ });
+}
+function downloadCache(archiveLocation, archivePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const stream = fs.createWriteStream(archivePath);
+ const httpClient = new http_client_1.HttpClient("actions/cache");
+ const downloadResponse = yield httpClient.get(archiveLocation);
+ // Abort download if no traffic received over the socket.
+ downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
+ downloadResponse.message.destroy();
+ core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
+ });
+ yield pipeResponseToStream(downloadResponse, stream);
+ // Validate download size.
+ const contentLengthHeader = downloadResponse.message.headers["content-length"];
+ if (contentLengthHeader) {
+ const expectedLength = parseInt(contentLengthHeader);
+ const actualLength = utils.getArchiveFileSize(archivePath);
+ if (actualLength != expectedLength) {
+ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
+ }
+ }
+ else {
+ core.debug("Unable to validate download, no Content-Length header");
+ }
+ });
+}
+exports.downloadCache = downloadCache;
+// Reserve Cache
+function reserveCache(key) {
+ var _a, _b;
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpClient = createHttpClient();
+ const version = getCacheVersion();
+ const reserveCacheRequest = {
+ key,
+ version
+ };
+ const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest);
+ return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1;
+ });
+}
+exports.reserveCache = reserveCache;
+function getContentRange(start, end) {
+    // Format: `bytes start-end/filesize`
+ // start and end are inclusive
+ // filesize can be *
+ // For a 200 byte chunk starting at byte 0:
+ // Content-Range: bytes 0-199/*
+ return `bytes ${start}-${end}/*`;
+}
+function uploadChunk(httpClient, resourceUrl, data, start, end) {
+ return __awaiter(this, void 0, void 0, function* () {
+ core.debug(`Uploading chunk of size ${end -
+ start +
+ 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+ const additionalHeaders = {
+ "Content-Type": "application/octet-stream",
+ "Content-Range": getContentRange(start, end)
+ };
+ const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
+ return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders);
+ });
+ const response = yield uploadChunkRequest();
+ if (isSuccessStatusCode(response.message.statusCode)) {
+ return;
+ }
+ if (isRetryableStatusCode(response.message.statusCode)) {
+ core.debug(`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`);
+ const retryResponse = yield uploadChunkRequest();
+ if (isSuccessStatusCode(retryResponse.message.statusCode)) {
+ return;
+ }
+ }
+ throw new Error(`Cache service responded with ${response.message.statusCode} during chunk upload.`);
+ });
+}
+function parseEnvNumber(key) {
+ const value = Number(process.env[key]);
+ if (Number.isNaN(value) || value < 0) {
+ return undefined;
+ }
+ return value;
+}
+function uploadFile(httpClient, cacheId, archivePath) {
+ var _a, _b;
+ return __awaiter(this, void 0, void 0, function* () {
+ // Upload Chunks
+ const fileSize = fs.statSync(archivePath).size;
+ const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
+ const fd = fs.openSync(archivePath, "r");
+ const concurrency = (_a = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY")) !== null && _a !== void 0 ? _a : 4; // # of HTTP requests in parallel
+ const MAX_CHUNK_SIZE = (_b = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE")) !== null && _b !== void 0 ? _b : 32 * 1024 * 1024; // 32 MB Chunks
+ core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
+ const parallelUploads = [...new Array(concurrency).keys()];
+ core.debug("Awaiting all uploads");
+ let offset = 0;
+ try {
+ yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
+ while (offset < fileSize) {
+ const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+ const start = offset;
+ const end = offset + chunkSize - 1;
+ offset += MAX_CHUNK_SIZE;
+ const chunk = fs.createReadStream(archivePath, {
+ fd,
+ start,
+ end,
+ autoClose: false
+ });
+ yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
+ }
+ })));
+ }
+ finally {
+ fs.closeSync(fd);
+ }
+ return;
+ });
+}
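+
+// Illustrative sketch (added for clarity; not invoked by the bundle): `exampleChunkRanges` is
+// a hypothetical helper showing how uploadFile splits a file into Content-Range chunks. For a
+// 100 MB file with the default 32 MB chunk size the ranges are bytes 0-33554431/*,
+// 33554432-67108863/*, 67108864-100663295/* and 100663296-104857599/*.
+function exampleChunkRanges(fileSize, chunkSize) {
+    const ranges = [];
+    for (let offset = 0; offset < fileSize; offset += chunkSize) {
+        const end = Math.min(offset + chunkSize, fileSize) - 1;
+        ranges.push(getContentRange(offset, end));
+    }
+    return ranges;
+}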
+function commitCache(httpClient, cacheId, filesize) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const commitCacheRequest = { size: filesize };
+ return yield httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
+ });
+}
+function saveCache(cacheId, archivePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpClient = createHttpClient();
+ core.debug("Upload cache");
+ yield uploadFile(httpClient, cacheId, archivePath);
+ // Commit Cache
+        core.debug("Committing cache");
+ const cacheSize = utils.getArchiveFileSize(archivePath);
+ const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
+ if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
+ throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
+ }
+ core.info("Cache saved successfully");
+ });
+}
+exports.saveCache = saveCache;
+
+
+/***/ }),
+
+/***/ 357:
+/***/ (function(module) {
+
+module.exports = require("assert");
+
+/***/ }),
+
+/***/ 368:
+/***/ (function(module) {
+
+module.exports = function atob(str) {
+ return Buffer.from(str, 'base64').toString('binary')
+}
+
+
+/***/ }),
+
+/***/ 370:
+/***/ (function(module) {
+
+module.exports = deprecate
+
+const loggedMessages = {}
+
+function deprecate (message) {
+ if (loggedMessages[message]) {
+ return
+ }
+
+ console.warn(`DEPRECATED (@octokit/rest): ${message}`)
+ loggedMessages[message] = 1
+}
+
+
+/***/ }),
+
+/***/ 383:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const assert = __webpack_require__(357);
+const path = __webpack_require__(622);
+const pathHelper = __webpack_require__(972);
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Helper class for parsing paths into segments
+ */
+class Path {
+ /**
+ * Constructs a Path
+ * @param itemPath Path or array of segments
+ */
+ constructor(itemPath) {
+ this.segments = [];
+ // String
+ if (typeof itemPath === 'string') {
+ assert(itemPath, `Parameter 'itemPath' must not be empty`);
+ // Normalize slashes and trim unnecessary trailing slash
+ itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
+ // Not rooted
+ if (!pathHelper.hasRoot(itemPath)) {
+ this.segments = itemPath.split(path.sep);
+ }
+ // Rooted
+ else {
+ // Add all segments, while not at the root
+ let remaining = itemPath;
+ let dir = pathHelper.dirname(remaining);
+ while (dir !== remaining) {
+ // Add the segment
+ const basename = path.basename(remaining);
+ this.segments.unshift(basename);
+ // Truncate the last segment
+ remaining = dir;
+ dir = pathHelper.dirname(remaining);
+ }
+ // Remainder is the root
+ this.segments.unshift(remaining);
+ }
+ }
+ // Array
+ else {
+ // Must not be empty
+ assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
+ // Each segment
+ for (let i = 0; i < itemPath.length; i++) {
+ let segment = itemPath[i];
+ // Must not be empty
+ assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
+ // Normalize slashes
+ segment = pathHelper.normalizeSeparators(itemPath[i]);
+ // Root segment
+ if (i === 0 && pathHelper.hasRoot(segment)) {
+ segment = pathHelper.safeTrimTrailingSeparator(segment);
+ assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
+ this.segments.push(segment);
+ }
+ // All other segments
+ else {
+ // Must not contain slash
+ assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
+ this.segments.push(segment);
+ }
+ }
+ }
+ }
+ /**
+     * Converts the path to its string representation
+ */
+ toString() {
+ // First segment
+ let result = this.segments[0];
+ // All others
+ let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
+ for (let i = 1; i < this.segments.length; i++) {
+ if (skipSlash) {
+ skipSlash = false;
+ }
+ else {
+ result += path.sep;
+ }
+ result += this.segments[i];
+ }
+ return result;
+ }
+}
+exports.Path = Path;
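+// Illustrative sketch (added for clarity; not invoked by the bundle): `examplePathSegments` is
+// a hypothetical helper; on a POSIX system a rooted path splits into its root plus one segment
+// per directory level, and toString() re-joins them.
+function examplePathSegments() {
+    const p = new Path('/foo/bar/baz');
+    return { segments: p.segments, joined: p.toString() }; // segments: ['/', 'foo', 'bar', 'baz']
+}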
+//# sourceMappingURL=internal-path.js.map
+
+/***/ }),
+
+/***/ 385:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var isPlainObject = _interopDefault(__webpack_require__(696));
+var universalUserAgent = __webpack_require__(562);
+
+function lowercaseKeys(object) {
+ if (!object) {
+ return {};
+ }
+
+ return Object.keys(object).reduce((newObj, key) => {
+ newObj[key.toLowerCase()] = object[key];
+ return newObj;
+ }, {});
+}
+
+function mergeDeep(defaults, options) {
+ const result = Object.assign({}, defaults);
+ Object.keys(options).forEach(key => {
+ if (isPlainObject(options[key])) {
+ if (!(key in defaults)) Object.assign(result, {
+ [key]: options[key]
+ });else result[key] = mergeDeep(defaults[key], options[key]);
+ } else {
+ Object.assign(result, {
+ [key]: options[key]
+ });
+ }
+ });
+ return result;
+}
+
+function merge(defaults, route, options) {
+ if (typeof route === "string") {
+ let [method, url] = route.split(" ");
+ options = Object.assign(url ? {
+ method,
+ url
+ } : {
+ url: method
+ }, options);
+ } else {
+ options = Object.assign({}, route);
+ } // lowercase header names before merging with defaults to avoid duplicates
+
+
+ options.headers = lowercaseKeys(options.headers);
+ const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten
+
+ if (defaults && defaults.mediaType.previews.length) {
+ mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
+ }
+
+ mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
+ return mergedOptions;
+}
+
+function addQueryParameters(url, parameters) {
+ const separator = /\?/.test(url) ? "&" : "?";
+ const names = Object.keys(parameters);
+
+ if (names.length === 0) {
+ return url;
+ }
+
+ return url + separator + names.map(name => {
+ if (name === "q") {
+ return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
+ }
+
+ return `${name}=${encodeURIComponent(parameters[name])}`;
+ }).join("&");
+}
+
+const urlVariableRegex = /\{[^}]+\}/g;
+
+function removeNonChars(variableName) {
+ return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
+}
+
+function extractUrlVariableNames(url) {
+ const matches = url.match(urlVariableRegex);
+
+ if (!matches) {
+ return [];
+ }
+
+ return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
+}
+
+function omit(object, keysToOmit) {
+ return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {
+ obj[key] = object[key];
+ return obj;
+ }, {});
+}
+
+// Based on https://github.com/bramstein/url-template, licensed under BSD
+// TODO: create separate package.
+//
+// Copyright (c) 2012-2014, Bram Stein
+// All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions
+// are met:
+// 1. Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+// 3. The name of the author may not be used to endorse or promote products
+// derived from this software without specific prior written permission.
+// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
+// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+/* istanbul ignore file */
+function encodeReserved(str) {
+ return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {
+ if (!/%[0-9A-Fa-f]/.test(part)) {
+ part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
+ }
+
+ return part;
+ }).join("");
+}
+
+function encodeUnreserved(str) {
+ return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
+ return "%" + c.charCodeAt(0).toString(16).toUpperCase();
+ });
+}
+
+function encodeValue(operator, value, key) {
+ value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
+
+ if (key) {
+ return encodeUnreserved(key) + "=" + value;
+ } else {
+ return value;
+ }
+}
+
+function isDefined(value) {
+ return value !== undefined && value !== null;
+}
+
+function isKeyOperator(operator) {
+ return operator === ";" || operator === "&" || operator === "?";
+}
+
+function getValues(context, operator, key, modifier) {
+ var value = context[key],
+ result = [];
+
+ if (isDefined(value) && value !== "") {
+ if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
+ value = value.toString();
+
+ if (modifier && modifier !== "*") {
+ value = value.substring(0, parseInt(modifier, 10));
+ }
+
+ result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
+ } else {
+ if (modifier === "*") {
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function (value) {
+ result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
+ });
+ } else {
+ Object.keys(value).forEach(function (k) {
+ if (isDefined(value[k])) {
+ result.push(encodeValue(operator, value[k], k));
+ }
+ });
+ }
+ } else {
+ const tmp = [];
+
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function (value) {
+ tmp.push(encodeValue(operator, value));
+ });
+ } else {
+ Object.keys(value).forEach(function (k) {
+ if (isDefined(value[k])) {
+ tmp.push(encodeUnreserved(k));
+ tmp.push(encodeValue(operator, value[k].toString()));
+ }
+ });
+ }
+
+ if (isKeyOperator(operator)) {
+ result.push(encodeUnreserved(key) + "=" + tmp.join(","));
+ } else if (tmp.length !== 0) {
+ result.push(tmp.join(","));
+ }
+ }
+ }
+ } else {
+ if (operator === ";") {
+ if (isDefined(value)) {
+ result.push(encodeUnreserved(key));
+ }
+ } else if (value === "" && (operator === "&" || operator === "?")) {
+ result.push(encodeUnreserved(key) + "=");
+ } else if (value === "") {
+ result.push("");
+ }
+ }
+
+ return result;
+}
+
+function parseUrl(template) {
+ return {
+ expand: expand.bind(null, template)
+ };
+}
+
+function expand(template, context) {
+ var operators = ["+", "#", ".", "/", ";", "?", "&"];
+ return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
+ if (expression) {
+ let operator = "";
+ const values = [];
+
+ if (operators.indexOf(expression.charAt(0)) !== -1) {
+ operator = expression.charAt(0);
+ expression = expression.substr(1);
+ }
+
+ expression.split(/,/g).forEach(function (variable) {
+ var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
+ values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
+ });
+
+ if (operator && operator !== "+") {
+ var separator = ",";
+
+ if (operator === "?") {
+ separator = "&";
+ } else if (operator !== "#") {
+ separator = operator;
+ }
+
+ return (values.length !== 0 ? operator : "") + values.join(separator);
+ } else {
+ return values.join(",");
+ }
+ } else {
+ return encodeReserved(literal);
+ }
+ });
+}
+
+function parse(options) {
+ // https://fetch.spec.whatwg.org/#methods
+ let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
+
+ let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}");
+ let headers = Object.assign({}, options.headers);
+ let body;
+ let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
+
+ const urlVariableNames = extractUrlVariableNames(url);
+ url = parseUrl(url).expand(parameters);
+
+ if (!/^http/.test(url)) {
+ url = options.baseUrl + url;
+ }
+
+ const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
+ const remainingParameters = omit(parameters, omittedParameters);
+  const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
+
+  if (!isBinaryRequest) {
+ if (options.mediaType.format) {
+ // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
+ headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
+ }
+
+ if (options.mediaType.previews.length) {
+ const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
+ headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
+ const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
+ return `application/vnd.github.${preview}-preview${format}`;
+ }).join(",");
+ }
+ } // for GET/HEAD requests, set URL query parameters from remaining parameters
+ // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
+
+
+ if (["GET", "HEAD"].includes(method)) {
+ url = addQueryParameters(url, remainingParameters);
+ } else {
+ if ("data" in remainingParameters) {
+ body = remainingParameters.data;
+ } else {
+ if (Object.keys(remainingParameters).length) {
+ body = remainingParameters;
+ } else {
+ headers["content-length"] = 0;
+ }
+ }
+ } // default content-type for JSON if body is set
+
+
+ if (!headers["content-type"] && typeof body !== "undefined") {
+ headers["content-type"] = "application/json; charset=utf-8";
+ } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
+  // fetch does not allow setting the `content-length` header, but we can set the body to an empty string
+
+
+ if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
+ body = "";
+ } // Only return body/request keys if present
+
+
+ return Object.assign({
+ method,
+ url,
+ headers
+ }, typeof body !== "undefined" ? {
+ body
+ } : null, options.request ? {
+ request: options.request
+ } : null);
+}
+
+function endpointWithDefaults(defaults, route, options) {
+ return parse(merge(defaults, route, options));
+}
+
+function withDefaults(oldDefaults, newDefaults) {
+ const DEFAULTS = merge(oldDefaults, newDefaults);
+ const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
+ return Object.assign(endpoint, {
+ DEFAULTS,
+ defaults: withDefaults.bind(null, DEFAULTS),
+ merge: merge.bind(null, DEFAULTS),
+ parse
+ });
+}
+
+const VERSION = "6.0.1";
+
+const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
+// So we use RequestParameters and add method as additional required property.
+
+const DEFAULTS = {
+ method: "GET",
+ baseUrl: "https://api.github.com",
+ headers: {
+ accept: "application/vnd.github.v3+json",
+ "user-agent": userAgent
+ },
+ mediaType: {
+ format: "",
+ previews: []
+ }
+};
+
+const endpoint = withDefaults(null, DEFAULTS);
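+
+// Illustrative sketch (added for clarity; not invoked by the bundle): `exampleEndpointUsage`
+// is a hypothetical helper showing how the exported endpoint turns a route plus parameters
+// into a plain request object, e.g. a GET to
+// https://api.github.com/repos/o/r/issues?state=open with the default accept and user-agent
+// headers.
+function exampleEndpointUsage() {
+  return endpoint("GET /repos/{owner}/{repo}/issues", { owner: "o", repo: "r", state: "open" });
+}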
+
+exports.endpoint = endpoint;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 389:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const fs = __webpack_require__(747);
+const shebangCommand = __webpack_require__(866);
+
+function readShebang(command) {
+ // Read the first 150 bytes from the file
+ const size = 150;
+ let buffer;
+
+ if (Buffer.alloc) {
+ // Node.js v4.5+ / v5.10+
+ buffer = Buffer.alloc(size);
+ } else {
+ // Old Node.js API
+ buffer = new Buffer(size);
+ buffer.fill(0); // zero-fill
+ }
+
+ let fd;
+
+ try {
+ fd = fs.openSync(command, 'r');
+ fs.readSync(fd, buffer, 0, size, 0);
+ fs.closeSync(fd);
+ } catch (e) { /* Empty */ }
+
+ // Attempt to extract shebang (null is returned if not a shebang)
+ return shebangCommand(buffer.toString());
+}
+
+module.exports = readShebang;
+
+
+/***/ }),
+
+/***/ 402:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// Approach:
+//
+// 1. Get the minimatch set
+// 2. For each pattern in the set, PROCESS(pattern, false)
+// 3. Store matches per-set, then uniq them
+//
+// PROCESS(pattern, inGlobStar)
+// Get the first [n] items from pattern that are all strings
+// Join these together. This is PREFIX.
+// If there is no more remaining, then stat(PREFIX) and
+// add to matches if it succeeds. END.
+//
+// If inGlobStar and PREFIX is symlink and points to dir
+// set ENTRIES = []
+// else readdir(PREFIX) as ENTRIES
+// If fail, END
+//
+// with ENTRIES
+// If pattern[n] is GLOBSTAR
+// // handle the case where the globstar match is empty
+// // by pruning it out, and testing the resulting pattern
+// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
+// // handle other cases.
+// for ENTRY in ENTRIES (not dotfiles)
+// // attach globstar + tail onto the entry
+// // Mark that this entry is a globstar match
+// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
+//
+// else // not globstar
+// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
+// Test ENTRY against pattern[n]
+// If fails, continue
+// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
+//
+// Caveat:
+// Cache all stats and readdirs results to minimize syscall. Since all
+// we ever care about is existence and directory-ness, we can just keep
+// `true` for files, and [children,...] for directories, or `false` for
+// things that don't exist.
+
+module.exports = glob
+
+var fs = __webpack_require__(747)
+var rp = __webpack_require__(302)
+var minimatch = __webpack_require__(93)
+var Minimatch = minimatch.Minimatch
+var inherits = __webpack_require__(689)
+var EE = __webpack_require__(614).EventEmitter
+var path = __webpack_require__(622)
+var assert = __webpack_require__(357)
+var isAbsolute = __webpack_require__(681)
+var globSync = __webpack_require__(245)
+var common = __webpack_require__(644)
+var alphasort = common.alphasort
+var alphasorti = common.alphasorti
+var setopts = common.setopts
+var ownProp = common.ownProp
+var inflight = __webpack_require__(674)
+var util = __webpack_require__(669)
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+var once = __webpack_require__(969)
+
+function glob (pattern, options, cb) {
+ if (typeof options === 'function') cb = options, options = {}
+ if (!options) options = {}
+
+ if (options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return globSync(pattern, options)
+ }
+
+ return new Glob(pattern, options, cb)
+}
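+
+// Illustrative sketch (added for clarity; not invoked by the bundle): the async entry point
+// follows the node-glob API: a pattern, optional options, and a callback that receives the
+// matches produced by the PROCESS algorithm described above.
+function exampleGlobUsage(cb) {
+  return glob('**/*.js', { nodir: true }, function (er, files) {
+    cb(er, files)
+  })
+}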
+
+glob.sync = globSync
+var GlobSync = glob.GlobSync = globSync.GlobSync
+
+// old api surface
+glob.glob = glob
+
+function extend (origin, add) {
+ if (add === null || typeof add !== 'object') {
+ return origin
+ }
+
+ var keys = Object.keys(add)
+ var i = keys.length
+ while (i--) {
+ origin[keys[i]] = add[keys[i]]
+ }
+ return origin
+}
+
+glob.hasMagic = function (pattern, options_) {
+ var options = extend({}, options_)
+ options.noprocess = true
+
+ var g = new Glob(pattern, options)
+ var set = g.minimatch.set
+
+ if (!pattern)
+ return false
+
+ if (set.length > 1)
+ return true
+
+ for (var j = 0; j < set[0].length; j++) {
+ if (typeof set[0][j] !== 'string')
+ return true
+ }
+
+ return false
+}
+
+glob.Glob = Glob
+inherits(Glob, EE)
+function Glob (pattern, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = null
+ }
+
+ if (options && options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return new GlobSync(pattern, options)
+ }
+
+ if (!(this instanceof Glob))
+ return new Glob(pattern, options, cb)
+
+ setopts(this, pattern, options)
+ this._didRealPath = false
+
+ // process each pattern in the minimatch set
+ var n = this.minimatch.set.length
+
+  // The matches are stored as {<filename>: true,...} so that
+ // duplicates are automagically pruned.
+ // Later, we do an Object.keys() on these.
+ // Keep them as a list so we can fill in when nonull is set.
+ this.matches = new Array(n)
+
+ if (typeof cb === 'function') {
+ cb = once(cb)
+ this.on('error', cb)
+ this.on('end', function (matches) {
+ cb(null, matches)
+ })
+ }
+
+ var self = this
+ this._processing = 0
+
+ this._emitQueue = []
+ this._processQueue = []
+ this.paused = false
+
+ if (this.noprocess)
+ return this
+
+ if (n === 0)
+ return done()
+
+ var sync = true
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false, done)
+ }
+ sync = false
+
+ function done () {
+ --self._processing
+ if (self._processing <= 0) {
+ if (sync) {
+ process.nextTick(function () {
+ self._finish()
+ })
+ } else {
+ self._finish()
+ }
+ }
+ }
+}
+
+Glob.prototype._finish = function () {
+ assert(this instanceof Glob)
+ if (this.aborted)
+ return
+
+ if (this.realpath && !this._didRealpath)
+ return this._realpath()
+
+ common.finish(this)
+ this.emit('end', this.found)
+}
+
+Glob.prototype._realpath = function () {
+ if (this._didRealpath)
+ return
+
+ this._didRealpath = true
+
+ var n = this.matches.length
+ if (n === 0)
+ return this._finish()
+
+ var self = this
+ for (var i = 0; i < this.matches.length; i++)
+ this._realpathSet(i, next)
+
+ function next () {
+ if (--n === 0)
+ self._finish()
+ }
+}
+
+Glob.prototype._realpathSet = function (index, cb) {
+ var matchset = this.matches[index]
+ if (!matchset)
+ return cb()
+
+ var found = Object.keys(matchset)
+ var self = this
+ var n = found.length
+
+ if (n === 0)
+ return cb()
+
+ var set = this.matches[index] = Object.create(null)
+ found.forEach(function (p, i) {
+ // If there's a problem with the stat, then it means that
+ // one or more of the links in the realpath couldn't be
+ // resolved. just return the abs value in that case.
+ p = self._makeAbs(p)
+ rp.realpath(p, self.realpathCache, function (er, real) {
+ if (!er)
+ set[real] = true
+ else if (er.syscall === 'stat')
+ set[p] = true
+ else
+        self.emit('error', er) // unexpected realpath failure; surface the error
+
+ if (--n === 0) {
+ self.matches[index] = set
+ cb()
+ }
+ })
+ })
+}
+
+Glob.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+Glob.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
+
+Glob.prototype.abort = function () {
+ this.aborted = true
+ this.emit('abort')
+}
+
+Glob.prototype.pause = function () {
+ if (!this.paused) {
+ this.paused = true
+ this.emit('pause')
+ }
+}
+
+Glob.prototype.resume = function () {
+ if (this.paused) {
+ this.emit('resume')
+ this.paused = false
+ if (this._emitQueue.length) {
+ var eq = this._emitQueue.slice(0)
+ this._emitQueue.length = 0
+ for (var i = 0; i < eq.length; i ++) {
+ var e = eq[i]
+ this._emitMatch(e[0], e[1])
+ }
+ }
+ if (this._processQueue.length) {
+ var pq = this._processQueue.slice(0)
+ this._processQueue.length = 0
+ for (var i = 0; i < pq.length; i ++) {
+ var p = pq[i]
+ this._processing--
+ this._process(p[0], p[1], p[2], p[3])
+ }
+ }
+ }
+}
+
+Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
+ assert(this instanceof Glob)
+ assert(typeof cb === 'function')
+
+ if (this.aborted)
+ return
+
+ this._processing++
+ if (this.paused) {
+ this._processQueue.push([pattern, index, inGlobStar, cb])
+ return
+ }
+
+ //console.error('PROCESS %d', this._processing, pattern)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // see if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index, cb)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ //if ignored, skip _processing
+ if (childrenIgnored(this, read))
+ return cb()
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
+}
+
+Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return cb()
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return cb()
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return cb()
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ var newPattern
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+ this._process([e].concat(remain), index, inGlobStar, cb)
+ }
+ cb()
+}
+
+Glob.prototype._emitMatch = function (index, e) {
+ if (this.aborted)
+ return
+
+ if (isIgnored(this, e))
+ return
+
+ if (this.paused) {
+ this._emitQueue.push([index, e])
+ return
+ }
+
+ var abs = isAbsolute(e) ? e : this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute)
+ e = abs
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ var st = this.statCache[abs]
+ if (st)
+ this.emit('stat', e, st)
+
+ this.emit('match', e)
+}
+
+Glob.prototype._readdirInGlobStar = function (abs, cb) {
+ if (this.aborted)
+ return
+
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false, cb)
+
+ var lstatkey = 'lstat\0' + abs
+ var self = this
+ var lstatcb = inflight(lstatkey, lstatcb_)
+
+ if (lstatcb)
+ fs.lstat(abs, lstatcb)
+
+ function lstatcb_ (er, lstat) {
+ if (er && er.code === 'ENOENT')
+ return cb()
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ self.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory()) {
+ self.cache[abs] = 'FILE'
+ cb()
+ } else
+ self._readdir(abs, false, cb)
+ }
+}
+
+Glob.prototype._readdir = function (abs, inGlobStar, cb) {
+ if (this.aborted)
+ return
+
+ cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
+ if (!cb)
+ return
+
+ //console.error('RD %j %j', +inGlobStar, abs)
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs, cb)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return cb()
+
+ if (Array.isArray(c))
+ return cb(null, c)
+ }
+
+ var self = this
+ fs.readdir(abs, readdirCb(this, abs, cb))
+}
+
+function readdirCb (self, abs, cb) {
+ return function (er, entries) {
+ if (er)
+ self._readdirError(abs, er, cb)
+ else
+ self._readdirEntries(abs, entries, cb)
+ }
+}
+
+Glob.prototype._readdirEntries = function (abs, entries, cb) {
+ if (this.aborted)
+ return
+
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+ return cb(null, entries)
+}
+
+Glob.prototype._readdirError = function (f, er, cb) {
+ if (this.aborted)
+ return
+
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ this.emit('error', error)
+ this.abort()
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict) {
+ this.emit('error', er)
+ // If the error is handled, then we abort
+ // if not, we threw out of here
+ this.abort()
+ }
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+
+ return cb()
+}
+
+Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+
+Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+ //console.error('pgs2', prefix, remain[0], entries)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return cb()
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false, cb)
+
+ var isSym = this.symlinks[abs]
+ var len = entries.length
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return cb()
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true, cb)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true, cb)
+ }
+
+ cb()
+}
+
+Glob.prototype._processSimple = function (prefix, index, cb) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var self = this
+ this._stat(prefix, function (er, exists) {
+ self._processSimple2(prefix, index, er, exists, cb)
+ })
+}
+Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
+
+ //console.error('ps2', prefix, exists)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return cb()
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+ cb()
+}
+
+// Returns either 'DIR', 'FILE', or false
+Glob.prototype._stat = function (f, cb) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return cb()
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return cb(null, c)
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ // otherwise we have to stat, because maybe c=true
+ // if we know it exists, but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (stat !== undefined) {
+ if (stat === false)
+ return cb(null, stat)
+ else {
+ var type = stat.isDirectory() ? 'DIR' : 'FILE'
+ if (needDir && type === 'FILE')
+ return cb()
+ else
+ return cb(null, type, stat)
+ }
+ }
+
+ var self = this
+ var statcb = inflight('stat\0' + abs, lstatcb_)
+ if (statcb)
+ fs.lstat(abs, statcb)
+
+ function lstatcb_ (er, lstat) {
+ if (lstat && lstat.isSymbolicLink()) {
+ // If it's a symlink, then treat it as the target, unless
+ // the target does not exist, then treat it as a file.
+ return fs.stat(abs, function (er, stat) {
+ if (er)
+ self._stat2(f, abs, null, lstat, cb)
+ else
+ self._stat2(f, abs, er, stat, cb)
+ })
+ } else {
+ self._stat2(f, abs, er, lstat, cb)
+ }
+ }
+}
+
+Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return cb()
+ }
+
+ var needDir = f.slice(-1) === '/'
+ this.statCache[abs] = stat
+
+ if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
+ return cb(null, false, stat)
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ return cb(null, c, stat)
+}
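+// Summary of the cache values used above (descriptive note only): `false` means the
+// path is known to be missing, `true` means it exists but its type is unknown,
+// 'FILE' and 'DIR' record the type, and an array of directory entries doubles as a
+// 'DIR' marker — hence the normalization of arrays to 'DIR' in _stat().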
+
+
+/***/ }),
+
+/***/ 413:
+/***/ (function(module) {
+
+module.exports = require("stream");
+
+/***/ }),
+
+/***/ 414:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationBeforeRequest;
+
+const btoa = __webpack_require__(675);
+
+const withAuthorizationPrefix = __webpack_require__(85);
+
+function authenticationBeforeRequest(state, options) {
+ if (typeof state.auth === "string") {
+ options.headers.authorization = withAuthorizationPrefix(state.auth);
+ return;
+ }
+
+ if (state.auth.username) {
+ const hash = btoa(`${state.auth.username}:${state.auth.password}`);
+ options.headers.authorization = `Basic ${hash}`;
+ if (state.otp) {
+ options.headers["x-github-otp"] = state.otp;
+ }
+ return;
+ }
+
+ if (state.auth.clientId) {
+ // There is a special case for OAuth applications, where `clientId` and `clientSecret` are passed as
+ // Basic Authorization instead of query parameters. The only routes where that applies share the same
+ // URL though: `/applications/:client_id/tokens/:access_token`.
+ //
+ // 1. [Check an authorization](https://developer.github.com/v3/oauth_authorizations/#check-an-authorization)
+ // 2. [Reset an authorization](https://developer.github.com/v3/oauth_authorizations/#reset-an-authorization)
+ // 3. [Revoke an authorization for an application](https://developer.github.com/v3/oauth_authorizations/#revoke-an-authorization-for-an-application)
+ //
+ // We identify it by checking the URL. It must match both "/applications/:client_id/tokens/:access_token"
+ // and "/applications/123/tokens/token456".
+ if (/\/applications\/:?[\w_]+\/tokens\/:?[\w_]+($|\?)/.test(options.url)) {
+ const hash = btoa(`${state.auth.clientId}:${state.auth.clientSecret}`);
+ options.headers.authorization = `Basic ${hash}`;
+ return;
+ }
+
+ options.url += options.url.indexOf("?") === -1 ? "?" : "&";
+ options.url += `client_id=${state.auth.clientId}&client_secret=${state.auth.clientSecret}`;
+ return;
+ }
+
+ return Promise.resolve()
+
+ .then(() => {
+ return state.auth();
+ })
+
+ .then(authorization => {
+ options.headers.authorization = withAuthorizationPrefix(authorization);
+ });
+}
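+// Illustrative summary of the branches above (not part of the upstream module): a
+// string `auth` (e.g. a personal access token) becomes an Authorization header via
+// withAuthorizationPrefix(); username/password becomes "Basic <base64(username:password)>"
+// plus an optional x-github-otp header; a clientId/clientSecret pair is appended as
+// query parameters, except for the /applications/:client_id/tokens/:access_token
+// routes, which use Basic auth; otherwise `auth` is assumed to be a function that
+// returns an authorization value.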
+
+
+/***/ }),
+
+/***/ 417:
+/***/ (function(module) {
+
+module.exports = require("crypto");
+
+/***/ }),
+
+/***/ 427:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+// Older versions of Node.js might not have `util.getSystemErrorName()`.
+// In that case, fall back to the deprecated internal `process.binding('uv')`.
+const util = __webpack_require__(669);
+
+let uv;
+
+if (typeof util.getSystemErrorName === 'function') {
+ module.exports = util.getSystemErrorName;
+} else {
+ try {
+ uv = process.binding('uv');
+
+ if (typeof uv.errname !== 'function') {
+ throw new TypeError('uv.errname is not a function');
+ }
+ } catch (err) {
+ console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err);
+ uv = null;
+ }
+
+ module.exports = code => errname(uv, code);
+}
+
+// Used for testing the fallback behavior
+module.exports.__test__ = errname;
+
+function errname(uv, code) {
+ if (uv) {
+ return uv.errname(code);
+ }
+
+ if (!(code < 0)) {
+ throw new Error('err >= 0');
+ }
+
+ return `Unknown system error ${code}`;
+}
+
+
+
+/***/ }),
+
+/***/ 431:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const os = __importStar(__webpack_require__(87));
+/**
+ * Commands
+ *
+ * Command Format:
+ * ::name key=value,key=value::message
+ *
+ * Examples:
+ * ::warning::This is the message
+ * ::set-env name=MY_VAR::some value
+ */
+function issueCommand(command, properties, message) {
+ const cmd = new Command(command, properties, message);
+ process.stdout.write(cmd.toString() + os.EOL);
+}
+exports.issueCommand = issueCommand;
+function issue(name, message = '') {
+ issueCommand(name, {}, message);
+}
+exports.issue = issue;
+const CMD_STRING = '::';
+class Command {
+ constructor(command, properties, message) {
+ if (!command) {
+ command = 'missing.command';
+ }
+ this.command = command;
+ this.properties = properties;
+ this.message = message;
+ }
+ toString() {
+ let cmdStr = CMD_STRING + this.command;
+ if (this.properties && Object.keys(this.properties).length > 0) {
+ cmdStr += ' ';
+ let first = true;
+ for (const key in this.properties) {
+ if (this.properties.hasOwnProperty(key)) {
+ const val = this.properties[key];
+ if (val) {
+ if (first) {
+ first = false;
+ }
+ else {
+ cmdStr += ',';
+ }
+ cmdStr += `${key}=${escapeProperty(val)}`;
+ }
+ }
+ }
+ }
+ cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
+ return cmdStr;
+ }
+}
+/**
+ * Sanitizes an input into a string so it can be passed into issueCommand safely
+ * @param input input to sanitize into a string
+ */
+function toCommandValue(input) {
+ if (input === null || input === undefined) {
+ return '';
+ }
+ else if (typeof input === 'string' || input instanceof String) {
+ return input;
+ }
+ return JSON.stringify(input);
+}
+exports.toCommandValue = toCommandValue;
+function escapeData(s) {
+ return toCommandValue(s)
+ .replace(/%/g, '%25')
+ .replace(/\r/g, '%0D')
+ .replace(/\n/g, '%0A');
+}
+function escapeProperty(s) {
+ return toCommandValue(s)
+ .replace(/%/g, '%25')
+ .replace(/\r/g, '%0D')
+ .replace(/\n/g, '%0A')
+ .replace(/:/g, '%3A')
+ .replace(/,/g, '%2C');
+}
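+// Worked example of the escaping rules above (values are hypothetical): issuing
+//   issueCommand('warning', { file: 'a,b.js' }, 'line:1 bad')
+// writes "::warning file=a%2Cb.js::line:1 bad" to stdout — escapeProperty()
+// percent-encodes '%', '\r', '\n', ':' and ',', while escapeData() only
+// percent-encodes '%', '\r' and '\n'.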
+//# sourceMappingURL=command.js.map
+
+/***/ }),
+
+/***/ 446:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationRequestError;
+
+const { RequestError } = __webpack_require__(102);
+
+function authenticationRequestError(state, error, options) {
+ if (!error.headers) throw error;
+
+ const otpRequired = /required/.test(error.headers["x-github-otp"] || "");
+ // handle "2FA required" error only
+ if (error.status !== 401 || !otpRequired) {
+ throw error;
+ }
+
+ if (
+ error.status === 401 &&
+ otpRequired &&
+ error.request &&
+ error.request.headers["x-github-otp"]
+ ) {
+ if (state.otp) {
+ delete state.otp; // no longer valid, request again
+ } else {
+ throw new RequestError(
+ "Invalid one-time password for two-factor authentication",
+ 401,
+ {
+ headers: error.headers,
+ request: options
+ }
+ );
+ }
+ }
+
+ if (typeof state.auth.on2fa !== "function") {
+ throw new RequestError(
+ "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication",
+ 401,
+ {
+ headers: error.headers,
+ request: options
+ }
+ );
+ }
+
+ return Promise.resolve()
+ .then(() => {
+ return state.auth.on2fa();
+ })
+ .then(oneTimePassword => {
+ const newOptions = Object.assign(options, {
+ headers: Object.assign(options.headers, {
+ "x-github-otp": oneTimePassword
+ })
+ });
+ return state.octokit.request(newOptions).then(response => {
+ // If OTP still valid, then persist it for following requests
+ state.otp = oneTimePassword;
+ return response;
+ });
+ });
+}
+
+
+/***/ }),
+
+/***/ 453:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var once = __webpack_require__(969)
+var eos = __webpack_require__(9)
+var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes
+
+var noop = function () {}
+var ancient = /^v?\.0/.test(process.version)
+
+var isFn = function (fn) {
+ return typeof fn === 'function'
+}
+
+var isFS = function (stream) {
+ if (!ancient) return false // newer node versions do not need to care about fs in a special way
+ if (!fs) return false // browser
+ return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
+}
+
+var isRequest = function (stream) {
+ return stream.setHeader && isFn(stream.abort)
+}
+
+var destroyer = function (stream, reading, writing, callback) {
+ callback = once(callback)
+
+ var closed = false
+ stream.on('close', function () {
+ closed = true
+ })
+
+ eos(stream, {readable: reading, writable: writing}, function (err) {
+ if (err) return callback(err)
+ closed = true
+ callback()
+ })
+
+ var destroyed = false
+ return function (err) {
+ if (closed) return
+ if (destroyed) return
+ destroyed = true
+
+ if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
+ if (isRequest(stream)) return stream.abort() // request.destroy just does .end - .abort is what we want
+
+ if (isFn(stream.destroy)) return stream.destroy()
+
+ callback(err || new Error('stream was destroyed'))
+ }
+}
+
+var call = function (fn) {
+ fn()
+}
+
+var pipe = function (from, to) {
+ return from.pipe(to)
+}
+
+var pump = function () {
+ var streams = Array.prototype.slice.call(arguments)
+ var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop
+
+ if (Array.isArray(streams[0])) streams = streams[0]
+ if (streams.length < 2) throw new Error('pump requires two streams per minimum')
+
+ var error
+ var destroys = streams.map(function (stream, i) {
+ var reading = i < streams.length - 1
+ var writing = i > 0
+ return destroyer(stream, reading, writing, function (err) {
+ if (!error) error = err
+ if (err) destroys.forEach(call)
+ if (reading) return
+ destroys.forEach(call)
+ callback(error)
+ })
+ })
+
+ return streams.reduce(pipe)
+}
+
+module.exports = pump
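+// Usage sketch (file names are hypothetical):
+//   pump(fs.createReadStream('in.log'), zlib.createGzip(), fs.createWriteStream('out.log.gz'), err => {})
+// pipes the streams left to right and tears all of them down if any one of them
+// errors or closes before the pipeline finishes, then calls the callback once.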
+
+
+/***/ }),
+
+/***/ 454:
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var Stream = _interopDefault(__webpack_require__(413));
+var http = _interopDefault(__webpack_require__(605));
+var Url = _interopDefault(__webpack_require__(835));
+var https = _interopDefault(__webpack_require__(34));
+var zlib = _interopDefault(__webpack_require__(761));
+
+// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
+
+// fix an issue where "Readable" isn't a named export
+const Readable = Stream.Readable;
+
+const BUFFER = Symbol('buffer');
+const TYPE = Symbol('type');
+
+class Blob {
+ constructor() {
+ this[TYPE] = '';
+
+ const blobParts = arguments[0];
+ const options = arguments[1];
+
+ const buffers = [];
+ let size = 0;
+
+ if (blobParts) {
+ const a = blobParts;
+ const length = Number(a.length);
+ for (let i = 0; i < length; i++) {
+ const element = a[i];
+ let buffer;
+ if (element instanceof Buffer) {
+ buffer = element;
+ } else if (ArrayBuffer.isView(element)) {
+ buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
+ } else if (element instanceof ArrayBuffer) {
+ buffer = Buffer.from(element);
+ } else if (element instanceof Blob) {
+ buffer = element[BUFFER];
+ } else {
+ buffer = Buffer.from(typeof element === 'string' ? element : String(element));
+ }
+ size += buffer.length;
+ buffers.push(buffer);
+ }
+ }
+
+ this[BUFFER] = Buffer.concat(buffers);
+
+ let type = options && options.type !== undefined && String(options.type).toLowerCase();
+ if (type && !/[^\u0020-\u007E]/.test(type)) {
+ this[TYPE] = type;
+ }
+ }
+ get size() {
+ return this[BUFFER].length;
+ }
+ get type() {
+ return this[TYPE];
+ }
+ text() {
+ return Promise.resolve(this[BUFFER].toString());
+ }
+ arrayBuffer() {
+ const buf = this[BUFFER];
+ const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ return Promise.resolve(ab);
+ }
+ stream() {
+ const readable = new Readable();
+ readable._read = function () {};
+ readable.push(this[BUFFER]);
+ readable.push(null);
+ return readable;
+ }
+ toString() {
+ return '[object Blob]';
+ }
+ slice() {
+ const size = this.size;
+
+ const start = arguments[0];
+ const end = arguments[1];
+ let relativeStart, relativeEnd;
+ if (start === undefined) {
+ relativeStart = 0;
+ } else if (start < 0) {
+ relativeStart = Math.max(size + start, 0);
+ } else {
+ relativeStart = Math.min(start, size);
+ }
+ if (end === undefined) {
+ relativeEnd = size;
+ } else if (end < 0) {
+ relativeEnd = Math.max(size + end, 0);
+ } else {
+ relativeEnd = Math.min(end, size);
+ }
+ const span = Math.max(relativeEnd - relativeStart, 0);
+
+ const buffer = this[BUFFER];
+ const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
+ const blob = new Blob([], { type: arguments[2] });
+ blob[BUFFER] = slicedBuffer;
+ return blob;
+ }
+}
+
+Object.defineProperties(Blob.prototype, {
+ size: { enumerable: true },
+ type: { enumerable: true },
+ slice: { enumerable: true }
+});
+
+Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
+ value: 'Blob',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * fetch-error.js
+ *
+ * FetchError interface for operational errors
+ */
+
+/**
+ * Create FetchError instance
+ *
+ * @param String message Error message for human
+ * @param String type Error type for machine
+ * @param String systemError For Node.js system error
+ * @return FetchError
+ */
+function FetchError(message, type, systemError) {
+ Error.call(this, message);
+
+ this.message = message;
+ this.type = type;
+
+ // when err.type is `system`, err.code contains system error code
+ if (systemError) {
+ this.code = this.errno = systemError.code;
+ }
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+FetchError.prototype = Object.create(Error.prototype);
+FetchError.prototype.constructor = FetchError;
+FetchError.prototype.name = 'FetchError';
+
+let convert;
+try {
+ convert = __webpack_require__(18).convert;
+} catch (e) {}
+
+const INTERNALS = Symbol('Body internals');
+
+// fix an issue where "PassThrough" isn't a named export for node <10
+const PassThrough = Stream.PassThrough;
+
+/**
+ * Body mixin
+ *
+ * Ref: https://fetch.spec.whatwg.org/#body
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+function Body(body) {
+ var _this = this;
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ _ref$size = _ref.size;
+
+ let size = _ref$size === undefined ? 0 : _ref$size;
+ var _ref$timeout = _ref.timeout;
+ let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
+
+ if (body == null) {
+ // body is undefined or null
+ body = null;
+ } else if (isURLSearchParams(body)) {
+ // body is a URLSearchParams
+ body = Buffer.from(body.toString());
+ } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+ // body is ArrayBuffer
+ body = Buffer.from(body);
+ } else if (ArrayBuffer.isView(body)) {
+ // body is ArrayBufferView
+ body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
+ } else if (body instanceof Stream) ; else {
+ // none of the above
+ // coerce to string then buffer
+ body = Buffer.from(String(body));
+ }
+ this[INTERNALS] = {
+ body,
+ disturbed: false,
+ error: null
+ };
+ this.size = size;
+ this.timeout = timeout;
+
+ if (body instanceof Stream) {
+ body.on('error', function (err) {
+ const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
+ _this[INTERNALS].error = error;
+ });
+ }
+}
+
+Body.prototype = {
+ get body() {
+ return this[INTERNALS].body;
+ },
+
+ get bodyUsed() {
+ return this[INTERNALS].disturbed;
+ },
+
+ /**
+ * Decode response as ArrayBuffer
+ *
+ * @return Promise
+ */
+ arrayBuffer() {
+ return consumeBody.call(this).then(function (buf) {
+ return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ });
+ },
+
+ /**
+ * Return raw response as Blob
+ *
+ * @return Promise
+ */
+ blob() {
+ let ct = this.headers && this.headers.get('content-type') || '';
+ return consumeBody.call(this).then(function (buf) {
+ return Object.assign(
+ // Prevent copying
+ new Blob([], {
+ type: ct.toLowerCase()
+ }), {
+ [BUFFER]: buf
+ });
+ });
+ },
+
+ /**
+ * Decode response as json
+ *
+ * @return Promise
+ */
+ json() {
+ var _this2 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ try {
+ return JSON.parse(buffer.toString());
+ } catch (err) {
+ return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
+ }
+ });
+ },
+
+ /**
+ * Decode response as text
+ *
+ * @return Promise
+ */
+ text() {
+ return consumeBody.call(this).then(function (buffer) {
+ return buffer.toString();
+ });
+ },
+
+ /**
+ * Decode response as buffer (non-spec api)
+ *
+ * @return Promise
+ */
+ buffer() {
+ return consumeBody.call(this);
+ },
+
+ /**
+ * Decode response as text, while automatically detecting the encoding and
+ * trying to decode to UTF-8 (non-spec api)
+ *
+ * @return Promise
+ */
+ textConverted() {
+ var _this3 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ return convertBody(buffer, _this3.headers);
+ });
+ }
+};
+
+// In browsers, all properties are enumerable.
+Object.defineProperties(Body.prototype, {
+ body: { enumerable: true },
+ bodyUsed: { enumerable: true },
+ arrayBuffer: { enumerable: true },
+ blob: { enumerable: true },
+ json: { enumerable: true },
+ text: { enumerable: true }
+});
+
+Body.mixIn = function (proto) {
+ for (const name of Object.getOwnPropertyNames(Body.prototype)) {
+ // istanbul ignore else: future proof
+ if (!(name in proto)) {
+ const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
+ Object.defineProperty(proto, name, desc);
+ }
+ }
+};
+
+/**
+ * Consume and convert an entire Body to a Buffer.
+ *
+ * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
+ *
+ * @return Promise
+ */
+function consumeBody() {
+ var _this4 = this;
+
+ if (this[INTERNALS].disturbed) {
+ return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
+ }
+
+ this[INTERNALS].disturbed = true;
+
+ if (this[INTERNALS].error) {
+ return Body.Promise.reject(this[INTERNALS].error);
+ }
+
+ let body = this.body;
+
+ // body is null
+ if (body === null) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is blob
+ if (isBlob(body)) {
+ body = body.stream();
+ }
+
+ // body is buffer
+ if (Buffer.isBuffer(body)) {
+ return Body.Promise.resolve(body);
+ }
+
+ // istanbul ignore if: should never happen
+ if (!(body instanceof Stream)) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is stream
+ // get ready to actually consume the body
+ let accum = [];
+ let accumBytes = 0;
+ let abort = false;
+
+ return new Body.Promise(function (resolve, reject) {
+ let resTimeout;
+
+ // allow timeout on slow response body
+ if (_this4.timeout) {
+ resTimeout = setTimeout(function () {
+ abort = true;
+ reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
+ }, _this4.timeout);
+ }
+
+ // handle stream errors
+ body.on('error', function (err) {
+ if (err.name === 'AbortError') {
+ // if the request was aborted, reject with this Error
+ abort = true;
+ reject(err);
+ } else {
+ // other errors, such as incorrect content-encoding
+ reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+
+ body.on('data', function (chunk) {
+ if (abort || chunk === null) {
+ return;
+ }
+
+ if (_this4.size && accumBytes + chunk.length > _this4.size) {
+ abort = true;
+ reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
+ return;
+ }
+
+ accumBytes += chunk.length;
+ accum.push(chunk);
+ });
+
+ body.on('end', function () {
+ if (abort) {
+ return;
+ }
+
+ clearTimeout(resTimeout);
+
+ try {
+ resolve(Buffer.concat(accum, accumBytes));
+ } catch (err) {
+ // handle streams that have accumulated too much data (issue #414)
+ reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+ });
+}
+
+/**
+ * Detect buffer encoding and convert to target encoding
+ * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
+ *
+ * @param Buffer buffer Incoming buffer
+ * @param String encoding Target encoding
+ * @return String
+ */
+function convertBody(buffer, headers) {
+ if (typeof convert !== 'function') {
+ throw new Error('The package `encoding` must be installed to use the textConverted() function');
+ }
+
+ const ct = headers.get('content-type');
+ let charset = 'utf-8';
+ let res, str;
+
+ // header
+ if (ct) {
+ res = /charset=([^;]*)/i.exec(ct);
+ }
+
+ // no charset in content type, peek at response body for at most 1024 bytes
+ str = buffer.slice(0, 1024).toString();
+
+ // html5
+ if (!res && str) {
+ res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
+ }
+
+ // xml
+ if (!res && str) {
+ res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
+ }
+
+ // found charset
+ if (res) {
+ charset = res.pop();
+
+ // prevent decode issues when sites use incorrect encoding
+ // ref: https://hsivonen.fi/encoding-menu/
+ if (charset === 'gb2312' || charset === 'gbk') {
+ charset = 'gb18030';
+ }
+ }
+
+ // turn raw buffers into a single utf-8 buffer
+ return convert(buffer, 'UTF-8', charset).toString();
+}
+
+// expose Promise
+Body.Promise = global.Promise;
+
+/**
+ * headers.js
+ *
+ * Headers class offers convenient helpers
+ */
+
+const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
+
+function validateName(name) {
+ name = `${name}`;
+ if (invalidTokenRegex.test(name) || name === '') {
+ throw new TypeError(`${name} is not a legal HTTP header name`);
+ }
+}
+
+function validateValue(value) {
+ value = `${value}`;
+ if (invalidHeaderCharRegex.test(value)) {
+ throw new TypeError(`${value} is not a legal HTTP header value`);
+ }
+}
+
+/**
+ * Find the key in the map object given a header name.
+ * Returns undefined if not found.
+ *
+ * @param Object map Header map
+ * @param String name Header name
+ * @return String|Undefined
+ */
+function find(map, name) {
+ name = name.toLowerCase();
+ for (const key in map) {
+ if (key.toLowerCase() === name) {
+ return key;
+ }
+ }
+ return undefined;
+}
+
+const MAP = Symbol('map');
+class Headers {
+ /**
+ * Headers class
+ *
+ * @param Object headers Response headers
+ * @return Void
+ */
+ constructor() {
+ let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+
+ this[MAP] = Object.create(null);
+
+ if (init instanceof Headers) {
+ const rawHeaders = init.raw();
+ const headerNames = Object.keys(rawHeaders);
+
+ for (const headerName of headerNames) {
+ for (const value of rawHeaders[headerName]) {
+ this.append(headerName, value);
+ }
+ }
+
+ return;
+ }
+
+ // We don't worry about converting prop to ByteString here as append()
+ // will handle it.
+ if (init == null) ; else if (typeof init === 'object') {
+ const method = init[Symbol.iterator];
+ if (method != null) {
+ if (typeof method !== 'function') {
+ throw new TypeError('Header pairs must be iterable');
+ }
+
+ // sequence<sequence<ByteString>>
+ // Note: per spec we have to first exhaust the lists then process them
+ const pairs = [];
+ for (const pair of init) {
+ if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+ throw new TypeError('Each header pair must be iterable');
+ }
+ pairs.push(Array.from(pair));
+ }
+
+ for (const pair of pairs) {
+ if (pair.length !== 2) {
+ throw new TypeError('Each header pair must be a name/value tuple');
+ }
+ this.append(pair[0], pair[1]);
+ }
+ } else {
+ // record
+ for (const key of Object.keys(init)) {
+ const value = init[key];
+ this.append(key, value);
+ }
+ }
+ } else {
+ throw new TypeError('Provided initializer must be an object');
+ }
+ }
+
+ /**
+ * Return combined header value given name
+ *
+ * @param String name Header name
+ * @return Mixed
+ */
+ get(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key === undefined) {
+ return null;
+ }
+
+ return this[MAP][key].join(', ');
+ }
+
+ /**
+ * Iterate over all headers
+ *
+ * @param Function callback Executed for each item with parameters (value, name, thisArg)
+ * @param Boolean thisArg `this` context for callback function
+ * @return Void
+ */
+ forEach(callback) {
+ let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
+
+ let pairs = getHeaders(this);
+ let i = 0;
+ while (i < pairs.length) {
+ var _pairs$i = pairs[i];
+ const name = _pairs$i[0],
+ value = _pairs$i[1];
+
+ callback.call(thisArg, value, name, this);
+ pairs = getHeaders(this);
+ i++;
+ }
+ }
+
+ /**
+ * Overwrite header values given name
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ set(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ this[MAP][key !== undefined ? key : name] = [value];
+ }
+
+ /**
+ * Append a value onto existing header
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ append(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ this[MAP][key].push(value);
+ } else {
+ this[MAP][name] = [value];
+ }
+ }
+
+ /**
+ * Check for header name existence
+ *
+ * @param String name Header name
+ * @return Boolean
+ */
+ has(name) {
+ name = `${name}`;
+ validateName(name);
+ return find(this[MAP], name) !== undefined;
+ }
+
+ /**
+ * Delete all header values given name
+ *
+ * @param String name Header name
+ * @return Void
+ */
+ delete(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ delete this[MAP][key];
+ }
+ }
+
+ /**
+ * Return raw headers (non-spec api)
+ *
+ * @return Object
+ */
+ raw() {
+ return this[MAP];
+ }
+
+ /**
+ * Get an iterator on keys.
+ *
+ * @return Iterator
+ */
+ keys() {
+ return createHeadersIterator(this, 'key');
+ }
+
+ /**
+ * Get an iterator on values.
+ *
+ * @return Iterator
+ */
+ values() {
+ return createHeadersIterator(this, 'value');
+ }
+
+ /**
+ * Get an iterator on entries.
+ *
+ * This is the default iterator of the Headers object.
+ *
+ * @return Iterator
+ */
+ [Symbol.iterator]() {
+ return createHeadersIterator(this, 'key+value');
+ }
+}
+Headers.prototype.entries = Headers.prototype[Symbol.iterator];
+
+Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
+ value: 'Headers',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Headers.prototype, {
+ get: { enumerable: true },
+ forEach: { enumerable: true },
+ set: { enumerable: true },
+ append: { enumerable: true },
+ has: { enumerable: true },
+ delete: { enumerable: true },
+ keys: { enumerable: true },
+ values: { enumerable: true },
+ entries: { enumerable: true }
+});
+
+function getHeaders(headers) {
+ let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
+
+ const keys = Object.keys(headers[MAP]).sort();
+ return keys.map(kind === 'key' ? function (k) {
+ return k.toLowerCase();
+ } : kind === 'value' ? function (k) {
+ return headers[MAP][k].join(', ');
+ } : function (k) {
+ return [k.toLowerCase(), headers[MAP][k].join(', ')];
+ });
+}
+
+const INTERNAL = Symbol('internal');
+
+function createHeadersIterator(target, kind) {
+ const iterator = Object.create(HeadersIteratorPrototype);
+ iterator[INTERNAL] = {
+ target,
+ kind,
+ index: 0
+ };
+ return iterator;
+}
+
+const HeadersIteratorPrototype = Object.setPrototypeOf({
+ next() {
+ // istanbul ignore if
+ if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
+ throw new TypeError('Value of `this` is not a HeadersIterator');
+ }
+
+ var _INTERNAL = this[INTERNAL];
+ const target = _INTERNAL.target,
+ kind = _INTERNAL.kind,
+ index = _INTERNAL.index;
+
+ const values = getHeaders(target, kind);
+ const len = values.length;
+ if (index >= len) {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+
+ this[INTERNAL].index = index + 1;
+
+ return {
+ value: values[index],
+ done: false
+ };
+ }
+}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
+
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
+ value: 'HeadersIterator',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * Export the Headers object in a form that Node.js can consume.
+ *
+ * @param Headers headers
+ * @return Object
+ */
+function exportNodeCompatibleHeaders(headers) {
+ const obj = Object.assign({ __proto__: null }, headers[MAP]);
+
+ // http.request() only supports string as Host header. This hack makes
+ // specifying custom Host header possible.
+ const hostHeaderKey = find(headers[MAP], 'Host');
+ if (hostHeaderKey !== undefined) {
+ obj[hostHeaderKey] = obj[hostHeaderKey][0];
+ }
+
+ return obj;
+}
+
+/**
+ * Create a Headers object from an object of headers, ignoring those that do
+ * not conform to HTTP grammar productions.
+ *
+ * @param Object obj Object of headers
+ * @return Headers
+ */
+function createHeadersLenient(obj) {
+ const headers = new Headers();
+ for (const name of Object.keys(obj)) {
+ if (invalidTokenRegex.test(name)) {
+ continue;
+ }
+ if (Array.isArray(obj[name])) {
+ for (const val of obj[name]) {
+ if (invalidHeaderCharRegex.test(val)) {
+ continue;
+ }
+ if (headers[MAP][name] === undefined) {
+ headers[MAP][name] = [val];
+ } else {
+ headers[MAP][name].push(val);
+ }
+ }
+ } else if (!invalidHeaderCharRegex.test(obj[name])) {
+ headers[MAP][name] = [obj[name]];
+ }
+ }
+ return headers;
+}
+
+const INTERNALS$1 = Symbol('Response internals');
+
+// fix an issue where "STATUS_CODES" isn't a named export for node <10
+const STATUS_CODES = http.STATUS_CODES;
+
+/**
+ * Response class
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+class Response {
+ constructor() {
+ let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
+ let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ Body.call(this, body, opts);
+
+ const status = opts.status || 200;
+ const headers = new Headers(opts.headers);
+
+ if (body != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(body);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ this[INTERNALS$1] = {
+ url: opts.url,
+ status,
+ statusText: opts.statusText || STATUS_CODES[status],
+ headers,
+ counter: opts.counter
+ };
+ }
+
+ get url() {
+ return this[INTERNALS$1].url || '';
+ }
+
+ get status() {
+ return this[INTERNALS$1].status;
+ }
+
+ /**
+ * Convenience property representing if the request ended normally
+ */
+ get ok() {
+ return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
+ }
+
+ get redirected() {
+ return this[INTERNALS$1].counter > 0;
+ }
+
+ get statusText() {
+ return this[INTERNALS$1].statusText;
+ }
+
+ get headers() {
+ return this[INTERNALS$1].headers;
+ }
+
+ /**
+ * Clone this response
+ *
+ * @return Response
+ */
+ clone() {
+ return new Response(clone(this), {
+ url: this.url,
+ status: this.status,
+ statusText: this.statusText,
+ headers: this.headers,
+ ok: this.ok,
+ redirected: this.redirected
+ });
+ }
+}
+
+Body.mixIn(Response.prototype);
+
+Object.defineProperties(Response.prototype, {
+ url: { enumerable: true },
+ status: { enumerable: true },
+ ok: { enumerable: true },
+ redirected: { enumerable: true },
+ statusText: { enumerable: true },
+ headers: { enumerable: true },
+ clone: { enumerable: true }
+});
+
+Object.defineProperty(Response.prototype, Symbol.toStringTag, {
+ value: 'Response',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+const INTERNALS$2 = Symbol('Request internals');
+
+// fix an issue where "format" and "parse" aren't named exports for node <10
+const parse_url = Url.parse;
+const format_url = Url.format;
+
+const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
+
+/**
+ * Check if a value is an instance of Request.
+ *
+ * @param Mixed input
+ * @return Boolean
+ */
+function isRequest(input) {
+ return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
+}
+
+function isAbortSignal(signal) {
+ const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
+ return !!(proto && proto.constructor.name === 'AbortSignal');
+}
+
+/**
+ * Request class
+ *
+ * @param Mixed input Url or Request instance
+ * @param Object init Custom options
+ * @return Void
+ */
+class Request {
+ constructor(input) {
+ let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ let parsedURL;
+
+ // normalize input
+ if (!isRequest(input)) {
+ if (input && input.href) {
+ // in order to support Node.js' Url objects; though WHATWG's URL objects
+ // will fall into this branch also (since their `toString()` will return
+ // `href` property anyway)
+ parsedURL = parse_url(input.href);
+ } else {
+ // coerce input to a string before attempting to parse
+ parsedURL = parse_url(`${input}`);
+ }
+ input = {};
+ } else {
+ parsedURL = parse_url(input.url);
+ }
+
+ let method = init.method || input.method || 'GET';
+ method = method.toUpperCase();
+
+ if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
+ throw new TypeError('Request with GET/HEAD method cannot have body');
+ }
+
+ let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
+
+ Body.call(this, inputBody, {
+ timeout: init.timeout || input.timeout || 0,
+ size: init.size || input.size || 0
+ });
+
+ const headers = new Headers(init.headers || input.headers || {});
+
+ if (inputBody != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(inputBody);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ let signal = isRequest(input) ? input.signal : null;
+ if ('signal' in init) signal = init.signal;
+
+ if (signal != null && !isAbortSignal(signal)) {
+ throw new TypeError('Expected signal to be an instanceof AbortSignal');
+ }
+
+ this[INTERNALS$2] = {
+ method,
+ redirect: init.redirect || input.redirect || 'follow',
+ headers,
+ parsedURL,
+ signal
+ };
+
+ // node-fetch-only options
+ this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
+ this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
+ this.counter = init.counter || input.counter || 0;
+ this.agent = init.agent || input.agent;
+ }
+
+ get method() {
+ return this[INTERNALS$2].method;
+ }
+
+ get url() {
+ return format_url(this[INTERNALS$2].parsedURL);
+ }
+
+ get headers() {
+ return this[INTERNALS$2].headers;
+ }
+
+ get redirect() {
+ return this[INTERNALS$2].redirect;
+ }
+
+ get signal() {
+ return this[INTERNALS$2].signal;
+ }
+
+ /**
+ * Clone this request
+ *
+ * @return Request
+ */
+ clone() {
+ return new Request(this);
+ }
+}
+
+Body.mixIn(Request.prototype);
+
+Object.defineProperty(Request.prototype, Symbol.toStringTag, {
+ value: 'Request',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Request.prototype, {
+ method: { enumerable: true },
+ url: { enumerable: true },
+ headers: { enumerable: true },
+ redirect: { enumerable: true },
+ clone: { enumerable: true },
+ signal: { enumerable: true }
+});
+
+/**
+ * Convert a Request to Node.js http request options.
+ *
+ * @param Request A Request instance
+ * @return Object The options object to be passed to http.request
+ */
+function getNodeRequestOptions(request) {
+ const parsedURL = request[INTERNALS$2].parsedURL;
+ const headers = new Headers(request[INTERNALS$2].headers);
+
+ // fetch step 1.3
+ if (!headers.has('Accept')) {
+ headers.set('Accept', '*/*');
+ }
+
+ // Basic fetch
+ if (!parsedURL.protocol || !parsedURL.hostname) {
+ throw new TypeError('Only absolute URLs are supported');
+ }
+
+ if (!/^https?:$/.test(parsedURL.protocol)) {
+ throw new TypeError('Only HTTP(S) protocols are supported');
+ }
+
+ if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
+ throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
+ }
+
+ // HTTP-network-or-cache fetch steps 2.4-2.7
+ let contentLengthValue = null;
+ if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
+ contentLengthValue = '0';
+ }
+ if (request.body != null) {
+ const totalBytes = getTotalBytes(request);
+ if (typeof totalBytes === 'number') {
+ contentLengthValue = String(totalBytes);
+ }
+ }
+ if (contentLengthValue) {
+ headers.set('Content-Length', contentLengthValue);
+ }
+
+ // HTTP-network-or-cache fetch step 2.11
+ if (!headers.has('User-Agent')) {
+ headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
+ }
+
+ // HTTP-network-or-cache fetch step 2.15
+ if (request.compress && !headers.has('Accept-Encoding')) {
+ headers.set('Accept-Encoding', 'gzip,deflate');
+ }
+
+ let agent = request.agent;
+ if (typeof agent === 'function') {
+ agent = agent(parsedURL);
+ }
+
+ if (!headers.has('Connection') && !agent) {
+ headers.set('Connection', 'close');
+ }
+
+ // HTTP-network fetch step 4.2
+ // chunked encoding is handled by Node.js
+
+ return Object.assign({}, parsedURL, {
+ method: request.method,
+ headers: exportNodeCompatibleHeaders(headers),
+ agent
+ });
+}
+
+/**
+ * abort-error.js
+ *
+ * AbortError interface for cancelled requests
+ */
+
+/**
+ * Create AbortError instance
+ *
+ * @param String message Error message for human
+ * @return AbortError
+ */
+function AbortError(message) {
+ Error.call(this, message);
+
+ this.type = 'aborted';
+ this.message = message;
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+AbortError.prototype = Object.create(Error.prototype);
+AbortError.prototype.constructor = AbortError;
+AbortError.prototype.name = 'AbortError';
+
+// fix an issue where "PassThrough" and "resolve" aren't named exports for node <10
+const PassThrough$1 = Stream.PassThrough;
+const resolve_url = Url.resolve;
+
+/**
+ * Fetch function
+ *
+ * @param Mixed url Absolute url or Request instance
+ * @param Object opts Fetch options
+ * @return Promise
+ */
+function fetch(url, opts) {
+
+ // allow custom promise
+ if (!fetch.Promise) {
+ throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
+ }
+
+ Body.Promise = fetch.Promise;
+
+ // wrap http.request into fetch
+ return new fetch.Promise(function (resolve, reject) {
+ // build request object
+ const request = new Request(url, opts);
+ const options = getNodeRequestOptions(request);
+
+ const send = (options.protocol === 'https:' ? https : http).request;
+ const signal = request.signal;
+
+ let response = null;
+
+ const abort = function abort() {
+ let error = new AbortError('The user aborted a request.');
+ reject(error);
+ if (request.body && request.body instanceof Stream.Readable) {
+ request.body.destroy(error);
+ }
+ if (!response || !response.body) return;
+ response.body.emit('error', error);
+ };
+
+ if (signal && signal.aborted) {
+ abort();
+ return;
+ }
+
+ const abortAndFinalize = function abortAndFinalize() {
+ abort();
+ finalize();
+ };
+
+ // send request
+ const req = send(options);
+ let reqTimeout;
+
+ if (signal) {
+ signal.addEventListener('abort', abortAndFinalize);
+ }
+
+ function finalize() {
+ req.abort();
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ clearTimeout(reqTimeout);
+ }
+
+ if (request.timeout) {
+ req.once('socket', function (socket) {
+ reqTimeout = setTimeout(function () {
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
+ finalize();
+ }, request.timeout);
+ });
+ }
+
+ req.on('error', function (err) {
+ reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
+ finalize();
+ });
+
+ req.on('response', function (res) {
+ clearTimeout(reqTimeout);
+
+ const headers = createHeadersLenient(res.headers);
+
+ // HTTP fetch step 5
+ if (fetch.isRedirect(res.statusCode)) {
+ // HTTP fetch step 5.2
+ const location = headers.get('Location');
+
+ // HTTP fetch step 5.3
+ const locationURL = location === null ? null : resolve_url(request.url, location);
+
+ // HTTP fetch step 5.5
+ switch (request.redirect) {
+ case 'error':
+ reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'));
+ finalize();
+ return;
+ case 'manual':
+ // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
+ if (locationURL !== null) {
+ // handle corrupted header
+ try {
+ headers.set('Location', locationURL);
+ } catch (err) {
+ // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
+ reject(err);
+ }
+ }
+ break;
+ case 'follow':
+ // HTTP-redirect fetch step 2
+ if (locationURL === null) {
+ break;
+ }
+
+ // HTTP-redirect fetch step 5
+ if (request.counter >= request.follow) {
+ reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 6 (counter increment)
+ // Create a new Request object.
+ const requestOpts = {
+ headers: new Headers(request.headers),
+ follow: request.follow,
+ counter: request.counter + 1,
+ agent: request.agent,
+ compress: request.compress,
+ method: request.method,
+ body: request.body,
+ signal: request.signal,
+ timeout: request.timeout
+ };
+
+ // HTTP-redirect fetch step 9
+ if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
+ reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 11
+ if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
+ requestOpts.method = 'GET';
+ requestOpts.body = undefined;
+ requestOpts.headers.delete('content-length');
+ }
+
+ // HTTP-redirect fetch step 15
+ resolve(fetch(new Request(locationURL, requestOpts)));
+ finalize();
+ return;
+ }
+ }
+
+ // prepare response
+ res.once('end', function () {
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ });
+ let body = res.pipe(new PassThrough$1());
+
+ const response_options = {
+ url: request.url,
+ status: res.statusCode,
+ statusText: res.statusMessage,
+ headers: headers,
+ size: request.size,
+ timeout: request.timeout,
+ counter: request.counter
+ };
+
+ // HTTP-network fetch step 12.1.1.3
+ const codings = headers.get('Content-Encoding');
+
+ // HTTP-network fetch step 12.1.1.4: handle content codings
+
+ // in following scenarios we ignore compression support
+ // 1. compression support is disabled
+ // 2. HEAD request
+ // 3. no Content-Encoding header
+ // 4. no content response (204)
+ // 5. content not modified response (304)
+ if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // For Node v6+
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ const zlibOptions = {
+ flush: zlib.Z_SYNC_FLUSH,
+ finishFlush: zlib.Z_SYNC_FLUSH
+ };
+
+ // for gzip
+ if (codings == 'gzip' || codings == 'x-gzip') {
+ body = body.pipe(zlib.createGunzip(zlibOptions));
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // for deflate
+ if (codings == 'deflate' || codings == 'x-deflate') {
+ // handle the infamous raw deflate response from old servers
+ // a hack for old IIS and Apache servers
+ const raw = res.pipe(new PassThrough$1());
+ raw.once('data', function (chunk) {
+ // see http://stackoverflow.com/questions/37519828
+ if ((chunk[0] & 0x0F) === 0x08) {
+ body = body.pipe(zlib.createInflate());
+ } else {
+ body = body.pipe(zlib.createInflateRaw());
+ }
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+ return;
+ }
+
+ // for br
+ if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = body.pipe(zlib.createBrotliDecompress());
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // otherwise, use response as-is
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+
+ writeToStream(req, request);
+ });
+}
+/**
+ * Redirect code matching
+ *
+ * @param Number code Status code
+ * @return Boolean
+ */
+fetch.isRedirect = function (code) {
+ return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+};
+
+// expose Promise
+fetch.Promise = global.Promise;
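+// Usage sketch (URL is hypothetical):
+//   fetch('https://example.invalid/data.json').then(res => res.json())
+// resolves through the Body mixin's json() defined above; redirects are followed
+// up to `follow` times (20 by default) by the 'response' handler above.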
+
+module.exports = exports = fetch;
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.default = exports;
+exports.Headers = Headers;
+exports.Request = Request;
+exports.Response = Response;
+exports.FetchError = FetchError;
+
+
+/***/ }),
+
+/***/ 462:
+/***/ (function(module) {
+
+"use strict";
+
+
+// See http://www.robvanderwoude.com/escapechars.php
+const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;
+
+function escapeCommand(arg) {
+ // Escape meta chars
+ arg = arg.replace(metaCharsRegExp, '^$1');
+
+ return arg;
+}
+
+function escapeArgument(arg, doubleEscapeMetaChars) {
+ // Convert to string
+ arg = `${arg}`;
+
+ // Algorithm below is based on https://qntm.org/cmd
+
+ // Sequence of backslashes followed by a double quote:
+ // double up all the backslashes and escape the double quote
+ arg = arg.replace(/(\\*)"/g, '$1$1\\"');
+
+ // Sequence of backslashes followed by the end of the string
+ // (which will become a double quote later):
+ // double up all the backslashes
+ arg = arg.replace(/(\\*)$/, '$1$1');
+
+ // All other backslashes occur literally
+
+ // Quote the whole thing:
+ arg = `"${arg}"`;
+
+ // Escape meta chars
+ arg = arg.replace(metaCharsRegExp, '^$1');
+
+ // Double escape meta chars if necessary
+ if (doubleEscapeMetaChars) {
+ arg = arg.replace(metaCharsRegExp, '^$1');
+ }
+
+ return arg;
+}
+
+module.exports.command = escapeCommand;
+module.exports.argument = escapeArgument;
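+// For illustration: escapeArgument('say "hi"') first backslash-escapes the inner
+// quotes, wraps the result in double quotes, and then caret-escapes every cmd.exe
+// metacharacter (including the quotes it just added), so the result is safe to put
+// on a cmd.exe command line; escapeCommand() applies only the caret-escaping step.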
+
+
+/***/ }),
+
+/***/ 463:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var deprecation = __webpack_require__(692);
+var once = _interopDefault(__webpack_require__(969));
+
+const logOnce = once(deprecation => console.warn(deprecation));
+/**
+ * Error with extra properties to help with debugging
+ */
+
+class RequestError extends Error {
+ constructor(message, statusCode, options) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = "HttpError";
+ this.status = statusCode;
+ Object.defineProperty(this, "code", {
+ get() {
+ logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
+ return statusCode;
+ }
+
+ });
+ this.headers = options.headers || {}; // redact request credentials without mutating original request options
+
+ const requestCopy = Object.assign({}, options.request);
+
+ if (options.request.headers.authorization) {
+ requestCopy.headers = Object.assign({}, options.request.headers, {
+ authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
+ });
+ }
+
+ requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
+ // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
+ .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
+ // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
+ .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
+ this.request = requestCopy;
+ }
+
+}
+
+exports.RequestError = RequestError;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 469:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+// Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts
+const graphql_1 = __webpack_require__(898);
+const rest_1 = __webpack_require__(741);
+const Context = __importStar(__webpack_require__(262));
+const httpClient = __importStar(__webpack_require__(539));
+// We need this in order to extend Octokit
+rest_1.Octokit.prototype = new rest_1.Octokit();
+exports.context = new Context.Context();
+class GitHub extends rest_1.Octokit {
+ constructor(token, opts) {
+ super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts)));
+ this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts));
+ }
+ /**
+ * Disambiguates the constructor overload parameters
+ */
+ static disambiguate(token, opts) {
+ return [
+ typeof token === 'string' ? token : '',
+ typeof token === 'object' ? token : opts || {}
+ ];
+ }
+ static getOctokitOptions(args) {
+ const token = args[0];
+ const options = Object.assign({}, args[1]); // Shallow clone - don't mutate the object provided by the caller
+ // Auth
+ const auth = GitHub.getAuthString(token, options);
+ if (auth) {
+ options.auth = auth;
+ }
+ // Proxy
+ const agent = GitHub.getProxyAgent(options);
+ if (agent) {
+ // Shallow clone - don't mutate the object provided by the caller
+ options.request = options.request ? Object.assign({}, options.request) : {};
+ // Set the agent
+ options.request.agent = agent;
+ }
+ return options;
+ }
+ static getGraphQL(args) {
+ const defaults = {};
+ const token = args[0];
+ const options = args[1];
+ // Authorization
+ const auth = this.getAuthString(token, options);
+ if (auth) {
+ defaults.headers = {
+ authorization: auth
+ };
+ }
+ // Proxy
+ const agent = GitHub.getProxyAgent(options);
+ if (agent) {
+ defaults.request = { agent };
+ }
+ return graphql_1.graphql.defaults(defaults);
+ }
+ static getAuthString(token, options) {
+ // Validate args
+ if (!token && !options.auth) {
+ throw new Error('Parameter token or opts.auth is required');
+ }
+ else if (token && options.auth) {
+ throw new Error('Parameters token and opts.auth may not both be specified');
+ }
+ return typeof options.auth === 'string' ? options.auth : `token ${token}`;
+ }
+ static getProxyAgent(options) {
+ var _a;
+ if (!((_a = options.request) === null || _a === void 0 ? void 0 : _a.agent)) {
+ const serverUrl = 'https://api.github.com';
+ if (httpClient.getProxyUrl(serverUrl)) {
+ const hc = new httpClient.HttpClient();
+ return hc.getAgent(serverUrl);
+ }
+ }
+ return undefined;
+ }
+}
+exports.GitHub = GitHub;
+//# sourceMappingURL=github.js.map
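+// Usage sketch (token variable is hypothetical): `new GitHub(myToken)` builds an
+// authenticated Octokit client — getAuthString() above turns the bare token into
+// the string "token <value>", and getProxyAgent() attaches a proxy agent only when
+// one is configured for https://api.github.com.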
+
+/***/ }),
+
+/***/ 470:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const command_1 = __webpack_require__(431);
+const os = __importStar(__webpack_require__(87));
+const path = __importStar(__webpack_require__(622));
+/**
+ * The code to exit an action
+ */
+var ExitCode;
+(function (ExitCode) {
+ /**
+ * A code indicating that the action was successful
+ */
+ ExitCode[ExitCode["Success"] = 0] = "Success";
+ /**
+ * A code indicating that the action was a failure
+ */
+ ExitCode[ExitCode["Failure"] = 1] = "Failure";
+})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
+//-----------------------------------------------------------------------
+// Variables
+//-----------------------------------------------------------------------
+/**
+ * Sets env variable for this action and future actions in the job
+ * @param name the name of the variable to set
+ * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function exportVariable(name, val) {
+ const convertedVal = command_1.toCommandValue(val);
+ process.env[name] = convertedVal;
+ command_1.issueCommand('set-env', { name }, convertedVal);
+}
+exports.exportVariable = exportVariable;
+/**
+ * Registers a secret which will get masked from logs
+ * @param secret value of the secret
+ */
+function setSecret(secret) {
+ command_1.issueCommand('add-mask', {}, secret);
+}
+exports.setSecret = setSecret;
+/**
+ * Prepends inputPath to the PATH (for this action and future actions)
+ * @param inputPath
+ */
+function addPath(inputPath) {
+ command_1.issueCommand('add-path', {}, inputPath);
+ process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
+}
+exports.addPath = addPath;
+/**
+ * Gets the value of an input. The value is also trimmed.
+ *
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns string
+ */
+function getInput(name, options) {
+ const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
+ if (options && options.required && !val) {
+ throw new Error(`Input required and not supplied: ${name}`);
+ }
+ return val.trim();
+}
+exports.getInput = getInput;
+/**
+ * Sets the value of an output.
+ *
+ * @param name name of the output to set
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function setOutput(name, value) {
+ command_1.issueCommand('set-output', { name }, value);
+}
+exports.setOutput = setOutput;
+/**
+ * Enables or disables the echoing of commands into stdout for the rest of the step.
+ * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
+ *
+ */
+function setCommandEcho(enabled) {
+ command_1.issue('echo', enabled ? 'on' : 'off');
+}
+exports.setCommandEcho = setCommandEcho;
+//-----------------------------------------------------------------------
+// Results
+//-----------------------------------------------------------------------
+/**
+ * Sets the action status to failed.
+ * When the action exits, it will do so with exit code 1
+ * @param message add error issue message
+ */
+function setFailed(message) {
+ process.exitCode = ExitCode.Failure;
+ error(message);
+}
+exports.setFailed = setFailed;
+//-----------------------------------------------------------------------
+// Logging Commands
+//-----------------------------------------------------------------------
+/**
+ * Gets whether Actions Step Debug is on or not
+ */
+function isDebug() {
+ return process.env['RUNNER_DEBUG'] === '1';
+}
+exports.isDebug = isDebug;
+/**
+ * Writes debug message to user log
+ * @param message debug message
+ */
+function debug(message) {
+ command_1.issueCommand('debug', {}, message);
+}
+exports.debug = debug;
+/**
+ * Adds an error issue
+ * @param message error issue message. Errors will be converted to string via toString()
+ */
+function error(message) {
+ command_1.issue('error', message instanceof Error ? message.toString() : message);
+}
+exports.error = error;
+/**
+ * Adds a warning issue
+ * @param message warning issue message. Errors will be converted to string via toString()
+ */
+function warning(message) {
+ command_1.issue('warning', message instanceof Error ? message.toString() : message);
+}
+exports.warning = warning;
+/**
+ * Writes info to log with console.log.
+ * @param message info message
+ */
+function info(message) {
+ process.stdout.write(message + os.EOL);
+}
+exports.info = info;
+/**
+ * Begin an output group.
+ *
+ * Output until the next `groupEnd` will be foldable in this group
+ *
+ * @param name The name of the output group
+ */
+function startGroup(name) {
+ command_1.issue('group', name);
+}
+exports.startGroup = startGroup;
+/**
+ * End an output group.
+ */
+function endGroup() {
+ command_1.issue('endgroup');
+}
+exports.endGroup = endGroup;
+/**
+ * Wrap an asynchronous function call in a group.
+ *
+ * Returns the same type as the function itself.
+ *
+ * @param name The name of the group
+ * @param fn The function to wrap in the group
+ */
+function group(name, fn) {
+ return __awaiter(this, void 0, void 0, function* () {
+ startGroup(name);
+ let result;
+ try {
+ result = yield fn();
+ }
+ finally {
+ endGroup();
+ }
+ return result;
+ });
+}
+exports.group = group;
+//-----------------------------------------------------------------------
+// Wrapper action state
+//-----------------------------------------------------------------------
+/**
+ * Saves state for current action, the state can only be retrieved by this action's post job execution.
+ *
+ * @param name name of the state to store
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function saveState(name, value) {
+ command_1.issueCommand('save-state', { name }, value);
+}
+exports.saveState = saveState;
+/**
+ * Gets the value of a state set by this action's main execution.
+ *
+ * @param name name of the state to get
+ * @returns string
+ */
+function getState(name) {
+ return process.env[`STATE_${name}`] || '';
+}
+exports.getState = getState;
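+/*
+ * Illustrative usage sketch (not part of the generated bundle). The functions above make up
+ * the @actions/core surface used elsewhere in this bundle; a typical pattern, assuming the
+ * action declares a hypothetical input named "version":
+ *
+ *   const version = getInput('version', { required: false });
+ *   await group('install', async () => {
+ *     info(`installing ${version}`);
+ *     setOutput('installed-version', version);
+ *   });
+ */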
+//# sourceMappingURL=core.js.map
+
+/***/ }),
+
+/***/ 489:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const path = __webpack_require__(622);
+const which = __webpack_require__(814);
+const pathKey = __webpack_require__(39)();
+
+function resolveCommandAttempt(parsed, withoutPathExt) {
+ const cwd = process.cwd();
+ const hasCustomCwd = parsed.options.cwd != null;
+
+ // If a custom `cwd` was specified, we need to change the process cwd
+ // because `which` will do stat calls but does not support a custom cwd
+ if (hasCustomCwd) {
+ try {
+ process.chdir(parsed.options.cwd);
+ } catch (err) {
+ /* Empty */
+ }
+ }
+
+ let resolved;
+
+ try {
+ resolved = which.sync(parsed.command, {
+ path: (parsed.options.env || process.env)[pathKey],
+ pathExt: withoutPathExt ? path.delimiter : undefined,
+ });
+ } catch (e) {
+ /* Empty */
+ } finally {
+ process.chdir(cwd);
+ }
+
+ // If we successfully resolved, ensure that an absolute path is returned
+ // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
+ if (resolved) {
+ resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
+ }
+
+ return resolved;
+}
+
+function resolveCommand(parsed) {
+ return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
+}
+
+module.exports = resolveCommand;
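+/*
+ * Illustrative usage sketch (not part of the generated bundle). The helper above expects the
+ * parsed shape that cross-spawn builds - a command plus spawn options - and returns an absolute
+ * path, or undefined when the command cannot be found on PATH:
+ *
+ *   const resolved = resolveCommand({ command: 'node', options: { cwd: process.cwd(), env: process.env } });
+ *   // e.g. '/usr/bin/node' on Linux, or undefined
+ */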
+
+
+/***/ }),
+
+/***/ 510:
+/***/ (function(module) {
+
+module.exports = addHook
+
+function addHook (state, kind, name, hook) {
+ var orig = hook
+ if (!state.registry[name]) {
+ state.registry[name] = []
+ }
+
+ if (kind === 'before') {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(orig.bind(null, options))
+ .then(method.bind(null, options))
+ }
+ }
+
+ if (kind === 'after') {
+ hook = function (method, options) {
+ var result
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .then(function (result_) {
+ result = result_
+ return orig(result, options)
+ })
+ .then(function () {
+ return result
+ })
+ }
+ }
+
+ if (kind === 'error') {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .catch(function (error) {
+ return orig(error, options)
+ })
+ }
+ }
+
+ state.registry[name].push({
+ hook: hook,
+ orig: orig
+ })
+}
+
+
+/***/ }),
+
+/***/ 513:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = factory;
+
+const Octokit = __webpack_require__(135);
+const registerPlugin = __webpack_require__(861);
+
+function factory(plugins) {
+ const Api = Octokit.bind(null, plugins || []);
+ Api.plugin = registerPlugin.bind(null, plugins || []);
+ return Api;
+}
+
+
+/***/ }),
+
+/***/ 514:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const io = __importStar(__webpack_require__(1));
+const tc = __importStar(__webpack_require__(533));
+const installer = __importStar(__webpack_require__(634));
+const path_1 = __importDefault(__webpack_require__(622));
+const child_process_1 = __importDefault(__webpack_require__(129));
+const fs_1 = __importDefault(__webpack_require__(747));
+function run() {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ //
+ // versionSpec is optional. If supplied, install / use from the tool cache
+ // If not supplied then problem matchers will still be setup. Useful for self-hosted.
+ //
+ let versionSpec = core.getInput('go-version');
+ // stable will be true unless false is the exact input
+ // since getting unstable versions should be explicit
+ let stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE';
+ console.log(`Setup go ${stable ? 'stable' : ''} version spec ${versionSpec}`);
+ if (versionSpec) {
+ let installDir = tc.find('go', versionSpec);
+ if (!installDir) {
+ console.log(`A version satisfying ${versionSpec} not found locally, attempting to download ...`);
+ installDir = yield installer.downloadGo(versionSpec, stable);
+ console.log('Installed');
+ }
+ if (installDir) {
+ core.exportVariable('GOROOT', installDir);
+ core.addPath(path_1.default.join(installDir, 'bin'));
+ console.log('Added go to the path');
+ let added = addBinToPath();
+ core.debug(`add bin ${added}`);
+ }
+ else {
+ throw new Error(`Could not find a version that satisfied version spec: ${versionSpec}`);
+ }
+ }
+ // add problem matchers
+ const matchersPath = path_1.default.join(__dirname, '..', 'matchers.json');
+ console.log(`##[add-matcher]${matchersPath}`);
+ // output the version actually being used
+ let goPath = yield io.which('go');
+ let goVersion = (child_process_1.default.execSync(`${goPath} version`) || '').toString();
+ console.log(goVersion);
+ core.startGroup('go env');
+ let goEnv = (child_process_1.default.execSync(`${goPath} env`) || '').toString();
+ console.log(goEnv);
+ core.endGroup();
+ }
+ catch (error) {
+ core.setFailed(error.message);
+ }
+ });
+}
+exports.run = run;
+function addBinToPath() {
+ return __awaiter(this, void 0, void 0, function* () {
+ let added = false;
+ let g = yield io.which('go');
+ _debug(`which go :${g}:`);
+ if (!g) {
+ _debug('go not in the path');
+ return added;
+ }
+ let buf = child_process_1.default.execSync('go env GOPATH');
+ if (buf) {
+ let gp = buf.toString().trim();
+ _debug(`go env GOPATH :${gp}:`);
+ if (!fs_1.default.existsSync(gp)) {
+                // some of the hosted images have go installed but not the profile dir
+ _debug(`creating ${gp}`);
+ io.mkdirP(gp);
+ }
+ let bp = path_1.default.join(gp, 'bin');
+ if (!fs_1.default.existsSync(bp)) {
+ _debug(`creating ${bp}`);
+ io.mkdirP(bp);
+ }
+ core.addPath(bp);
+ added = true;
+ }
+ return added;
+ });
+}
+exports.addBinToPath = addBinToPath;
+function _debug(message) {
+ core.debug(message);
+}
+exports._debug = _debug;
+
+
+/***/ }),
+
+/***/ 523:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var register = __webpack_require__(280)
+var addHook = __webpack_require__(510)
+var removeHook = __webpack_require__(763)
+
+// bind with array of arguments: https://stackoverflow.com/a/21792913
+var bind = Function.bind
+var bindable = bind.bind(bind)
+
+function bindApi (hook, state, name) {
+ var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
+ hook.api = { remove: removeHookRef }
+ hook.remove = removeHookRef
+
+ ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
+ var args = name ? [state, kind, name] : [state, kind]
+ hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
+ })
+}
+
+function HookSingular () {
+ var singularHookName = 'h'
+ var singularHookState = {
+ registry: {}
+ }
+ var singularHook = register.bind(null, singularHookState, singularHookName)
+ bindApi(singularHook, singularHookState, singularHookName)
+ return singularHook
+}
+
+function HookCollection () {
+ var state = {
+ registry: {}
+ }
+
+ var hook = register.bind(null, state)
+ bindApi(hook, state)
+
+ return hook
+}
+
+var collectionHookDeprecationMessageDisplayed = false
+function Hook () {
+ if (!collectionHookDeprecationMessageDisplayed) {
+ console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
+ collectionHookDeprecationMessageDisplayed = true
+ }
+ return HookCollection()
+}
+
+Hook.Singular = HookSingular.bind()
+Hook.Collection = HookCollection.bind()
+
+module.exports = Hook
+// expose constructors as a named property for TypeScript
+module.exports.Hook = Hook
+module.exports.Singular = Hook.Singular
+module.exports.Collection = Hook.Collection
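+/*
+ * Illustrative usage sketch (not part of the generated bundle). A collection hook registers
+ * named wrappers through the bound addHook helper above; the invocation side lives in the
+ * register module (webpack id 280, not shown here), so only registration is sketched:
+ *
+ *   const hook = Hook.Collection();
+ *   hook.before('request', options => { options.startedAt = Date.now(); });
+ *   hook.error('request', (error, options) => { console.error(options); throw error; });
+ */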
+
+
+/***/ }),
+
+/***/ 533:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const io = __importStar(__webpack_require__(1));
+const fs = __importStar(__webpack_require__(747));
+const os = __importStar(__webpack_require__(87));
+const path = __importStar(__webpack_require__(622));
+const httpm = __importStar(__webpack_require__(539));
+const semver = __importStar(__webpack_require__(864));
+const stream = __importStar(__webpack_require__(413));
+const util = __importStar(__webpack_require__(669));
+const v4_1 = __importDefault(__webpack_require__(826));
+const exec_1 = __webpack_require__(986);
+const assert_1 = __webpack_require__(357);
+const retry_helper_1 = __webpack_require__(979);
+class HTTPError extends Error {
+ constructor(httpStatusCode) {
+ super(`Unexpected HTTP response: ${httpStatusCode}`);
+ this.httpStatusCode = httpStatusCode;
+ Object.setPrototypeOf(this, new.target.prototype);
+ }
+}
+exports.HTTPError = HTTPError;
+const IS_WINDOWS = process.platform === 'win32';
+const userAgent = 'actions/tool-cache';
+/**
+ * Download a tool from an url and stream it into a file
+ *
+ * @param url url of tool to download
+ * @param dest path to download tool
+ * @returns path to downloaded tool
+ */
+function downloadTool(url, dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ dest = dest || path.join(_getTempDirectory(), v4_1.default());
+ yield io.mkdirP(path.dirname(dest));
+ core.debug(`Downloading ${url}`);
+ core.debug(`Destination ${dest}`);
+ const maxAttempts = 3;
+ const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
+ const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
+ const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
+ return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
+ return yield downloadToolAttempt(url, dest || '');
+ }), (err) => {
+ if (err instanceof HTTPError && err.httpStatusCode) {
+ // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
+ if (err.httpStatusCode < 500 &&
+ err.httpStatusCode !== 408 &&
+ err.httpStatusCode !== 429) {
+ return false;
+ }
+ }
+ // Otherwise retry
+ return true;
+ });
+ });
+}
+exports.downloadTool = downloadTool;
+function downloadToolAttempt(url, dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (fs.existsSync(dest)) {
+ throw new Error(`Destination file path ${dest} already exists`);
+ }
+ // Get the response headers
+ const http = new httpm.HttpClient(userAgent, [], {
+ allowRetries: false
+ });
+ const response = yield http.get(url);
+ if (response.message.statusCode !== 200) {
+ const err = new HTTPError(response.message.statusCode);
+ core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
+ throw err;
+ }
+ // Download the response body
+ const pipeline = util.promisify(stream.pipeline);
+ const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
+ const readStream = responseMessageFactory();
+ let succeeded = false;
+ try {
+ yield pipeline(readStream, fs.createWriteStream(dest));
+ core.debug('download complete');
+ succeeded = true;
+ return dest;
+ }
+ finally {
+ // Error, delete dest before retry
+ if (!succeeded) {
+ core.debug('download failed');
+ try {
+ yield io.rmRF(dest);
+ }
+ catch (err) {
+ core.debug(`Failed to delete '${dest}'. ${err.message}`);
+ }
+ }
+ }
+ });
+}
+/**
+ * Extract a .7z file
+ *
+ * @param file path to the .7z file
+ * @param dest destination directory. Optional.
+ * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
+ * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
+ * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
+ * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line
+ * interface, it is smaller than the full command line interface, and it does support long paths. At the
+ * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
+ * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
+ * to 7zr.exe can be passed to this function.
+ * @returns path to the destination directory
+ */
+function extract7z(file, dest, _7zPath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
+ assert_1.ok(file, 'parameter "file" is required');
+ dest = yield _createExtractFolder(dest);
+ const originalCwd = process.cwd();
+ process.chdir(dest);
+ if (_7zPath) {
+ try {
+ const args = [
+ 'x',
+ '-bb1',
+ '-bd',
+ '-sccUTF-8',
+ file
+ ];
+ const options = {
+ silent: true
+ };
+ yield exec_1.exec(`"${_7zPath}"`, args, options);
+ }
+ finally {
+ process.chdir(originalCwd);
+ }
+ }
+ else {
+ const escapedScript = path
+ .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
+ .replace(/'/g, "''")
+ .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
+ const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
+ const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
+ const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
+ const args = [
+ '-NoLogo',
+ '-Sta',
+ '-NoProfile',
+ '-NonInteractive',
+ '-ExecutionPolicy',
+ 'Unrestricted',
+ '-Command',
+ command
+ ];
+ const options = {
+ silent: true
+ };
+ try {
+ const powershellPath = yield io.which('powershell', true);
+ yield exec_1.exec(`"${powershellPath}"`, args, options);
+ }
+ finally {
+ process.chdir(originalCwd);
+ }
+ }
+ return dest;
+ });
+}
+exports.extract7z = extract7z;
+/**
+ * Extract a compressed tar archive
+ *
+ * @param file path to the tar
+ * @param dest destination directory. Optional.
+ * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
+ * @returns path to the destination directory
+ */
+function extractTar(file, dest, flags = 'xz') {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!file) {
+ throw new Error("parameter 'file' is required");
+ }
+ // Create dest
+ dest = yield _createExtractFolder(dest);
+ // Determine whether GNU tar
+ core.debug('Checking tar --version');
+ let versionOutput = '';
+ yield exec_1.exec('tar --version', [], {
+ ignoreReturnCode: true,
+ silent: true,
+ listeners: {
+ stdout: (data) => (versionOutput += data.toString()),
+ stderr: (data) => (versionOutput += data.toString())
+ }
+ });
+ core.debug(versionOutput.trim());
+ const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
+ // Initialize args
+ const args = [flags];
+ let destArg = dest;
+ let fileArg = file;
+ if (IS_WINDOWS && isGnuTar) {
+ args.push('--force-local');
+ destArg = dest.replace(/\\/g, '/');
+ // Technically only the dest needs to have `/` but for aesthetic consistency
+ // convert slashes in the file arg too.
+ fileArg = file.replace(/\\/g, '/');
+ }
+ if (isGnuTar) {
+ // Suppress warnings when using GNU tar to extract archives created by BSD tar
+ args.push('--warning=no-unknown-keyword');
+ }
+ args.push('-C', destArg, '-f', fileArg);
+ yield exec_1.exec(`tar`, args);
+ return dest;
+ });
+}
+exports.extractTar = extractTar;
+/**
+ * Extract a zip
+ *
+ * @param file path to the zip
+ * @param dest destination directory. Optional.
+ * @returns path to the destination directory
+ */
+function extractZip(file, dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!file) {
+ throw new Error("parameter 'file' is required");
+ }
+ dest = yield _createExtractFolder(dest);
+ if (IS_WINDOWS) {
+ yield extractZipWin(file, dest);
+ }
+ else {
+ yield extractZipNix(file, dest);
+ }
+ return dest;
+ });
+}
+exports.extractZip = extractZip;
+function extractZipWin(file, dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // build the powershell command
+ const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
+ const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
+ const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
+ // run powershell
+ const powershellPath = yield io.which('powershell');
+ const args = [
+ '-NoLogo',
+ '-Sta',
+ '-NoProfile',
+ '-NonInteractive',
+ '-ExecutionPolicy',
+ 'Unrestricted',
+ '-Command',
+ command
+ ];
+ yield exec_1.exec(`"${powershellPath}"`, args);
+ });
+}
+function extractZipNix(file, dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const unzipPath = yield io.which('unzip');
+ yield exec_1.exec(`"${unzipPath}"`, [file], { cwd: dest });
+ });
+}
+/**
+ * Caches a directory and installs it into the tool cacheDir
+ *
+ * @param sourceDir the directory to cache into tools
+ * @param tool tool name
+ * @param version version of the tool. semver format
+ * @param arch architecture of the tool. Optional. Defaults to machine architecture
+ */
+function cacheDir(sourceDir, tool, version, arch) {
+ return __awaiter(this, void 0, void 0, function* () {
+ version = semver.clean(version) || version;
+ arch = arch || os.arch();
+ core.debug(`Caching tool ${tool} ${version} ${arch}`);
+ core.debug(`source dir: ${sourceDir}`);
+ if (!fs.statSync(sourceDir).isDirectory()) {
+ throw new Error('sourceDir is not a directory');
+ }
+ // Create the tool dir
+ const destPath = yield _createToolPath(tool, version, arch);
+ // copy each child item. do not move. move can fail on Windows
+ // due to anti-virus software having an open handle on a file.
+ for (const itemName of fs.readdirSync(sourceDir)) {
+ const s = path.join(sourceDir, itemName);
+ yield io.cp(s, destPath, { recursive: true });
+ }
+ // write .complete
+ _completeToolPath(tool, version, arch);
+ return destPath;
+ });
+}
+exports.cacheDir = cacheDir;
+/**
+ * Caches a downloaded file (GUID) and installs it
+ * into the tool cache with a given targetName
+ *
+ * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
+ * @param targetFile the name of the file in the tools directory
+ * @param tool tool name
+ * @param version version of the tool. semver format
+ * @param arch architecture of the tool. Optional. Defaults to machine architecture
+ */
+function cacheFile(sourceFile, targetFile, tool, version, arch) {
+ return __awaiter(this, void 0, void 0, function* () {
+ version = semver.clean(version) || version;
+ arch = arch || os.arch();
+ core.debug(`Caching tool ${tool} ${version} ${arch}`);
+ core.debug(`source file: ${sourceFile}`);
+ if (!fs.statSync(sourceFile).isFile()) {
+ throw new Error('sourceFile is not a file');
+ }
+ // create the tool dir
+ const destFolder = yield _createToolPath(tool, version, arch);
+ // copy instead of move. move can fail on Windows due to
+ // anti-virus software having an open handle on a file.
+ const destPath = path.join(destFolder, targetFile);
+ core.debug(`destination file ${destPath}`);
+ yield io.cp(sourceFile, destPath);
+ // write .complete
+ _completeToolPath(tool, version, arch);
+ return destFolder;
+ });
+}
+exports.cacheFile = cacheFile;
+/**
+ * Finds the path to a tool version in the local installed tool cache
+ *
+ * @param toolName name of the tool
+ * @param versionSpec version of the tool
+ * @param arch optional arch. defaults to arch of computer
+ */
+function find(toolName, versionSpec, arch) {
+ if (!toolName) {
+ throw new Error('toolName parameter is required');
+ }
+ if (!versionSpec) {
+ throw new Error('versionSpec parameter is required');
+ }
+ arch = arch || os.arch();
+ // attempt to resolve an explicit version
+ if (!_isExplicitVersion(versionSpec)) {
+ const localVersions = findAllVersions(toolName, arch);
+ const match = _evaluateVersions(localVersions, versionSpec);
+ versionSpec = match;
+ }
+ // check for the explicit version in the cache
+ let toolPath = '';
+ if (versionSpec) {
+ versionSpec = semver.clean(versionSpec) || '';
+ const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);
+ core.debug(`checking cache: ${cachePath}`);
+ if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
+ core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
+ toolPath = cachePath;
+ }
+ else {
+ core.debug('not found');
+ }
+ }
+ return toolPath;
+}
+exports.find = find;
+/**
+ * Finds the paths to all versions of a tool that are installed in the local tool cache
+ *
+ * @param toolName name of the tool
+ * @param arch optional arch. defaults to arch of computer
+ */
+function findAllVersions(toolName, arch) {
+ const versions = [];
+ arch = arch || os.arch();
+ const toolPath = path.join(_getCacheDirectory(), toolName);
+ if (fs.existsSync(toolPath)) {
+ const children = fs.readdirSync(toolPath);
+ for (const child of children) {
+ if (_isExplicitVersion(child)) {
+ const fullPath = path.join(toolPath, child, arch || '');
+ if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
+ versions.push(child);
+ }
+ }
+ }
+ }
+ return versions;
+}
+exports.findAllVersions = findAllVersions;
+function _createExtractFolder(dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!dest) {
+ // create a temp dir
+ dest = path.join(_getTempDirectory(), v4_1.default());
+ }
+ yield io.mkdirP(dest);
+ return dest;
+ });
+}
+function _createToolPath(tool, version, arch) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
+ core.debug(`destination ${folderPath}`);
+ const markerPath = `${folderPath}.complete`;
+ yield io.rmRF(folderPath);
+ yield io.rmRF(markerPath);
+ yield io.mkdirP(folderPath);
+ return folderPath;
+ });
+}
+function _completeToolPath(tool, version, arch) {
+ const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
+ const markerPath = `${folderPath}.complete`;
+ fs.writeFileSync(markerPath, '');
+ core.debug('finished caching tool');
+}
+function _isExplicitVersion(versionSpec) {
+ const c = semver.clean(versionSpec) || '';
+ core.debug(`isExplicit: ${c}`);
+ const valid = semver.valid(c) != null;
+ core.debug(`explicit? ${valid}`);
+ return valid;
+}
+function _evaluateVersions(versions, versionSpec) {
+ let version = '';
+ core.debug(`evaluating ${versions.length} versions`);
+ versions = versions.sort((a, b) => {
+ if (semver.gt(a, b)) {
+ return 1;
+ }
+ return -1;
+ });
+ for (let i = versions.length - 1; i >= 0; i--) {
+ const potential = versions[i];
+ const satisfied = semver.satisfies(potential, versionSpec);
+ if (satisfied) {
+ version = potential;
+ break;
+ }
+ }
+ if (version) {
+ core.debug(`matched: ${version}`);
+ }
+ else {
+ core.debug('match not found');
+ }
+ return version;
+}
+/**
+ * Gets RUNNER_TOOL_CACHE
+ */
+function _getCacheDirectory() {
+ const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';
+ assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');
+ return cacheDirectory;
+}
+/**
+ * Gets RUNNER_TEMP
+ */
+function _getTempDirectory() {
+ const tempDirectory = process.env['RUNNER_TEMP'] || '';
+ assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
+ return tempDirectory;
+}
+/**
+ * Gets a global variable
+ */
+function _getGlobal(key, defaultValue) {
+ /* eslint-disable @typescript-eslint/no-explicit-any */
+ const value = global[key];
+ /* eslint-enable @typescript-eslint/no-explicit-any */
+ return value !== undefined ? value : defaultValue;
+}
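+/*
+ * Illustrative usage sketch (not part of the generated bundle). find() returns '' when no cached
+ * version under RUNNER_TOOL_CACHE satisfies the spec, so a typical cache-then-download flow with
+ * the helpers defined above looks like this (tool name, spec, and URL are hypothetical):
+ *
+ *   let toolPath = find('mytool', '1.x');
+ *   if (!toolPath) {
+ *     const archive = await downloadTool('https://example.com/mytool-1.0.0.tar.gz');
+ *     toolPath = await cacheDir(await extractTar(archive), 'mytool', '1.0.0');
+ *   }
+ */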
+//# sourceMappingURL=tool-cache.js.map
+
+/***/ }),
+
+/***/ 536:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = hasFirstPage
+
+const deprecate = __webpack_require__(370)
+const getPageLinks = __webpack_require__(577)
+
+function hasFirstPage (link) {
+ deprecate(`octokit.hasFirstPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
+ return getPageLinks(link).first
+}
+
+
+/***/ }),
+
+/***/ 538:
+/***/ (function(module) {
+
+module.exports = validateAuth;
+
+function validateAuth(auth) {
+ if (typeof auth === "string") {
+ return;
+ }
+
+ if (typeof auth === "function") {
+ return;
+ }
+
+ if (auth.username && auth.password) {
+ return;
+ }
+
+ if (auth.clientId && auth.clientSecret) {
+ return;
+ }
+
+ throw new Error(`Invalid "auth" option: ${JSON.stringify(auth)}`);
+}
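+/*
+ * Illustrative sketch (not part of the generated bundle) of the "auth" shapes accepted by the
+ * checks above:
+ *
+ *   validateAuth('token ghp_xxx');                            // token string
+ *   validateAuth(() => 'token ghp_xxx');                      // function returning a token
+ *   validateAuth({ username: 'user', password: 'pass' });     // basic auth
+ *   validateAuth({ clientId: 'id', clientSecret: 'secret' }); // OAuth app credentials
+ */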
+
+
+/***/ }),
+
+/***/ 539:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const url = __webpack_require__(835);
+const http = __webpack_require__(605);
+const https = __webpack_require__(34);
+const pm = __webpack_require__(950);
+let tunnel;
+var HttpCodes;
+(function (HttpCodes) {
+ HttpCodes[HttpCodes["OK"] = 200] = "OK";
+ HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
+ HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
+ HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
+ HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
+ HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
+ HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
+ HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
+ HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
+ HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
+ HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
+ HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
+ HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
+ HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
+ HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
+ HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
+ HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
+ HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
+ HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
+ HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
+ HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
+ HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
+ HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
+ HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
+ HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
+ HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
+ HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
+})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
+var Headers;
+(function (Headers) {
+ Headers["Accept"] = "accept";
+ Headers["ContentType"] = "content-type";
+})(Headers = exports.Headers || (exports.Headers = {}));
+var MediaTypes;
+(function (MediaTypes) {
+ MediaTypes["ApplicationJson"] = "application/json";
+})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
+/**
+ * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+function getProxyUrl(serverUrl) {
+ let proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
+ return proxyUrl ? proxyUrl.href : '';
+}
+exports.getProxyUrl = getProxyUrl;
+const HttpRedirectCodes = [
+ HttpCodes.MovedPermanently,
+ HttpCodes.ResourceMoved,
+ HttpCodes.SeeOther,
+ HttpCodes.TemporaryRedirect,
+ HttpCodes.PermanentRedirect
+];
+const HttpResponseRetryCodes = [
+ HttpCodes.BadGateway,
+ HttpCodes.ServiceUnavailable,
+ HttpCodes.GatewayTimeout
+];
+const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
+const ExponentialBackoffCeiling = 10;
+const ExponentialBackoffTimeSlice = 5;
+class HttpClientResponse {
+ constructor(message) {
+ this.message = message;
+ }
+ readBody() {
+ return new Promise(async (resolve, reject) => {
+ let output = Buffer.alloc(0);
+ this.message.on('data', (chunk) => {
+ output = Buffer.concat([output, chunk]);
+ });
+ this.message.on('end', () => {
+ resolve(output.toString());
+ });
+ });
+ }
+}
+exports.HttpClientResponse = HttpClientResponse;
+function isHttps(requestUrl) {
+ let parsedUrl = url.parse(requestUrl);
+ return parsedUrl.protocol === 'https:';
+}
+exports.isHttps = isHttps;
+class HttpClient {
+ constructor(userAgent, handlers, requestOptions) {
+ this._ignoreSslError = false;
+ this._allowRedirects = true;
+ this._allowRedirectDowngrade = false;
+ this._maxRedirects = 50;
+ this._allowRetries = false;
+ this._maxRetries = 1;
+ this._keepAlive = false;
+ this._disposed = false;
+ this.userAgent = userAgent;
+ this.handlers = handlers || [];
+ this.requestOptions = requestOptions;
+ if (requestOptions) {
+ if (requestOptions.ignoreSslError != null) {
+ this._ignoreSslError = requestOptions.ignoreSslError;
+ }
+ this._socketTimeout = requestOptions.socketTimeout;
+ if (requestOptions.allowRedirects != null) {
+ this._allowRedirects = requestOptions.allowRedirects;
+ }
+ if (requestOptions.allowRedirectDowngrade != null) {
+ this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
+ }
+ if (requestOptions.maxRedirects != null) {
+ this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
+ }
+ if (requestOptions.keepAlive != null) {
+ this._keepAlive = requestOptions.keepAlive;
+ }
+ if (requestOptions.allowRetries != null) {
+ this._allowRetries = requestOptions.allowRetries;
+ }
+ if (requestOptions.maxRetries != null) {
+ this._maxRetries = requestOptions.maxRetries;
+ }
+ }
+ }
+ options(requestUrl, additionalHeaders) {
+ return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
+ }
+ get(requestUrl, additionalHeaders) {
+ return this.request('GET', requestUrl, null, additionalHeaders || {});
+ }
+ del(requestUrl, additionalHeaders) {
+ return this.request('DELETE', requestUrl, null, additionalHeaders || {});
+ }
+ post(requestUrl, data, additionalHeaders) {
+ return this.request('POST', requestUrl, data, additionalHeaders || {});
+ }
+ patch(requestUrl, data, additionalHeaders) {
+ return this.request('PATCH', requestUrl, data, additionalHeaders || {});
+ }
+ put(requestUrl, data, additionalHeaders) {
+ return this.request('PUT', requestUrl, data, additionalHeaders || {});
+ }
+ head(requestUrl, additionalHeaders) {
+ return this.request('HEAD', requestUrl, null, additionalHeaders || {});
+ }
+ sendStream(verb, requestUrl, stream, additionalHeaders) {
+ return this.request(verb, requestUrl, stream, additionalHeaders);
+ }
+ /**
+ * Gets a typed object from an endpoint
+     * Be aware that a 404 Not Found returns a null result. Other errors (4xx, 5xx) reject the promise
+ */
+ async getJson(requestUrl, additionalHeaders = {}) {
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ let res = await this.get(requestUrl, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ async postJson(requestUrl, obj, additionalHeaders = {}) {
+ let data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ let res = await this.post(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ async putJson(requestUrl, obj, additionalHeaders = {}) {
+ let data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ let res = await this.put(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ async patchJson(requestUrl, obj, additionalHeaders = {}) {
+ let data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ let res = await this.patch(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ /**
+ * Makes a raw http request.
+ * All other methods such as get, post, patch, and request ultimately call this.
+ * Prefer get, del, post and patch
+ */
+ async request(verb, requestUrl, data, headers) {
+ if (this._disposed) {
+ throw new Error('Client has already been disposed.');
+ }
+ let parsedUrl = url.parse(requestUrl);
+ let info = this._prepareRequest(verb, parsedUrl, headers);
+ // Only perform retries on reads since writes may not be idempotent.
+ let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
+ ? this._maxRetries + 1
+ : 1;
+ let numTries = 0;
+ let response;
+ while (numTries < maxTries) {
+ response = await this.requestRaw(info, data);
+ // Check if it's an authentication challenge
+ if (response &&
+ response.message &&
+ response.message.statusCode === HttpCodes.Unauthorized) {
+ let authenticationHandler;
+ for (let i = 0; i < this.handlers.length; i++) {
+ if (this.handlers[i].canHandleAuthentication(response)) {
+ authenticationHandler = this.handlers[i];
+ break;
+ }
+ }
+ if (authenticationHandler) {
+ return authenticationHandler.handleAuthentication(this, info, data);
+ }
+ else {
+ // We have received an unauthorized response but have no handlers to handle it.
+ // Let the response return to the caller.
+ return response;
+ }
+ }
+ let redirectsRemaining = this._maxRedirects;
+ while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
+ this._allowRedirects &&
+ redirectsRemaining > 0) {
+ const redirectUrl = response.message.headers['location'];
+ if (!redirectUrl) {
+ // if there's no location to redirect to, we won't
+ break;
+ }
+ let parsedRedirectUrl = url.parse(redirectUrl);
+ if (parsedUrl.protocol == 'https:' &&
+ parsedUrl.protocol != parsedRedirectUrl.protocol &&
+ !this._allowRedirectDowngrade) {
+ throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
+ }
+                // we need to finish reading the response before reassigning it;
+                // otherwise the open socket will leak.
+ await response.readBody();
+ // strip authorization header if redirected to a different hostname
+ if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
+ for (let header in headers) {
+ // header names are case insensitive
+ if (header.toLowerCase() === 'authorization') {
+ delete headers[header];
+ }
+ }
+ }
+ // let's make the request with the new redirectUrl
+ info = this._prepareRequest(verb, parsedRedirectUrl, headers);
+ response = await this.requestRaw(info, data);
+ redirectsRemaining--;
+ }
+ if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
+ // If not a retry code, return immediately instead of retrying
+ return response;
+ }
+ numTries += 1;
+ if (numTries < maxTries) {
+ await response.readBody();
+ await this._performExponentialBackoff(numTries);
+ }
+ }
+ return response;
+ }
+ /**
+ * Needs to be called if keepAlive is set to true in request options.
+ */
+ dispose() {
+ if (this._agent) {
+ this._agent.destroy();
+ }
+ this._disposed = true;
+ }
+ /**
+ * Raw request.
+ * @param info
+ * @param data
+ */
+ requestRaw(info, data) {
+ return new Promise((resolve, reject) => {
+ let callbackForResult = function (err, res) {
+ if (err) {
+ reject(err);
+ }
+ resolve(res);
+ };
+ this.requestRawWithCallback(info, data, callbackForResult);
+ });
+ }
+ /**
+ * Raw request with callback.
+ * @param info
+ * @param data
+ * @param onResult
+ */
+ requestRawWithCallback(info, data, onResult) {
+ let socket;
+ if (typeof data === 'string') {
+ info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
+ }
+ let callbackCalled = false;
+ let handleResult = (err, res) => {
+ if (!callbackCalled) {
+ callbackCalled = true;
+ onResult(err, res);
+ }
+ };
+ let req = info.httpModule.request(info.options, (msg) => {
+ let res = new HttpClientResponse(msg);
+ handleResult(null, res);
+ });
+ req.on('socket', sock => {
+ socket = sock;
+ });
+ // If we ever get disconnected, we want the socket to timeout eventually
+ req.setTimeout(this._socketTimeout || 3 * 60000, () => {
+ if (socket) {
+ socket.end();
+ }
+ handleResult(new Error('Request timeout: ' + info.options.path), null);
+ });
+ req.on('error', function (err) {
+ // err has statusCode property
+ // res should have headers
+ handleResult(err, null);
+ });
+ if (data && typeof data === 'string') {
+ req.write(data, 'utf8');
+ }
+ if (data && typeof data !== 'string') {
+ data.on('close', function () {
+ req.end();
+ });
+ data.pipe(req);
+ }
+ else {
+ req.end();
+ }
+ }
+ /**
+ * Gets an http agent. This function is useful when you need an http agent that handles
+ * routing through a proxy server - depending upon the url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+ getAgent(serverUrl) {
+ let parsedUrl = url.parse(serverUrl);
+ return this._getAgent(parsedUrl);
+ }
+ _prepareRequest(method, requestUrl, headers) {
+ const info = {};
+ info.parsedUrl = requestUrl;
+ const usingSsl = info.parsedUrl.protocol === 'https:';
+ info.httpModule = usingSsl ? https : http;
+ const defaultPort = usingSsl ? 443 : 80;
+ info.options = {};
+ info.options.host = info.parsedUrl.hostname;
+ info.options.port = info.parsedUrl.port
+ ? parseInt(info.parsedUrl.port)
+ : defaultPort;
+ info.options.path =
+ (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
+ info.options.method = method;
+ info.options.headers = this._mergeHeaders(headers);
+ if (this.userAgent != null) {
+ info.options.headers['user-agent'] = this.userAgent;
+ }
+ info.options.agent = this._getAgent(info.parsedUrl);
+ // gives handlers an opportunity to participate
+ if (this.handlers) {
+ this.handlers.forEach(handler => {
+ handler.prepareRequest(info.options);
+ });
+ }
+ return info;
+ }
+ _mergeHeaders(headers) {
+ const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+ if (this.requestOptions && this.requestOptions.headers) {
+ return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
+ }
+ return lowercaseKeys(headers || {});
+ }
+ _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
+ const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+ let clientHeader;
+ if (this.requestOptions && this.requestOptions.headers) {
+ clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
+ }
+ return additionalHeaders[header] || clientHeader || _default;
+ }
+ _getAgent(parsedUrl) {
+ let agent;
+ let proxyUrl = pm.getProxyUrl(parsedUrl);
+ let useProxy = proxyUrl && proxyUrl.hostname;
+ if (this._keepAlive && useProxy) {
+ agent = this._proxyAgent;
+ }
+ if (this._keepAlive && !useProxy) {
+ agent = this._agent;
+ }
+ // if agent is already assigned use that agent.
+ if (!!agent) {
+ return agent;
+ }
+ const usingSsl = parsedUrl.protocol === 'https:';
+ let maxSockets = 100;
+ if (!!this.requestOptions) {
+ maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
+ }
+ if (useProxy) {
+ // If using proxy, need tunnel
+ if (!tunnel) {
+ tunnel = __webpack_require__(856);
+ }
+ const agentOptions = {
+ maxSockets: maxSockets,
+ keepAlive: this._keepAlive,
+ proxy: {
+ proxyAuth: proxyUrl.auth,
+ host: proxyUrl.hostname,
+ port: proxyUrl.port
+ }
+ };
+ let tunnelAgent;
+ const overHttps = proxyUrl.protocol === 'https:';
+ if (usingSsl) {
+ tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
+ }
+ else {
+ tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
+ }
+ agent = tunnelAgent(agentOptions);
+ this._proxyAgent = agent;
+ }
+ // if reusing agent across request and tunneling agent isn't assigned create a new agent
+ if (this._keepAlive && !agent) {
+ const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
+ agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
+ this._agent = agent;
+ }
+ // if not using private agent and tunnel agent isn't setup then use global agent
+ if (!agent) {
+ agent = usingSsl ? https.globalAgent : http.globalAgent;
+ }
+ if (usingSsl && this._ignoreSslError) {
+            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect requests for the entire process
+ // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+ // we have to cast it to any and change it directly
+ agent.options = Object.assign(agent.options || {}, {
+ rejectUnauthorized: false
+ });
+ }
+ return agent;
+ }
+ _performExponentialBackoff(retryNumber) {
+ retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
+ const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
+ return new Promise(resolve => setTimeout(() => resolve(), ms));
+ }
+ static dateTimeDeserializer(key, value) {
+ if (typeof value === 'string') {
+ let a = new Date(value);
+ if (!isNaN(a.valueOf())) {
+ return a;
+ }
+ }
+ return value;
+ }
+ async _processResponse(res, options) {
+ return new Promise(async (resolve, reject) => {
+ const statusCode = res.message.statusCode;
+ const response = {
+ statusCode: statusCode,
+ result: null,
+ headers: {}
+ };
+ // not found leads to null obj returned
+ if (statusCode == HttpCodes.NotFound) {
+ resolve(response);
+ }
+ let obj;
+ let contents;
+ // get the result from the body
+ try {
+ contents = await res.readBody();
+ if (contents && contents.length > 0) {
+ if (options && options.deserializeDates) {
+ obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
+ }
+ else {
+ obj = JSON.parse(contents);
+ }
+ response.result = obj;
+ }
+ response.headers = res.message.headers;
+ }
+ catch (err) {
+ // Invalid resource (contents not json); leaving result obj null
+ }
+ // note that 3xx redirects are handled by the http layer.
+ if (statusCode > 299) {
+ let msg;
+ // if exception/error in body, attempt to get better error
+ if (obj && obj.message) {
+ msg = obj.message;
+ }
+ else if (contents && contents.length > 0) {
+ // it may be the case that the exception is in the body message as string
+ msg = contents;
+ }
+ else {
+ msg = 'Failed request: (' + statusCode + ')';
+ }
+ let err = new Error(msg);
+ // attach statusCode and body obj (if available) to the error object
+ err['statusCode'] = statusCode;
+ if (response.result) {
+ err['result'] = response.result;
+ }
+ reject(err);
+ }
+ else {
+ resolve(response);
+ }
+ });
+ }
+}
+exports.HttpClient = HttpClient;
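+/*
+ * Illustrative usage sketch (not part of the generated bundle). The client above handles proxies,
+ * redirects, and optional retries based on the request options; getJson resolves with
+ * { statusCode, result, headers } and yields a null result for 404 responses (URL is hypothetical):
+ *
+ *   const client = new HttpClient('my-user-agent', [], { allowRetries: true, maxRetries: 2 });
+ *   const res = await client.getJson('https://api.example.com/data');
+ *   if (res.result) { console.log(res.statusCode, res.result); }
+ */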
+
+
+/***/ }),
+
+/***/ 550:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = getNextPage
+
+const getPage = __webpack_require__(265)
+
+function getNextPage (octokit, link, headers) {
+ return getPage(octokit, link, 'next', headers)
+}
+
+
+/***/ }),
+
+/***/ 558:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = hasPreviousPage
+
+const deprecate = __webpack_require__(370)
+const getPageLinks = __webpack_require__(577)
+
+function hasPreviousPage (link) {
+ deprecate(`octokit.hasPreviousPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
+ return getPageLinks(link).prev
+}
+
+
+/***/ }),
+
+/***/ 559:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+var deprecation = __webpack_require__(692);
+
+var endpointsByScope = {
+ actions: {
+ cancelWorkflowRun: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id/cancel"
+ },
+ createOrUpdateSecretForRepo: {
+ method: "PUT",
+ params: {
+ encrypted_value: {
+ type: "string"
+ },
+ key_id: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/secrets/:name"
+ },
+ createRegistrationToken: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners/registration-token"
+ },
+ createRemoveToken: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners/remove-token"
+ },
+ deleteArtifact: {
+ method: "DELETE",
+ params: {
+ artifact_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/artifacts/:artifact_id"
+ },
+ deleteSecretFromRepo: {
+ method: "DELETE",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/secrets/:name"
+ },
+ downloadArtifact: {
+ method: "GET",
+ params: {
+ archive_format: {
+ required: true,
+ type: "string"
+ },
+ artifact_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/artifacts/:artifact_id/:archive_format"
+ },
+ getArtifact: {
+ method: "GET",
+ params: {
+ artifact_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/artifacts/:artifact_id"
+ },
+ getPublicKey: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/secrets/public-key"
+ },
+ getSecret: {
+ method: "GET",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/secrets/:name"
+ },
+ getSelfHostedRunner: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ runner_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners/:runner_id"
+ },
+ getWorkflow: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ workflow_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/workflows/:workflow_id"
+ },
+ getWorkflowJob: {
+ method: "GET",
+ params: {
+ job_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/jobs/:job_id"
+ },
+ getWorkflowRun: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id"
+ },
+ listDownloadsForSelfHostedRunnerApplication: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners/downloads"
+ },
+ listJobsForWorkflowRun: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id/jobs"
+ },
+ listRepoWorkflowRuns: {
+ method: "GET",
+ params: {
+ actor: {
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ event: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ status: {
+ enum: ["completed", "status", "conclusion"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs"
+ },
+ listRepoWorkflows: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/workflows"
+ },
+ listSecretsForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/secrets"
+ },
+ listSelfHostedRunnersForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners"
+ },
+ listWorkflowJobLogs: {
+ method: "GET",
+ params: {
+ job_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/jobs/:job_id/logs"
+ },
+ listWorkflowRunArtifacts: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id/artifacts"
+ },
+ listWorkflowRunLogs: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id/logs"
+ },
+ listWorkflowRuns: {
+ method: "GET",
+ params: {
+ actor: {
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ event: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ status: {
+ enum: ["completed", "status", "conclusion"],
+ type: "string"
+ },
+ workflow_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/workflows/:workflow_id/runs"
+ },
+ reRunWorkflow: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id/rerun"
+ },
+ removeSelfHostedRunner: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ runner_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners/:runner_id"
+ }
+ },
+ activity: {
+ checkStarringRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/starred/:owner/:repo"
+ },
+ deleteRepoSubscription: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/subscription"
+ },
+ deleteThreadSubscription: {
+ method: "DELETE",
+ params: {
+ thread_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/notifications/threads/:thread_id/subscription"
+ },
+ getRepoSubscription: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/subscription"
+ },
+ getThread: {
+ method: "GET",
+ params: {
+ thread_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/notifications/threads/:thread_id"
+ },
+ getThreadSubscription: {
+ method: "GET",
+ params: {
+ thread_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/notifications/threads/:thread_id/subscription"
+ },
+ listEventsForOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/events/orgs/:org"
+ },
+ listEventsForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/events"
+ },
+ listFeeds: {
+ method: "GET",
+ params: {},
+ url: "/feeds"
+ },
+ listNotifications: {
+ method: "GET",
+ params: {
+ all: {
+ type: "boolean"
+ },
+ before: {
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ participating: {
+ type: "boolean"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/notifications"
+ },
+ listNotificationsForRepo: {
+ method: "GET",
+ params: {
+ all: {
+ type: "boolean"
+ },
+ before: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ participating: {
+ type: "boolean"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/notifications"
+ },
+ listPublicEvents: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/events"
+ },
+ listPublicEventsForOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/events"
+ },
+ listPublicEventsForRepoNetwork: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/networks/:owner/:repo/events"
+ },
+ listPublicEventsForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/events/public"
+ },
+ listReceivedEventsForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/received_events"
+ },
+ listReceivedPublicEventsForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/received_events/public"
+ },
+ listRepoEvents: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/events"
+ },
+ listReposStarredByAuthenticatedUser: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/user/starred"
+ },
+ listReposStarredByUser: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/starred"
+ },
+ listReposWatchedByUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/subscriptions"
+ },
+ listStargazersForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stargazers"
+ },
+ listWatchedReposForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/subscriptions"
+ },
+ listWatchersForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/subscribers"
+ },
+ markAsRead: {
+ method: "PUT",
+ params: {
+ last_read_at: {
+ type: "string"
+ }
+ },
+ url: "/notifications"
+ },
+ markNotificationsAsReadForRepo: {
+ method: "PUT",
+ params: {
+ last_read_at: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/notifications"
+ },
+ markThreadAsRead: {
+ method: "PATCH",
+ params: {
+ thread_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/notifications/threads/:thread_id"
+ },
+ setRepoSubscription: {
+ method: "PUT",
+ params: {
+ ignored: {
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ subscribed: {
+ type: "boolean"
+ }
+ },
+ url: "/repos/:owner/:repo/subscription"
+ },
+ setThreadSubscription: {
+ method: "PUT",
+ params: {
+ ignored: {
+ type: "boolean"
+ },
+ thread_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/notifications/threads/:thread_id/subscription"
+ },
+ starRepo: {
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/starred/:owner/:repo"
+ },
+ unstarRepo: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/starred/:owner/:repo"
+ }
+ },
+ apps: {
+ addRepoToInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "PUT",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ },
+ repository_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/installations/:installation_id/repositories/:repository_id"
+ },
+ checkAccountIsAssociatedWithAny: {
+ method: "GET",
+ params: {
+ account_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/marketplace_listing/accounts/:account_id"
+ },
+ checkAccountIsAssociatedWithAnyStubbed: {
+ method: "GET",
+ params: {
+ account_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/marketplace_listing/stubbed/accounts/:account_id"
+ },
+ checkAuthorization: {
+ deprecated: "octokit.apps.checkAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#check-an-authorization",
+ method: "GET",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ checkToken: {
+ headers: {
+ accept: "application/vnd.github.doctor-strange-preview+json"
+ },
+ method: "POST",
+ params: {
+ access_token: {
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/token"
+ },
+ createContentAttachment: {
+ headers: {
+ accept: "application/vnd.github.corsair-preview+json"
+ },
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ content_reference_id: {
+ required: true,
+ type: "integer"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/content_references/:content_reference_id/attachments"
+ },
+ createFromManifest: {
+ headers: {
+ accept: "application/vnd.github.fury-preview+json"
+ },
+ method: "POST",
+ params: {
+ code: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/app-manifests/:code/conversions"
+ },
+ createInstallationToken: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "POST",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ },
+ permissions: {
+ type: "object"
+ },
+ repository_ids: {
+ type: "integer[]"
+ }
+ },
+ url: "/app/installations/:installation_id/access_tokens"
+ },
+ deleteAuthorization: {
+ headers: {
+ accept: "application/vnd.github.doctor-strange-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ access_token: {
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/grant"
+ },
+ deleteInstallation: {
+ headers: {
+ accept: "application/vnd.github.gambit-preview+json,application/vnd.github.machine-man-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/app/installations/:installation_id"
+ },
+ deleteToken: {
+ headers: {
+ accept: "application/vnd.github.doctor-strange-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ access_token: {
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/token"
+ },
+ findOrgInstallation: {
+ deprecated: "octokit.apps.findOrgInstallation() has been renamed to octokit.apps.getOrgInstallation() (2019-04-10)",
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/installation"
+ },
+ findRepoInstallation: {
+ deprecated: "octokit.apps.findRepoInstallation() has been renamed to octokit.apps.getRepoInstallation() (2019-04-10)",
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/installation"
+ },
+ findUserInstallation: {
+ deprecated: "octokit.apps.findUserInstallation() has been renamed to octokit.apps.getUserInstallation() (2019-04-10)",
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/installation"
+ },
+ getAuthenticated: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {},
+ url: "/app"
+ },
+ getBySlug: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ app_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/apps/:app_slug"
+ },
+ getInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/app/installations/:installation_id"
+ },
+ getOrgInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/installation"
+ },
+ getRepoInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/installation"
+ },
+ getUserInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/installation"
+ },
+ listAccountsUserOrOrgOnPlan: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ plan_id: {
+ required: true,
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/marketplace_listing/plans/:plan_id/accounts"
+ },
+ listAccountsUserOrOrgOnPlanStubbed: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ plan_id: {
+ required: true,
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/marketplace_listing/stubbed/plans/:plan_id/accounts"
+ },
+ listInstallationReposForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/installations/:installation_id/repositories"
+ },
+ listInstallations: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/app/installations"
+ },
+ listInstallationsForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/installations"
+ },
+ listMarketplacePurchasesForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/marketplace_purchases"
+ },
+ listMarketplacePurchasesForAuthenticatedUserStubbed: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/marketplace_purchases/stubbed"
+ },
+ listPlans: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/marketplace_listing/plans"
+ },
+ listPlansStubbed: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/marketplace_listing/stubbed/plans"
+ },
+ listRepos: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/installation/repositories"
+ },
+ removeRepoFromInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ },
+ repository_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/installations/:installation_id/repositories/:repository_id"
+ },
+ resetAuthorization: {
+ deprecated: "octokit.apps.resetAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#reset-an-authorization",
+ method: "POST",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ resetToken: {
+ headers: {
+ accept: "application/vnd.github.doctor-strange-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ access_token: {
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/token"
+ },
+ revokeAuthorizationForApplication: {
+ deprecated: "octokit.apps.revokeAuthorizationForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-an-authorization-for-an-application",
+ method: "DELETE",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ revokeGrantForApplication: {
+ deprecated: "octokit.apps.revokeGrantForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-a-grant-for-an-application",
+ method: "DELETE",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/grants/:access_token"
+ },
+ revokeInstallationToken: {
+ headers: {
+ accept: "application/vnd.github.gambit-preview+json"
+ },
+ method: "DELETE",
+ params: {},
+ url: "/installation/token"
+ }
+ },
+ checks: {
+ create: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "POST",
+ params: {
+ actions: {
+ type: "object[]"
+ },
+ "actions[].description": {
+ required: true,
+ type: "string"
+ },
+ "actions[].identifier": {
+ required: true,
+ type: "string"
+ },
+ "actions[].label": {
+ required: true,
+ type: "string"
+ },
+ completed_at: {
+ type: "string"
+ },
+ conclusion: {
+ enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"],
+ type: "string"
+ },
+ details_url: {
+ type: "string"
+ },
+ external_id: {
+ type: "string"
+ },
+ head_sha: {
+ required: true,
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ output: {
+ type: "object"
+ },
+ "output.annotations": {
+ type: "object[]"
+ },
+ "output.annotations[].annotation_level": {
+ enum: ["notice", "warning", "failure"],
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].end_column": {
+ type: "integer"
+ },
+ "output.annotations[].end_line": {
+ required: true,
+ type: "integer"
+ },
+ "output.annotations[].message": {
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].path": {
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].raw_details": {
+ type: "string"
+ },
+ "output.annotations[].start_column": {
+ type: "integer"
+ },
+ "output.annotations[].start_line": {
+ required: true,
+ type: "integer"
+ },
+ "output.annotations[].title": {
+ type: "string"
+ },
+ "output.images": {
+ type: "object[]"
+ },
+ "output.images[].alt": {
+ required: true,
+ type: "string"
+ },
+ "output.images[].caption": {
+ type: "string"
+ },
+ "output.images[].image_url": {
+ required: true,
+ type: "string"
+ },
+ "output.summary": {
+ required: true,
+ type: "string"
+ },
+ "output.text": {
+ type: "string"
+ },
+ "output.title": {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ started_at: {
+ type: "string"
+ },
+ status: {
+ enum: ["queued", "in_progress", "completed"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-runs"
+ },
+ createSuite: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "POST",
+ params: {
+ head_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-suites"
+ },
+ get: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ check_run_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-runs/:check_run_id"
+ },
+ getSuite: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ check_suite_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-suites/:check_suite_id"
+ },
+ listAnnotations: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ check_run_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-runs/:check_run_id/annotations"
+ },
+ listForRef: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ check_name: {
+ type: "string"
+ },
+ filter: {
+ enum: ["latest", "all"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ status: {
+ enum: ["queued", "in_progress", "completed"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref/check-runs"
+ },
+ listForSuite: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ check_name: {
+ type: "string"
+ },
+ check_suite_id: {
+ required: true,
+ type: "integer"
+ },
+ filter: {
+ enum: ["latest", "all"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ status: {
+ enum: ["queued", "in_progress", "completed"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-suites/:check_suite_id/check-runs"
+ },
+ listSuitesForRef: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ app_id: {
+ type: "integer"
+ },
+ check_name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref/check-suites"
+ },
+ rerequestSuite: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "POST",
+ params: {
+ check_suite_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-suites/:check_suite_id/rerequest"
+ },
+ setSuitesPreferences: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ auto_trigger_checks: {
+ type: "object[]"
+ },
+ "auto_trigger_checks[].app_id": {
+ required: true,
+ type: "integer"
+ },
+ "auto_trigger_checks[].setting": {
+ required: true,
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-suites/preferences"
+ },
+ update: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ actions: {
+ type: "object[]"
+ },
+ "actions[].description": {
+ required: true,
+ type: "string"
+ },
+ "actions[].identifier": {
+ required: true,
+ type: "string"
+ },
+ "actions[].label": {
+ required: true,
+ type: "string"
+ },
+ check_run_id: {
+ required: true,
+ type: "integer"
+ },
+ completed_at: {
+ type: "string"
+ },
+ conclusion: {
+ enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"],
+ type: "string"
+ },
+ details_url: {
+ type: "string"
+ },
+ external_id: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ },
+ output: {
+ type: "object"
+ },
+ "output.annotations": {
+ type: "object[]"
+ },
+ "output.annotations[].annotation_level": {
+ enum: ["notice", "warning", "failure"],
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].end_column": {
+ type: "integer"
+ },
+ "output.annotations[].end_line": {
+ required: true,
+ type: "integer"
+ },
+ "output.annotations[].message": {
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].path": {
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].raw_details": {
+ type: "string"
+ },
+ "output.annotations[].start_column": {
+ type: "integer"
+ },
+ "output.annotations[].start_line": {
+ required: true,
+ type: "integer"
+ },
+ "output.annotations[].title": {
+ type: "string"
+ },
+ "output.images": {
+ type: "object[]"
+ },
+ "output.images[].alt": {
+ required: true,
+ type: "string"
+ },
+ "output.images[].caption": {
+ type: "string"
+ },
+ "output.images[].image_url": {
+ required: true,
+ type: "string"
+ },
+ "output.summary": {
+ required: true,
+ type: "string"
+ },
+ "output.text": {
+ type: "string"
+ },
+ "output.title": {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ started_at: {
+ type: "string"
+ },
+ status: {
+ enum: ["queued", "in_progress", "completed"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-runs/:check_run_id"
+ }
+ },
+ codesOfConduct: {
+ getConductCode: {
+ headers: {
+ accept: "application/vnd.github.scarlet-witch-preview+json"
+ },
+ method: "GET",
+ params: {
+ key: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/codes_of_conduct/:key"
+ },
+ getForRepo: {
+ headers: {
+ accept: "application/vnd.github.scarlet-witch-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/community/code_of_conduct"
+ },
+ listConductCodes: {
+ headers: {
+ accept: "application/vnd.github.scarlet-witch-preview+json"
+ },
+ method: "GET",
+ params: {},
+ url: "/codes_of_conduct"
+ }
+ },
+ emojis: {
+ get: {
+ method: "GET",
+ params: {},
+ url: "/emojis"
+ }
+ },
+ gists: {
+ checkIsStarred: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/star"
+ },
+ create: {
+ method: "POST",
+ params: {
+ description: {
+ type: "string"
+ },
+ files: {
+ required: true,
+ type: "object"
+ },
+ "files.content": {
+ type: "string"
+ },
+ public: {
+ type: "boolean"
+ }
+ },
+ url: "/gists"
+ },
+ createComment: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/comments"
+ },
+ delete: {
+ method: "DELETE",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id"
+ },
+ deleteComment: {
+ method: "DELETE",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/comments/:comment_id"
+ },
+ fork: {
+ method: "POST",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/forks"
+ },
+ get: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id"
+ },
+ getComment: {
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/comments/:comment_id"
+ },
+ getRevision: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/:sha"
+ },
+ list: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/gists"
+ },
+ listComments: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/gists/:gist_id/comments"
+ },
+ listCommits: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/gists/:gist_id/commits"
+ },
+ listForks: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/gists/:gist_id/forks"
+ },
+ listPublic: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/gists/public"
+ },
+ listPublicForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/gists"
+ },
+ listStarred: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/gists/starred"
+ },
+ star: {
+ method: "PUT",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/star"
+ },
+ unstar: {
+ method: "DELETE",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/star"
+ },
+ update: {
+ method: "PATCH",
+ params: {
+ description: {
+ type: "string"
+ },
+ files: {
+ type: "object"
+ },
+ "files.content": {
+ type: "string"
+ },
+ "files.filename": {
+ type: "string"
+ },
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id"
+ },
+ updateComment: {
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/comments/:comment_id"
+ }
+ },
+ git: {
+ createBlob: {
+ method: "POST",
+ params: {
+ content: {
+ required: true,
+ type: "string"
+ },
+ encoding: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/blobs"
+ },
+ createCommit: {
+ method: "POST",
+ params: {
+ author: {
+ type: "object"
+ },
+ "author.date": {
+ type: "string"
+ },
+ "author.email": {
+ type: "string"
+ },
+ "author.name": {
+ type: "string"
+ },
+ committer: {
+ type: "object"
+ },
+ "committer.date": {
+ type: "string"
+ },
+ "committer.email": {
+ type: "string"
+ },
+ "committer.name": {
+ type: "string"
+ },
+ message: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ parents: {
+ required: true,
+ type: "string[]"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ signature: {
+ type: "string"
+ },
+ tree: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/commits"
+ },
+ createRef: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/refs"
+ },
+ createTag: {
+ method: "POST",
+ params: {
+ message: {
+ required: true,
+ type: "string"
+ },
+ object: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tag: {
+ required: true,
+ type: "string"
+ },
+ tagger: {
+ type: "object"
+ },
+ "tagger.date": {
+ type: "string"
+ },
+ "tagger.email": {
+ type: "string"
+ },
+ "tagger.name": {
+ type: "string"
+ },
+ type: {
+ enum: ["commit", "tree", "blob"],
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/tags"
+ },
+ createTree: {
+ method: "POST",
+ params: {
+ base_tree: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tree: {
+ required: true,
+ type: "object[]"
+ },
+ "tree[].content": {
+ type: "string"
+ },
+ "tree[].mode": {
+ enum: ["100644", "100755", "040000", "160000", "120000"],
+ type: "string"
+ },
+ "tree[].path": {
+ type: "string"
+ },
+ "tree[].sha": {
+ allowNull: true,
+ type: "string"
+ },
+ "tree[].type": {
+ enum: ["blob", "tree", "commit"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/trees"
+ },
+ deleteRef: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/refs/:ref"
+ },
+ getBlob: {
+ method: "GET",
+ params: {
+ file_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/blobs/:file_sha"
+ },
+ getCommit: {
+ method: "GET",
+ params: {
+ commit_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/commits/:commit_sha"
+ },
+ getRef: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/ref/:ref"
+ },
+ getTag: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tag_sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/tags/:tag_sha"
+ },
+ getTree: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ recursive: {
+ enum: ["1"],
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tree_sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/trees/:tree_sha"
+ },
+ listMatchingRefs: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/matching-refs/:ref"
+ },
+ listRefs: {
+ method: "GET",
+ params: {
+ namespace: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/refs/:namespace"
+ },
+ updateRef: {
+ method: "PATCH",
+ params: {
+ force: {
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/refs/:ref"
+ }
+ },
+ gitignore: {
+ getTemplate: {
+ method: "GET",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gitignore/templates/:name"
+ },
+ listTemplates: {
+ method: "GET",
+ params: {},
+ url: "/gitignore/templates"
+ }
+ },
+ interactions: {
+ addOrUpdateRestrictionsForOrg: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "PUT",
+ params: {
+ limit: {
+ enum: ["existing_users", "contributors_only", "collaborators_only"],
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/interaction-limits"
+ },
+ addOrUpdateRestrictionsForRepo: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "PUT",
+ params: {
+ limit: {
+ enum: ["existing_users", "contributors_only", "collaborators_only"],
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/interaction-limits"
+ },
+ getRestrictionsForOrg: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/interaction-limits"
+ },
+ getRestrictionsForRepo: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/interaction-limits"
+ },
+ removeRestrictionsForOrg: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/interaction-limits"
+ },
+ removeRestrictionsForRepo: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/interaction-limits"
+ }
+ },
+ issues: {
+ addAssignees: {
+ method: "POST",
+ params: {
+ assignees: {
+ type: "string[]"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/assignees"
+ },
+ addLabels: {
+ method: "POST",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ labels: {
+ required: true,
+ type: "string[]"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/labels"
+ },
+ checkAssignee: {
+ method: "GET",
+ params: {
+ assignee: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/assignees/:assignee"
+ },
+ create: {
+ method: "POST",
+ params: {
+ assignee: {
+ type: "string"
+ },
+ assignees: {
+ type: "string[]"
+ },
+ body: {
+ type: "string"
+ },
+ labels: {
+ type: "string[]"
+ },
+ milestone: {
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues"
+ },
+ createComment: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/comments"
+ },
+ createLabel: {
+ method: "POST",
+ params: {
+ color: {
+ required: true,
+ type: "string"
+ },
+ description: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/labels"
+ },
+ createMilestone: {
+ method: "POST",
+ params: {
+ description: {
+ type: "string"
+ },
+ due_on: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed"],
+ type: "string"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones"
+ },
+ deleteComment: {
+ method: "DELETE",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments/:comment_id"
+ },
+ deleteLabel: {
+ method: "DELETE",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/labels/:name"
+ },
+ deleteMilestone: {
+ method: "DELETE",
+ params: {
+ milestone_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "milestone_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones/:milestone_number"
+ },
+ get: {
+ method: "GET",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number"
+ },
+ getComment: {
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments/:comment_id"
+ },
+ getEvent: {
+ method: "GET",
+ params: {
+ event_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/events/:event_id"
+ },
+ getLabel: {
+ method: "GET",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/labels/:name"
+ },
+ getMilestone: {
+ method: "GET",
+ params: {
+ milestone_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "milestone_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones/:milestone_number"
+ },
+ list: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ filter: {
+ enum: ["assigned", "created", "mentioned", "subscribed", "all"],
+ type: "string"
+ },
+ labels: {
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated", "comments"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/issues"
+ },
+ listAssignees: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/assignees"
+ },
+ listComments: {
+ method: "GET",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/comments"
+ },
+ listCommentsForRepo: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments"
+ },
+ listEvents: {
+ method: "GET",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/events"
+ },
+ listEventsForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/events"
+ },
+ listEventsForTimeline: {
+ headers: {
+ accept: "application/vnd.github.mockingbird-preview+json"
+ },
+ method: "GET",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/timeline"
+ },
+ listForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ filter: {
+ enum: ["assigned", "created", "mentioned", "subscribed", "all"],
+ type: "string"
+ },
+ labels: {
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated", "comments"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/user/issues"
+ },
+ listForOrg: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ filter: {
+ enum: ["assigned", "created", "mentioned", "subscribed", "all"],
+ type: "string"
+ },
+ labels: {
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated", "comments"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/issues"
+ },
+ listForRepo: {
+ method: "GET",
+ params: {
+ assignee: {
+ type: "string"
+ },
+ creator: {
+ type: "string"
+ },
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ labels: {
+ type: "string"
+ },
+ mentioned: {
+ type: "string"
+ },
+ milestone: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated", "comments"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues"
+ },
+ listLabelsForMilestone: {
+ method: "GET",
+ params: {
+ milestone_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "milestone_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones/:milestone_number/labels"
+ },
+ listLabelsForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/labels"
+ },
+ listLabelsOnIssue: {
+ method: "GET",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/labels"
+ },
+ listMilestonesForRepo: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["due_on", "completeness"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones"
+ },
+ lock: {
+ method: "PUT",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ lock_reason: {
+ enum: ["off-topic", "too heated", "resolved", "spam"],
+ type: "string"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/lock"
+ },
+ removeAssignees: {
+ method: "DELETE",
+ params: {
+ assignees: {
+ type: "string[]"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/assignees"
+ },
+ removeLabel: {
+ method: "DELETE",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/labels/:name"
+ },
+ removeLabels: {
+ method: "DELETE",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/labels"
+ },
+ replaceLabels: {
+ method: "PUT",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ labels: {
+ type: "string[]"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/labels"
+ },
+ unlock: {
+ method: "DELETE",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/lock"
+ },
+ update: {
+ method: "PATCH",
+ params: {
+ assignee: {
+ type: "string"
+ },
+ assignees: {
+ type: "string[]"
+ },
+ body: {
+ type: "string"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ labels: {
+ type: "string[]"
+ },
+ milestone: {
+ allowNull: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed"],
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number"
+ },
+ updateComment: {
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments/:comment_id"
+ },
+ updateLabel: {
+ method: "PATCH",
+ params: {
+ color: {
+ type: "string"
+ },
+ current_name: {
+ required: true,
+ type: "string"
+ },
+ description: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/labels/:current_name"
+ },
+ updateMilestone: {
+ method: "PATCH",
+ params: {
+ description: {
+ type: "string"
+ },
+ due_on: {
+ type: "string"
+ },
+ milestone_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "milestone_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed"],
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones/:milestone_number"
+ }
+ },
+ licenses: {
+ get: {
+ method: "GET",
+ params: {
+ license: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/licenses/:license"
+ },
+ getForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/license"
+ },
+ list: {
+ deprecated: "octokit.licenses.list() has been renamed to octokit.licenses.listCommonlyUsed() (2019-03-05)",
+ method: "GET",
+ params: {},
+ url: "/licenses"
+ },
+ listCommonlyUsed: {
+ method: "GET",
+ params: {},
+ url: "/licenses"
+ }
+ },
+ markdown: {
+ render: {
+ method: "POST",
+ params: {
+ context: {
+ type: "string"
+ },
+ mode: {
+ enum: ["markdown", "gfm"],
+ type: "string"
+ },
+ text: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/markdown"
+ },
+ renderRaw: {
+ headers: {
+ "content-type": "text/plain; charset=utf-8"
+ },
+ method: "POST",
+ params: {
+ data: {
+ mapTo: "data",
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/markdown/raw"
+ }
+ },
+ meta: {
+ get: {
+ method: "GET",
+ params: {},
+ url: "/meta"
+ }
+ },
+ migrations: {
+ cancelImport: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import"
+ },
+ deleteArchiveForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/migrations/:migration_id/archive"
+ },
+ deleteArchiveForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id/archive"
+ },
+ downloadArchiveForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id/archive"
+ },
+ getArchiveForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/migrations/:migration_id/archive"
+ },
+ getArchiveForOrg: {
+ deprecated: "octokit.migrations.getArchiveForOrg() has been renamed to octokit.migrations.downloadArchiveForOrg() (2020-01-27)",
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id/archive"
+ },
+ getCommitAuthors: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import/authors"
+ },
+ getImportProgress: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import"
+ },
+ getLargeFiles: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import/large_files"
+ },
+ getStatusForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/migrations/:migration_id"
+ },
+ getStatusForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id"
+ },
+ listForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/migrations"
+ },
+ listForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/migrations"
+ },
+ listReposForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id/repositories"
+ },
+ listReposForUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/:migration_id/repositories"
+ },
+ mapCommitAuthor: {
+ method: "PATCH",
+ params: {
+ author_id: {
+ required: true,
+ type: "integer"
+ },
+ email: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import/authors/:author_id"
+ },
+ setLfsPreference: {
+ method: "PATCH",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ use_lfs: {
+ enum: ["opt_in", "opt_out"],
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import/lfs"
+ },
+ startForAuthenticatedUser: {
+ method: "POST",
+ params: {
+ exclude_attachments: {
+ type: "boolean"
+ },
+ lock_repositories: {
+ type: "boolean"
+ },
+ repositories: {
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/user/migrations"
+ },
+ startForOrg: {
+ method: "POST",
+ params: {
+ exclude_attachments: {
+ type: "boolean"
+ },
+ lock_repositories: {
+ type: "boolean"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ repositories: {
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/orgs/:org/migrations"
+ },
+ startImport: {
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tfvc_project: {
+ type: "string"
+ },
+ vcs: {
+ enum: ["subversion", "git", "mercurial", "tfvc"],
+ type: "string"
+ },
+ vcs_password: {
+ type: "string"
+ },
+ vcs_url: {
+ required: true,
+ type: "string"
+ },
+ vcs_username: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import"
+ },
+ unlockRepoForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ repo_name: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/migrations/:migration_id/repos/:repo_name/lock"
+ },
+ unlockRepoForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ repo_name: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id/repos/:repo_name/lock"
+ },
+ updateImport: {
+ method: "PATCH",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ vcs_password: {
+ type: "string"
+ },
+ vcs_username: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import"
+ }
+ },
+ oauthAuthorizations: {
+ checkAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.checkAuthorization() has been renamed to octokit.apps.checkAuthorization() (2019-11-05)",
+ method: "GET",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ createAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.createAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#create-a-new-authorization",
+ method: "POST",
+ params: {
+ client_id: {
+ type: "string"
+ },
+ client_secret: {
+ type: "string"
+ },
+ fingerprint: {
+ type: "string"
+ },
+ note: {
+ required: true,
+ type: "string"
+ },
+ note_url: {
+ type: "string"
+ },
+ scopes: {
+ type: "string[]"
+ }
+ },
+ url: "/authorizations"
+ },
+ deleteAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.deleteAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-an-authorization",
+ method: "DELETE",
+ params: {
+ authorization_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/authorizations/:authorization_id"
+ },
+ deleteGrant: {
+ deprecated: "octokit.oauthAuthorizations.deleteGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-a-grant",
+ method: "DELETE",
+ params: {
+ grant_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/applications/grants/:grant_id"
+ },
+ getAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.getAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-authorization",
+ method: "GET",
+ params: {
+ authorization_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/authorizations/:authorization_id"
+ },
+ getGrant: {
+ deprecated: "octokit.oauthAuthorizations.getGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-grant",
+ method: "GET",
+ params: {
+ grant_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/applications/grants/:grant_id"
+ },
+ getOrCreateAuthorizationForApp: {
+ deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForApp() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app",
+ method: "PUT",
+ params: {
+ client_id: {
+ required: true,
+ type: "string"
+ },
+ client_secret: {
+ required: true,
+ type: "string"
+ },
+ fingerprint: {
+ type: "string"
+ },
+ note: {
+ type: "string"
+ },
+ note_url: {
+ type: "string"
+ },
+ scopes: {
+ type: "string[]"
+ }
+ },
+ url: "/authorizations/clients/:client_id"
+ },
+ getOrCreateAuthorizationForAppAndFingerprint: {
+ deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app-and-fingerprint",
+ method: "PUT",
+ params: {
+ client_id: {
+ required: true,
+ type: "string"
+ },
+ client_secret: {
+ required: true,
+ type: "string"
+ },
+ fingerprint: {
+ required: true,
+ type: "string"
+ },
+ note: {
+ type: "string"
+ },
+ note_url: {
+ type: "string"
+ },
+ scopes: {
+ type: "string[]"
+ }
+ },
+ url: "/authorizations/clients/:client_id/:fingerprint"
+ },
+ getOrCreateAuthorizationForAppFingerprint: {
+ deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppFingerprint() has been renamed to octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() (2018-12-27)",
+ method: "PUT",
+ params: {
+ client_id: {
+ required: true,
+ type: "string"
+ },
+ client_secret: {
+ required: true,
+ type: "string"
+ },
+ fingerprint: {
+ required: true,
+ type: "string"
+ },
+ note: {
+ type: "string"
+ },
+ note_url: {
+ type: "string"
+ },
+ scopes: {
+ type: "string[]"
+ }
+ },
+ url: "/authorizations/clients/:client_id/:fingerprint"
+ },
+ listAuthorizations: {
+ deprecated: "octokit.oauthAuthorizations.listAuthorizations() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-authorizations",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/authorizations"
+ },
+ listGrants: {
+ deprecated: "octokit.oauthAuthorizations.listGrants() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-grants",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/applications/grants"
+ },
+ resetAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.resetAuthorization() has been renamed to octokit.apps.resetAuthorization() (2019-11-05)",
+ method: "POST",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ revokeAuthorizationForApplication: {
+ deprecated: "octokit.oauthAuthorizations.revokeAuthorizationForApplication() has been renamed to octokit.apps.revokeAuthorizationForApplication() (2019-11-05)",
+ method: "DELETE",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ revokeGrantForApplication: {
+ deprecated: "octokit.oauthAuthorizations.revokeGrantForApplication() has been renamed to octokit.apps.revokeGrantForApplication() (2019-11-05)",
+ method: "DELETE",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/grants/:access_token"
+ },
+ updateAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.updateAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#update-an-existing-authorization",
+ method: "PATCH",
+ params: {
+ add_scopes: {
+ type: "string[]"
+ },
+ authorization_id: {
+ required: true,
+ type: "integer"
+ },
+ fingerprint: {
+ type: "string"
+ },
+ note: {
+ type: "string"
+ },
+ note_url: {
+ type: "string"
+ },
+ remove_scopes: {
+ type: "string[]"
+ },
+ scopes: {
+ type: "string[]"
+ }
+ },
+ url: "/authorizations/:authorization_id"
+ }
+ },
+ orgs: {
+ addOrUpdateMembership: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ role: {
+ enum: ["admin", "member"],
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/memberships/:username"
+ },
+ blockUser: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/blocks/:username"
+ },
+ checkBlockedUser: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/blocks/:username"
+ },
+ checkMembership: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/members/:username"
+ },
+ checkPublicMembership: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/public_members/:username"
+ },
+ concealMembership: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/public_members/:username"
+ },
+ convertMemberToOutsideCollaborator: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/outside_collaborators/:username"
+ },
+ createHook: {
+ method: "POST",
+ params: {
+ active: {
+ type: "boolean"
+ },
+ config: {
+ required: true,
+ type: "object"
+ },
+ "config.content_type": {
+ type: "string"
+ },
+ "config.insecure_ssl": {
+ type: "string"
+ },
+ "config.secret": {
+ type: "string"
+ },
+ "config.url": {
+ required: true,
+ type: "string"
+ },
+ events: {
+ type: "string[]"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/hooks"
+ },
+ createInvitation: {
+ method: "POST",
+ params: {
+ email: {
+ type: "string"
+ },
+ invitee_id: {
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ role: {
+ enum: ["admin", "direct_member", "billing_manager"],
+ type: "string"
+ },
+ team_ids: {
+ type: "integer[]"
+ }
+ },
+ url: "/orgs/:org/invitations"
+ },
+ deleteHook: {
+ method: "DELETE",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/hooks/:hook_id"
+ },
+ get: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org"
+ },
+ getHook: {
+ method: "GET",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/hooks/:hook_id"
+ },
+ getMembership: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/memberships/:username"
+ },
+ getMembershipForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/memberships/orgs/:org"
+ },
+ list: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "integer"
+ }
+ },
+ url: "/organizations"
+ },
+ listBlockedUsers: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/blocks"
+ },
+ listForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/orgs"
+ },
+ listForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/orgs"
+ },
+ listHooks: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/hooks"
+ },
+ listInstallations: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/installations"
+ },
+ listInvitationTeams: {
+ method: "GET",
+ params: {
+ invitation_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/invitations/:invitation_id/teams"
+ },
+ listMembers: {
+ method: "GET",
+ params: {
+ filter: {
+ enum: ["2fa_disabled", "all"],
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ role: {
+ enum: ["all", "admin", "member"],
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/members"
+ },
+ listMemberships: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ state: {
+ enum: ["active", "pending"],
+ type: "string"
+ }
+ },
+ url: "/user/memberships/orgs"
+ },
+ listOutsideCollaborators: {
+ method: "GET",
+ params: {
+ filter: {
+ enum: ["2fa_disabled", "all"],
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/outside_collaborators"
+ },
+ listPendingInvitations: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/invitations"
+ },
+ listPublicMembers: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/public_members"
+ },
+ pingHook: {
+ method: "POST",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/hooks/:hook_id/pings"
+ },
+ publicizeMembership: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/public_members/:username"
+ },
+ removeMember: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/members/:username"
+ },
+ removeMembership: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/memberships/:username"
+ },
+ removeOutsideCollaborator: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/outside_collaborators/:username"
+ },
+ unblockUser: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/blocks/:username"
+ },
+ update: {
+ method: "PATCH",
+ params: {
+ billing_email: {
+ type: "string"
+ },
+ company: {
+ type: "string"
+ },
+ default_repository_permission: {
+ enum: ["read", "write", "admin", "none"],
+ type: "string"
+ },
+ description: {
+ type: "string"
+ },
+ email: {
+ type: "string"
+ },
+ has_organization_projects: {
+ type: "boolean"
+ },
+ has_repository_projects: {
+ type: "boolean"
+ },
+ location: {
+ type: "string"
+ },
+ members_allowed_repository_creation_type: {
+ enum: ["all", "private", "none"],
+ type: "string"
+ },
+ members_can_create_internal_repositories: {
+ type: "boolean"
+ },
+ members_can_create_private_repositories: {
+ type: "boolean"
+ },
+ members_can_create_public_repositories: {
+ type: "boolean"
+ },
+ members_can_create_repositories: {
+ type: "boolean"
+ },
+ name: {
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org"
+ },
+ updateHook: {
+ method: "PATCH",
+ params: {
+ active: {
+ type: "boolean"
+ },
+ config: {
+ type: "object"
+ },
+ "config.content_type": {
+ type: "string"
+ },
+ "config.insecure_ssl": {
+ type: "string"
+ },
+ "config.secret": {
+ type: "string"
+ },
+ "config.url": {
+ required: true,
+ type: "string"
+ },
+ events: {
+ type: "string[]"
+ },
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/hooks/:hook_id"
+ },
+ updateMembership: {
+ method: "PATCH",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["active"],
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/memberships/orgs/:org"
+ }
+ },
+ projects: {
+ addCollaborator: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PUT",
+ params: {
+ permission: {
+ enum: ["read", "write", "admin"],
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/projects/:project_id/collaborators/:username"
+ },
+ createCard: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ column_id: {
+ required: true,
+ type: "integer"
+ },
+ content_id: {
+ type: "integer"
+ },
+ content_type: {
+ type: "string"
+ },
+ note: {
+ type: "string"
+ }
+ },
+ url: "/projects/columns/:column_id/cards"
+ },
+ createColumn: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/:project_id/columns"
+ },
+ createForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/projects"
+ },
+ createForOrg: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/projects"
+ },
+ createForRepo: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/projects"
+ },
+ delete: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/:project_id"
+ },
+ deleteCard: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ card_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/columns/cards/:card_id"
+ },
+ deleteColumn: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ column_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/columns/:column_id"
+ },
+ get: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/:project_id"
+ },
+ getCard: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ card_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/columns/cards/:card_id"
+ },
+ getColumn: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ column_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/columns/:column_id"
+ },
+ listCards: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ archived_state: {
+ enum: ["all", "archived", "not_archived"],
+ type: "string"
+ },
+ column_id: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/projects/columns/:column_id/cards"
+ },
+ listCollaborators: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ affiliation: {
+ enum: ["outside", "direct", "all"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/:project_id/collaborators"
+ },
+ listColumns: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/:project_id/columns"
+ },
+ listForOrg: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/projects"
+ },
+ listForRepo: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/projects"
+ },
+ listForUser: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/projects"
+ },
+ moveCard: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ card_id: {
+ required: true,
+ type: "integer"
+ },
+ column_id: {
+ type: "integer"
+ },
+ position: {
+ required: true,
+ type: "string",
+ validation: "^(top|bottom|after:\\d+)$"
+ }
+ },
+ url: "/projects/columns/cards/:card_id/moves"
+ },
+ moveColumn: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ column_id: {
+ required: true,
+ type: "integer"
+ },
+ position: {
+ required: true,
+ type: "string",
+ validation: "^(first|last|after:\\d+)$"
+ }
+ },
+ url: "/projects/columns/:column_id/moves"
+ },
+ removeCollaborator: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/projects/:project_id/collaborators/:username"
+ },
+ reviewUserPermissionLevel: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/projects/:project_id/collaborators/:username/permission"
+ },
+ update: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ body: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ },
+ organization_permission: {
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ state: {
+ enum: ["open", "closed"],
+ type: "string"
+ }
+ },
+ url: "/projects/:project_id"
+ },
+ updateCard: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ archived: {
+ type: "boolean"
+ },
+ card_id: {
+ required: true,
+ type: "integer"
+ },
+ note: {
+ type: "string"
+ }
+ },
+ url: "/projects/columns/cards/:card_id"
+ },
+ updateColumn: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ column_id: {
+ required: true,
+ type: "integer"
+ },
+ name: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/projects/columns/:column_id"
+ }
+ },
+ pulls: {
+ checkIfMerged: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/merge"
+ },
+ create: {
+ method: "POST",
+ params: {
+ base: {
+ required: true,
+ type: "string"
+ },
+ body: {
+ type: "string"
+ },
+ draft: {
+ type: "boolean"
+ },
+ head: {
+ required: true,
+ type: "string"
+ },
+ maintainer_can_modify: {
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls"
+ },
+ createComment: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ commit_id: {
+ required: true,
+ type: "string"
+ },
+ in_reply_to: {
+ deprecated: true,
+ description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.",
+ type: "integer"
+ },
+ line: {
+ type: "integer"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ position: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ side: {
+ enum: ["LEFT", "RIGHT"],
+ type: "string"
+ },
+ start_line: {
+ type: "integer"
+ },
+ start_side: {
+ enum: ["LEFT", "RIGHT", "side"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/comments"
+ },
+ createCommentReply: {
+ deprecated: "octokit.pulls.createCommentReply() has been renamed to octokit.pulls.createComment() (2019-09-09)",
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ commit_id: {
+ required: true,
+ type: "string"
+ },
+ in_reply_to: {
+ deprecated: true,
+ description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.",
+ type: "integer"
+ },
+ line: {
+ type: "integer"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ position: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ side: {
+ enum: ["LEFT", "RIGHT"],
+ type: "string"
+ },
+ start_line: {
+ type: "integer"
+ },
+ start_side: {
+ enum: ["LEFT", "RIGHT", "side"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/comments"
+ },
+ createFromIssue: {
+ deprecated: "octokit.pulls.createFromIssue() is deprecated, see https://developer.github.com/v3/pulls/#create-a-pull-request",
+ method: "POST",
+ params: {
+ base: {
+ required: true,
+ type: "string"
+ },
+ draft: {
+ type: "boolean"
+ },
+ head: {
+ required: true,
+ type: "string"
+ },
+ issue: {
+ required: true,
+ type: "integer"
+ },
+ maintainer_can_modify: {
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls"
+ },
+ createReview: {
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ comments: {
+ type: "object[]"
+ },
+ "comments[].body": {
+ required: true,
+ type: "string"
+ },
+ "comments[].path": {
+ required: true,
+ type: "string"
+ },
+ "comments[].position": {
+ required: true,
+ type: "integer"
+ },
+ commit_id: {
+ type: "string"
+ },
+ event: {
+ enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"],
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews"
+ },
+ createReviewCommentReply: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/comments/:comment_id/replies"
+ },
+ createReviewRequest: {
+ method: "POST",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ reviewers: {
+ type: "string[]"
+ },
+ team_reviewers: {
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers"
+ },
+ deleteComment: {
+ method: "DELETE",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments/:comment_id"
+ },
+ deletePendingReview: {
+ method: "DELETE",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id"
+ },
+ deleteReviewRequest: {
+ method: "DELETE",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ reviewers: {
+ type: "string[]"
+ },
+ team_reviewers: {
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers"
+ },
+ dismissReview: {
+ method: "PUT",
+ params: {
+ message: {
+ required: true,
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/dismissals"
+ },
+ get: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number"
+ },
+ getComment: {
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments/:comment_id"
+ },
+ getCommentsForReview: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/comments"
+ },
+ getReview: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id"
+ },
+ list: {
+ method: "GET",
+ params: {
+ base: {
+ type: "string"
+ },
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ head: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated", "popularity", "long-running"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls"
+ },
+ listComments: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/comments"
+ },
+ listCommentsForRepo: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments"
+ },
+ listCommits: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/commits"
+ },
+ listFiles: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/files"
+ },
+ listReviewRequests: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers"
+ },
+ listReviews: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews"
+ },
+ merge: {
+ method: "PUT",
+ params: {
+ commit_message: {
+ type: "string"
+ },
+ commit_title: {
+ type: "string"
+ },
+ merge_method: {
+ enum: ["merge", "squash", "rebase"],
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/merge"
+ },
+ submitReview: {
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ event: {
+ enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"],
+ required: true,
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/events"
+ },
+ update: {
+ method: "PATCH",
+ params: {
+ base: {
+ type: "string"
+ },
+ body: {
+ type: "string"
+ },
+ maintainer_can_modify: {
+ type: "boolean"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed"],
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number"
+ },
+ updateBranch: {
+ headers: {
+ accept: "application/vnd.github.lydian-preview+json"
+ },
+ method: "PUT",
+ params: {
+ expected_head_sha: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/update-branch"
+ },
+ updateComment: {
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments/:comment_id"
+ },
+ updateReview: {
+ method: "PUT",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id"
+ }
+ },
+ rateLimit: {
+ get: {
+ method: "GET",
+ params: {},
+ url: "/rate_limit"
+ }
+ },
+ reactions: {
+ createForCommitComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments/:comment_id/reactions"
+ },
+ createForIssue: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/reactions"
+ },
+ createForIssueComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions"
+ },
+ createForPullRequestReviewComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions"
+ },
+ createForTeamDiscussion: {
+ deprecated: "octokit.reactions.createForTeamDiscussion() has been renamed to octokit.reactions.createForTeamDiscussionLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/reactions"
+ },
+ createForTeamDiscussionComment: {
+ deprecated: "octokit.reactions.createForTeamDiscussionComment() has been renamed to octokit.reactions.createForTeamDiscussionCommentLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ createForTeamDiscussionCommentInOrg: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ createForTeamDiscussionCommentLegacy: {
+ deprecated: "octokit.reactions.createForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-comment-legacy",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ createForTeamDiscussionInOrg: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions"
+ },
+ createForTeamDiscussionLegacy: {
+ deprecated: "octokit.reactions.createForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-legacy",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/reactions"
+ },
+ delete: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ reaction_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/reactions/:reaction_id"
+ },
+ listForCommitComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments/:comment_id/reactions"
+ },
+ listForIssue: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/reactions"
+ },
+ listForIssueComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions"
+ },
+ listForPullRequestReviewComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions"
+ },
+ listForTeamDiscussion: {
+ deprecated: "octokit.reactions.listForTeamDiscussion() has been renamed to octokit.reactions.listForTeamDiscussionLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/reactions"
+ },
+ listForTeamDiscussionComment: {
+ deprecated: "octokit.reactions.listForTeamDiscussionComment() has been renamed to octokit.reactions.listForTeamDiscussionCommentLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ listForTeamDiscussionCommentInOrg: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ listForTeamDiscussionCommentLegacy: {
+ deprecated: "octokit.reactions.listForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-comment-legacy",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ listForTeamDiscussionInOrg: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions"
+ },
+ listForTeamDiscussionLegacy: {
+ deprecated: "octokit.reactions.listForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-legacy",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/reactions"
+ }
+ },
+ repos: {
+ acceptInvitation: {
+ method: "PATCH",
+ params: {
+ invitation_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/repository_invitations/:invitation_id"
+ },
+ addCollaborator: {
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/collaborators/:username"
+ },
+ addDeployKey: {
+ method: "POST",
+ params: {
+ key: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ read_only: {
+ type: "boolean"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/keys"
+ },
+ addProtectedBranchAdminEnforcement: {
+ method: "POST",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins"
+ },
+ addProtectedBranchAppRestrictions: {
+ method: "POST",
+ params: {
+ apps: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
+ },
+ addProtectedBranchRequiredSignatures: {
+ headers: {
+ accept: "application/vnd.github.zzzax-preview+json"
+ },
+ method: "POST",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures"
+ },
+ addProtectedBranchRequiredStatusChecksContexts: {
+ method: "POST",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ contexts: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
+ },
+ addProtectedBranchTeamRestrictions: {
+ method: "POST",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ teams: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ addProtectedBranchUserRestrictions: {
+ method: "POST",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ users: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ checkCollaborator: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/collaborators/:username"
+ },
+ checkVulnerabilityAlerts: {
+ headers: {
+ accept: "application/vnd.github.dorian-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/vulnerability-alerts"
+ },
+ compareCommits: {
+ method: "GET",
+ params: {
+ base: {
+ required: true,
+ type: "string"
+ },
+ head: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/compare/:base...:head"
+ },
+ createCommitComment: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ commit_sha: {
+ required: true,
+ type: "string"
+ },
+ line: {
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ type: "string"
+ },
+ position: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ alias: "commit_sha",
+ deprecated: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:commit_sha/comments"
+ },
+ createDeployment: {
+ method: "POST",
+ params: {
+ auto_merge: {
+ type: "boolean"
+ },
+ description: {
+ type: "string"
+ },
+ environment: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ payload: {
+ type: "string"
+ },
+ production_environment: {
+ type: "boolean"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ required_contexts: {
+ type: "string[]"
+ },
+ task: {
+ type: "string"
+ },
+ transient_environment: {
+ type: "boolean"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments"
+ },
+ createDeploymentStatus: {
+ method: "POST",
+ params: {
+ auto_inactive: {
+ type: "boolean"
+ },
+ deployment_id: {
+ required: true,
+ type: "integer"
+ },
+ description: {
+ type: "string"
+ },
+ environment: {
+ enum: ["production", "staging", "qa"],
+ type: "string"
+ },
+ environment_url: {
+ type: "string"
+ },
+ log_url: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["error", "failure", "inactive", "in_progress", "queued", "pending", "success"],
+ required: true,
+ type: "string"
+ },
+ target_url: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments/:deployment_id/statuses"
+ },
+ createDispatchEvent: {
+ method: "POST",
+ params: {
+ client_payload: {
+ type: "object"
+ },
+ event_type: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/dispatches"
+ },
+ createFile: {
+ deprecated: "octokit.repos.createFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)",
+ method: "PUT",
+ params: {
+ author: {
+ type: "object"
+ },
+ "author.email": {
+ required: true,
+ type: "string"
+ },
+ "author.name": {
+ required: true,
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ committer: {
+ type: "object"
+ },
+ "committer.email": {
+ required: true,
+ type: "string"
+ },
+ "committer.name": {
+ required: true,
+ type: "string"
+ },
+ content: {
+ required: true,
+ type: "string"
+ },
+ message: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contents/:path"
+ },
+ createForAuthenticatedUser: {
+ method: "POST",
+ params: {
+ allow_merge_commit: {
+ type: "boolean"
+ },
+ allow_rebase_merge: {
+ type: "boolean"
+ },
+ allow_squash_merge: {
+ type: "boolean"
+ },
+ auto_init: {
+ type: "boolean"
+ },
+ delete_branch_on_merge: {
+ type: "boolean"
+ },
+ description: {
+ type: "string"
+ },
+ gitignore_template: {
+ type: "string"
+ },
+ has_issues: {
+ type: "boolean"
+ },
+ has_projects: {
+ type: "boolean"
+ },
+ has_wiki: {
+ type: "boolean"
+ },
+ homepage: {
+ type: "string"
+ },
+ is_template: {
+ type: "boolean"
+ },
+ license_template: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ team_id: {
+ type: "integer"
+ },
+ visibility: {
+ enum: ["public", "private", "visibility", "internal"],
+ type: "string"
+ }
+ },
+ url: "/user/repos"
+ },
+ createFork: {
+ method: "POST",
+ params: {
+ organization: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/forks"
+ },
+ createHook: {
+ method: "POST",
+ params: {
+ active: {
+ type: "boolean"
+ },
+ config: {
+ required: true,
+ type: "object"
+ },
+ "config.content_type": {
+ type: "string"
+ },
+ "config.insecure_ssl": {
+ type: "string"
+ },
+ "config.secret": {
+ type: "string"
+ },
+ "config.url": {
+ required: true,
+ type: "string"
+ },
+ events: {
+ type: "string[]"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks"
+ },
+ createInOrg: {
+ method: "POST",
+ params: {
+ allow_merge_commit: {
+ type: "boolean"
+ },
+ allow_rebase_merge: {
+ type: "boolean"
+ },
+ allow_squash_merge: {
+ type: "boolean"
+ },
+ auto_init: {
+ type: "boolean"
+ },
+ delete_branch_on_merge: {
+ type: "boolean"
+ },
+ description: {
+ type: "string"
+ },
+ gitignore_template: {
+ type: "string"
+ },
+ has_issues: {
+ type: "boolean"
+ },
+ has_projects: {
+ type: "boolean"
+ },
+ has_wiki: {
+ type: "boolean"
+ },
+ homepage: {
+ type: "string"
+ },
+ is_template: {
+ type: "boolean"
+ },
+ license_template: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ team_id: {
+ type: "integer"
+ },
+ visibility: {
+ enum: ["public", "private", "visibility", "internal"],
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/repos"
+ },
+ createOrUpdateFile: {
+ method: "PUT",
+ params: {
+ author: {
+ type: "object"
+ },
+ "author.email": {
+ required: true,
+ type: "string"
+ },
+ "author.name": {
+ required: true,
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ committer: {
+ type: "object"
+ },
+ "committer.email": {
+ required: true,
+ type: "string"
+ },
+ "committer.name": {
+ required: true,
+ type: "string"
+ },
+ content: {
+ required: true,
+ type: "string"
+ },
+ message: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contents/:path"
+ },
+ createRelease: {
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ draft: {
+ type: "boolean"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ prerelease: {
+ type: "boolean"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tag_name: {
+ required: true,
+ type: "string"
+ },
+ target_commitish: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases"
+ },
+ createStatus: {
+ method: "POST",
+ params: {
+ context: {
+ type: "string"
+ },
+ description: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["error", "failure", "pending", "success"],
+ required: true,
+ type: "string"
+ },
+ target_url: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/statuses/:sha"
+ },
+ createUsingTemplate: {
+ headers: {
+ accept: "application/vnd.github.baptiste-preview+json"
+ },
+ method: "POST",
+ params: {
+ description: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ template_owner: {
+ required: true,
+ type: "string"
+ },
+ template_repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:template_owner/:template_repo/generate"
+ },
+ declineInvitation: {
+ method: "DELETE",
+ params: {
+ invitation_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/repository_invitations/:invitation_id"
+ },
+ delete: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo"
+ },
+ deleteCommitComment: {
+ method: "DELETE",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments/:comment_id"
+ },
+ deleteDownload: {
+ method: "DELETE",
+ params: {
+ download_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/downloads/:download_id"
+ },
+ deleteFile: {
+ method: "DELETE",
+ params: {
+ author: {
+ type: "object"
+ },
+ "author.email": {
+ type: "string"
+ },
+ "author.name": {
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ committer: {
+ type: "object"
+ },
+ "committer.email": {
+ type: "string"
+ },
+ "committer.name": {
+ type: "string"
+ },
+ message: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contents/:path"
+ },
+ deleteHook: {
+ method: "DELETE",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks/:hook_id"
+ },
+ deleteInvitation: {
+ method: "DELETE",
+ params: {
+ invitation_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/invitations/:invitation_id"
+ },
+ deleteRelease: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ release_id: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/:release_id"
+ },
+ deleteReleaseAsset: {
+ method: "DELETE",
+ params: {
+ asset_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/assets/:asset_id"
+ },
+ disableAutomatedSecurityFixes: {
+ headers: {
+ accept: "application/vnd.github.london-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/automated-security-fixes"
+ },
+ disablePagesSite: {
+ headers: {
+ accept: "application/vnd.github.switcheroo-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages"
+ },
+ disableVulnerabilityAlerts: {
+ headers: {
+ accept: "application/vnd.github.dorian-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/vulnerability-alerts"
+ },
+ enableAutomatedSecurityFixes: {
+ headers: {
+ accept: "application/vnd.github.london-preview+json"
+ },
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/automated-security-fixes"
+ },
+ enablePagesSite: {
+ headers: {
+ accept: "application/vnd.github.switcheroo-preview+json"
+ },
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ source: {
+ type: "object"
+ },
+ "source.branch": {
+ enum: ["master", "gh-pages"],
+ type: "string"
+ },
+ "source.path": {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages"
+ },
+ enableVulnerabilityAlerts: {
+ headers: {
+ accept: "application/vnd.github.dorian-preview+json"
+ },
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/vulnerability-alerts"
+ },
+ get: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo"
+ },
+ getAppsWithAccessToProtectedBranch: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
+ },
+ getArchiveLink: {
+ method: "GET",
+ params: {
+ archive_format: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/:archive_format/:ref"
+ },
+ getBranch: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch"
+ },
+ getBranchProtection: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection"
+ },
+ getClones: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ per: {
+ enum: ["day", "week"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/traffic/clones"
+ },
+ getCodeFrequencyStats: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stats/code_frequency"
+ },
+ getCollaboratorPermissionLevel: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/collaborators/:username/permission"
+ },
+ getCombinedStatusForRef: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref/status"
+ },
+ getCommit: {
+ method: "GET",
+ params: {
+ commit_sha: {
+ alias: "ref",
+ deprecated: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ alias: "ref",
+ deprecated: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref"
+ },
+ getCommitActivityStats: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stats/commit_activity"
+ },
+ getCommitComment: {
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments/:comment_id"
+ },
+ getCommitRefSha: {
+ deprecated: "octokit.repos.getCommitRefSha() is deprecated, see https://developer.github.com/v3/repos/commits/#get-a-single-commit",
+ headers: {
+ accept: "application/vnd.github.v3.sha"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref"
+ },
+ getContents: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contents/:path"
+ },
+ getContributorsStats: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stats/contributors"
+ },
+ getDeployKey: {
+ method: "GET",
+ params: {
+ key_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/keys/:key_id"
+ },
+ getDeployment: {
+ method: "GET",
+ params: {
+ deployment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments/:deployment_id"
+ },
+ getDeploymentStatus: {
+ method: "GET",
+ params: {
+ deployment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ status_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments/:deployment_id/statuses/:status_id"
+ },
+ getDownload: {
+ method: "GET",
+ params: {
+ download_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/downloads/:download_id"
+ },
+ getHook: {
+ method: "GET",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks/:hook_id"
+ },
+ getLatestPagesBuild: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages/builds/latest"
+ },
+ getLatestRelease: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/latest"
+ },
+ getPages: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages"
+ },
+ getPagesBuild: {
+ method: "GET",
+ params: {
+ build_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages/builds/:build_id"
+ },
+ getParticipationStats: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stats/participation"
+ },
+ getProtectedBranchAdminEnforcement: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins"
+ },
+ getProtectedBranchPullRequestReviewEnforcement: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews"
+ },
+ getProtectedBranchRequiredSignatures: {
+ headers: {
+ accept: "application/vnd.github.zzzax-preview+json"
+ },
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures"
+ },
+ getProtectedBranchRequiredStatusChecks: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks"
+ },
+ getProtectedBranchRestrictions: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions"
+ },
+ getPunchCardStats: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stats/punch_card"
+ },
+ getReadme: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/readme"
+ },
+ getRelease: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ release_id: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/:release_id"
+ },
+ getReleaseAsset: {
+ method: "GET",
+ params: {
+ asset_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/assets/:asset_id"
+ },
+ getReleaseByTag: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tag: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/tags/:tag"
+ },
+ getTeamsWithAccessToProtectedBranch: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ getTopPaths: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/traffic/popular/paths"
+ },
+ getTopReferrers: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/traffic/popular/referrers"
+ },
+ getUsersWithAccessToProtectedBranch: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ getViews: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ per: {
+ enum: ["day", "week"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/traffic/views"
+ },
+ list: {
+ method: "GET",
+ params: {
+ affiliation: {
+ type: "string"
+ },
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated", "pushed", "full_name"],
+ type: "string"
+ },
+ type: {
+ enum: ["all", "owner", "public", "private", "member"],
+ type: "string"
+ },
+ visibility: {
+ enum: ["all", "public", "private"],
+ type: "string"
+ }
+ },
+ url: "/user/repos"
+ },
+ listAppsWithAccessToProtectedBranch: {
+ deprecated: "octokit.repos.listAppsWithAccessToProtectedBranch() has been renamed to octokit.repos.getAppsWithAccessToProtectedBranch() (2019-09-13)",
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
+ },
+ listAssetsForRelease: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ release_id: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/:release_id/assets"
+ },
+ listBranches: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ protected: {
+ type: "boolean"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches"
+ },
+ listBranchesForHeadCommit: {
+ headers: {
+ accept: "application/vnd.github.groot-preview+json"
+ },
+ method: "GET",
+ params: {
+ commit_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:commit_sha/branches-where-head"
+ },
+ listCollaborators: {
+ method: "GET",
+ params: {
+ affiliation: {
+ enum: ["outside", "direct", "all"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/collaborators"
+ },
+ listCommentsForCommit: {
+ method: "GET",
+ params: {
+ commit_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ alias: "commit_sha",
+ deprecated: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:commit_sha/comments"
+ },
+ listCommitComments: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments"
+ },
+ listCommits: {
+ method: "GET",
+ params: {
+ author: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ path: {
+ type: "string"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ },
+ since: {
+ type: "string"
+ },
+ until: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits"
+ },
+ listContributors: {
+ method: "GET",
+ params: {
+ anon: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contributors"
+ },
+ listDeployKeys: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/keys"
+ },
+ listDeploymentStatuses: {
+ method: "GET",
+ params: {
+ deployment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments/:deployment_id/statuses"
+ },
+ listDeployments: {
+ method: "GET",
+ params: {
+ environment: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ },
+ task: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments"
+ },
+ listDownloads: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/downloads"
+ },
+ listForOrg: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated", "pushed", "full_name"],
+ type: "string"
+ },
+ type: {
+ enum: ["all", "public", "private", "forks", "sources", "member", "internal"],
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/repos"
+ },
+ listForUser: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated", "pushed", "full_name"],
+ type: "string"
+ },
+ type: {
+ enum: ["all", "owner", "member"],
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/repos"
+ },
+ listForks: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["newest", "oldest", "stargazers"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/forks"
+ },
+ listHooks: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks"
+ },
+ listInvitations: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/invitations"
+ },
+ listInvitationsForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/repository_invitations"
+ },
+ listLanguages: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/languages"
+ },
+ listPagesBuilds: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages/builds"
+ },
+ listProtectedBranchRequiredStatusChecksContexts: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
+ },
+ listProtectedBranchTeamRestrictions: {
+ deprecated: "octokit.repos.listProtectedBranchTeamRestrictions() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-09)",
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ listProtectedBranchUserRestrictions: {
+ deprecated: "octokit.repos.listProtectedBranchUserRestrictions() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-09)",
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ listPublic: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "integer"
+ }
+ },
+ url: "/repositories"
+ },
+ listPullRequestsAssociatedWithCommit: {
+ headers: {
+ accept: "application/vnd.github.groot-preview+json"
+ },
+ method: "GET",
+ params: {
+ commit_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:commit_sha/pulls"
+ },
+ listReleases: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases"
+ },
+ listStatusesForRef: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref/statuses"
+ },
+ listTags: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/tags"
+ },
+ listTeams: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/teams"
+ },
+ listTeamsWithAccessToProtectedBranch: {
+ deprecated: "octokit.repos.listTeamsWithAccessToProtectedBranch() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-13)",
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ listTopics: {
+ headers: {
+ accept: "application/vnd.github.mercy-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/topics"
+ },
+ listUsersWithAccessToProtectedBranch: {
+ deprecated: "octokit.repos.listUsersWithAccessToProtectedBranch() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-13)",
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ merge: {
+ method: "POST",
+ params: {
+ base: {
+ required: true,
+ type: "string"
+ },
+ commit_message: {
+ type: "string"
+ },
+ head: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/merges"
+ },
+ pingHook: {
+ method: "POST",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks/:hook_id/pings"
+ },
+ removeBranchProtection: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection"
+ },
+ removeCollaborator: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/collaborators/:username"
+ },
+ removeDeployKey: {
+ method: "DELETE",
+ params: {
+ key_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/keys/:key_id"
+ },
+ removeProtectedBranchAdminEnforcement: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins"
+ },
+ removeProtectedBranchAppRestrictions: {
+ method: "DELETE",
+ params: {
+ apps: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
+ },
+ removeProtectedBranchPullRequestReviewEnforcement: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews"
+ },
+ removeProtectedBranchRequiredSignatures: {
+ headers: {
+ accept: "application/vnd.github.zzzax-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures"
+ },
+ removeProtectedBranchRequiredStatusChecks: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks"
+ },
+ removeProtectedBranchRequiredStatusChecksContexts: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ contexts: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
+ },
+ removeProtectedBranchRestrictions: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions"
+ },
+ removeProtectedBranchTeamRestrictions: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ teams: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ removeProtectedBranchUserRestrictions: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ users: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ replaceProtectedBranchAppRestrictions: {
+ method: "PUT",
+ params: {
+ apps: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
+ },
+ replaceProtectedBranchRequiredStatusChecksContexts: {
+ method: "PUT",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ contexts: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
+ },
+ replaceProtectedBranchTeamRestrictions: {
+ method: "PUT",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ teams: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ replaceProtectedBranchUserRestrictions: {
+ method: "PUT",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ users: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ replaceTopics: {
+ headers: {
+ accept: "application/vnd.github.mercy-preview+json"
+ },
+ method: "PUT",
+ params: {
+ names: {
+ required: true,
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/topics"
+ },
+ requestPageBuild: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages/builds"
+ },
+ retrieveCommunityProfileMetrics: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/community/profile"
+ },
+ testPushHook: {
+ method: "POST",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks/:hook_id/tests"
+ },
+ transfer: {
+ method: "POST",
+ params: {
+ new_owner: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_ids: {
+ type: "integer[]"
+ }
+ },
+ url: "/repos/:owner/:repo/transfer"
+ },
+ update: {
+ method: "PATCH",
+ params: {
+ allow_merge_commit: {
+ type: "boolean"
+ },
+ allow_rebase_merge: {
+ type: "boolean"
+ },
+ allow_squash_merge: {
+ type: "boolean"
+ },
+ archived: {
+ type: "boolean"
+ },
+ default_branch: {
+ type: "string"
+ },
+ delete_branch_on_merge: {
+ type: "boolean"
+ },
+ description: {
+ type: "string"
+ },
+ has_issues: {
+ type: "boolean"
+ },
+ has_projects: {
+ type: "boolean"
+ },
+ has_wiki: {
+ type: "boolean"
+ },
+ homepage: {
+ type: "string"
+ },
+ is_template: {
+ type: "boolean"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ visibility: {
+ enum: ["public", "private", "visibility", "internal"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo"
+ },
+ updateBranchProtection: {
+ method: "PUT",
+ params: {
+ allow_deletions: {
+ type: "boolean"
+ },
+ allow_force_pushes: {
+ allowNull: true,
+ type: "boolean"
+ },
+ branch: {
+ required: true,
+ type: "string"
+ },
+ enforce_admins: {
+ allowNull: true,
+ required: true,
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ required_linear_history: {
+ type: "boolean"
+ },
+ required_pull_request_reviews: {
+ allowNull: true,
+ required: true,
+ type: "object"
+ },
+ "required_pull_request_reviews.dismiss_stale_reviews": {
+ type: "boolean"
+ },
+ "required_pull_request_reviews.dismissal_restrictions": {
+ type: "object"
+ },
+ "required_pull_request_reviews.dismissal_restrictions.teams": {
+ type: "string[]"
+ },
+ "required_pull_request_reviews.dismissal_restrictions.users": {
+ type: "string[]"
+ },
+ "required_pull_request_reviews.require_code_owner_reviews": {
+ type: "boolean"
+ },
+ "required_pull_request_reviews.required_approving_review_count": {
+ type: "integer"
+ },
+ required_status_checks: {
+ allowNull: true,
+ required: true,
+ type: "object"
+ },
+ "required_status_checks.contexts": {
+ required: true,
+ type: "string[]"
+ },
+ "required_status_checks.strict": {
+ required: true,
+ type: "boolean"
+ },
+ restrictions: {
+ allowNull: true,
+ required: true,
+ type: "object"
+ },
+ "restrictions.apps": {
+ type: "string[]"
+ },
+ "restrictions.teams": {
+ required: true,
+ type: "string[]"
+ },
+ "restrictions.users": {
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection"
+ },
+ updateCommitComment: {
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments/:comment_id"
+ },
+ updateFile: {
+ deprecated: "octokit.repos.updateFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)",
+ method: "PUT",
+ params: {
+ author: {
+ type: "object"
+ },
+ "author.email": {
+ required: true,
+ type: "string"
+ },
+ "author.name": {
+ required: true,
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ committer: {
+ type: "object"
+ },
+ "committer.email": {
+ required: true,
+ type: "string"
+ },
+ "committer.name": {
+ required: true,
+ type: "string"
+ },
+ content: {
+ required: true,
+ type: "string"
+ },
+ message: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contents/:path"
+ },
+ updateHook: {
+ method: "PATCH",
+ params: {
+ active: {
+ type: "boolean"
+ },
+ add_events: {
+ type: "string[]"
+ },
+ config: {
+ type: "object"
+ },
+ "config.content_type": {
+ type: "string"
+ },
+ "config.insecure_ssl": {
+ type: "string"
+ },
+ "config.secret": {
+ type: "string"
+ },
+ "config.url": {
+ required: true,
+ type: "string"
+ },
+ events: {
+ type: "string[]"
+ },
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ remove_events: {
+ type: "string[]"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks/:hook_id"
+ },
+ updateInformationAboutPagesSite: {
+ method: "PUT",
+ params: {
+ cname: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ source: {
+ enum: ['"gh-pages"', '"master"', '"master /docs"'],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages"
+ },
+ updateInvitation: {
+ method: "PATCH",
+ params: {
+ invitation_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ permissions: {
+ enum: ["read", "write", "admin"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/invitations/:invitation_id"
+ },
+ updateProtectedBranchPullRequestReviewEnforcement: {
+ method: "PATCH",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ dismiss_stale_reviews: {
+ type: "boolean"
+ },
+ dismissal_restrictions: {
+ type: "object"
+ },
+ "dismissal_restrictions.teams": {
+ type: "string[]"
+ },
+ "dismissal_restrictions.users": {
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ require_code_owner_reviews: {
+ type: "boolean"
+ },
+ required_approving_review_count: {
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews"
+ },
+ updateProtectedBranchRequiredStatusChecks: {
+ method: "PATCH",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ contexts: {
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ strict: {
+ type: "boolean"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks"
+ },
+ updateRelease: {
+ method: "PATCH",
+ params: {
+ body: {
+ type: "string"
+ },
+ draft: {
+ type: "boolean"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ prerelease: {
+ type: "boolean"
+ },
+ release_id: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tag_name: {
+ type: "string"
+ },
+ target_commitish: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/:release_id"
+ },
+ updateReleaseAsset: {
+ method: "PATCH",
+ params: {
+ asset_id: {
+ required: true,
+ type: "integer"
+ },
+ label: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/assets/:asset_id"
+ },
+ uploadReleaseAsset: {
+ method: "POST",
+ params: {
+ data: {
+ mapTo: "data",
+ required: true,
+ type: "string | object"
+ },
+ file: {
+ alias: "data",
+ deprecated: true,
+ type: "string | object"
+ },
+ headers: {
+ required: true,
+ type: "object"
+ },
+ "headers.content-length": {
+ required: true,
+ type: "integer"
+ },
+ "headers.content-type": {
+ required: true,
+ type: "string"
+ },
+ label: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ url: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: ":url"
+ }
+ },
+ search: {
+ code: {
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["indexed"],
+ type: "string"
+ }
+ },
+ url: "/search/code"
+ },
+ commits: {
+ headers: {
+ accept: "application/vnd.github.cloak-preview+json"
+ },
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["author-date", "committer-date"],
+ type: "string"
+ }
+ },
+ url: "/search/commits"
+ },
+ issues: {
+ deprecated: "octokit.search.issues() has been renamed to octokit.search.issuesAndPullRequests() (2018-12-27)",
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/search/issues"
+ },
+ issuesAndPullRequests: {
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/search/issues"
+ },
+ labels: {
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ repository_id: {
+ required: true,
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/search/labels"
+ },
+ repos: {
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["stars", "forks", "help-wanted-issues", "updated"],
+ type: "string"
+ }
+ },
+ url: "/search/repositories"
+ },
+ topics: {
+ method: "GET",
+ params: {
+ q: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/search/topics"
+ },
+ users: {
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["followers", "repositories", "joined"],
+ type: "string"
+ }
+ },
+ url: "/search/users"
+ }
+ },
+ teams: {
+ addMember: {
+ deprecated: "octokit.teams.addMember() has been renamed to octokit.teams.addMemberLegacy() (2020-01-16)",
+ method: "PUT",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ addMemberLegacy: {
+ deprecated: "octokit.teams.addMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-team-member-legacy",
+ method: "PUT",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ addOrUpdateMembership: {
+ deprecated: "octokit.teams.addOrUpdateMembership() has been renamed to octokit.teams.addOrUpdateMembershipLegacy() (2020-01-16)",
+ method: "PUT",
+ params: {
+ role: {
+ enum: ["member", "maintainer"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ addOrUpdateMembershipInOrg: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ role: {
+ enum: ["member", "maintainer"],
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/memberships/:username"
+ },
+ addOrUpdateMembershipLegacy: {
+ deprecated: "octokit.teams.addOrUpdateMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-or-update-team-membership-legacy",
+ method: "PUT",
+ params: {
+ role: {
+ enum: ["member", "maintainer"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ addOrUpdateProject: {
+ deprecated: "octokit.teams.addOrUpdateProject() has been renamed to octokit.teams.addOrUpdateProjectLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PUT",
+ params: {
+ permission: {
+ enum: ["read", "write", "admin"],
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ addOrUpdateProjectInOrg: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ permission: {
+ enum: ["read", "write", "admin"],
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/projects/:project_id"
+ },
+ addOrUpdateProjectLegacy: {
+ deprecated: "octokit.teams.addOrUpdateProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-project-legacy",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PUT",
+ params: {
+ permission: {
+ enum: ["read", "write", "admin"],
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ addOrUpdateRepo: {
+ deprecated: "octokit.teams.addOrUpdateRepo() has been renamed to octokit.teams.addOrUpdateRepoLegacy() (2020-01-16)",
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ addOrUpdateRepoInOrg: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo"
+ },
+ addOrUpdateRepoLegacy: {
+ deprecated: "octokit.teams.addOrUpdateRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-repository-legacy",
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ checkManagesRepo: {
+ deprecated: "octokit.teams.checkManagesRepo() has been renamed to octokit.teams.checkManagesRepoLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ checkManagesRepoInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo"
+ },
+ checkManagesRepoLegacy: {
+ deprecated: "octokit.teams.checkManagesRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#check-if-a-team-manages-a-repository-legacy",
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ create: {
+ method: "POST",
+ params: {
+ description: {
+ type: "string"
+ },
+ maintainers: {
+ type: "string[]"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ parent_team_id: {
+ type: "integer"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ privacy: {
+ enum: ["secret", "closed"],
+ type: "string"
+ },
+ repo_names: {
+ type: "string[]"
+ }
+ },
+ url: "/orgs/:org/teams"
+ },
+ createDiscussion: {
+ deprecated: "octokit.teams.createDiscussion() has been renamed to octokit.teams.createDiscussionLegacy() (2020-01-16)",
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/discussions"
+ },
+ createDiscussionComment: {
+ deprecated: "octokit.teams.createDiscussionComment() has been renamed to octokit.teams.createDiscussionCommentLegacy() (2020-01-16)",
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments"
+ },
+ createDiscussionCommentInOrg: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments"
+ },
+ createDiscussionCommentLegacy: {
+ deprecated: "octokit.teams.createDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#create-a-comment-legacy",
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments"
+ },
+ createDiscussionInOrg: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions"
+ },
+ createDiscussionLegacy: {
+ deprecated: "octokit.teams.createDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#create-a-discussion-legacy",
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/discussions"
+ },
+ delete: {
+ deprecated: "octokit.teams.delete() has been renamed to octokit.teams.deleteLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ },
+ deleteDiscussion: {
+ deprecated: "octokit.teams.deleteDiscussion() has been renamed to octokit.teams.deleteDiscussionLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ deleteDiscussionComment: {
+ deprecated: "octokit.teams.deleteDiscussionComment() has been renamed to octokit.teams.deleteDiscussionCommentLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ deleteDiscussionCommentInOrg: {
+ method: "DELETE",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number"
+ },
+ deleteDiscussionCommentLegacy: {
+ deprecated: "octokit.teams.deleteDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#delete-a-comment-legacy",
+ method: "DELETE",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ deleteDiscussionInOrg: {
+ method: "DELETE",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number"
+ },
+ deleteDiscussionLegacy: {
+ deprecated: "octokit.teams.deleteDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#delete-a-discussion-legacy",
+ method: "DELETE",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ deleteInOrg: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug"
+ },
+ deleteLegacy: {
+ deprecated: "octokit.teams.deleteLegacy() is deprecated, see https://developer.github.com/v3/teams/#delete-team-legacy",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ },
+ get: {
+ deprecated: "octokit.teams.get() has been renamed to octokit.teams.getLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ },
+ getByName: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug"
+ },
+ getDiscussion: {
+ deprecated: "octokit.teams.getDiscussion() has been renamed to octokit.teams.getDiscussionLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ getDiscussionComment: {
+ deprecated: "octokit.teams.getDiscussionComment() has been renamed to octokit.teams.getDiscussionCommentLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ getDiscussionCommentInOrg: {
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number"
+ },
+ getDiscussionCommentLegacy: {
+ deprecated: "octokit.teams.getDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#get-a-single-comment-legacy",
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ getDiscussionInOrg: {
+ method: "GET",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number"
+ },
+ getDiscussionLegacy: {
+ deprecated: "octokit.teams.getDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#get-a-single-discussion-legacy",
+ method: "GET",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ getLegacy: {
+ deprecated: "octokit.teams.getLegacy() is deprecated, see https://developer.github.com/v3/teams/#get-team-legacy",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ },
+ getMember: {
+ deprecated: "octokit.teams.getMember() has been renamed to octokit.teams.getMemberLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ getMemberLegacy: {
+ deprecated: "octokit.teams.getMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-member-legacy",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ getMembership: {
+ deprecated: "octokit.teams.getMembership() has been renamed to octokit.teams.getMembershipLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ getMembershipInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/memberships/:username"
+ },
+ getMembershipLegacy: {
+ deprecated: "octokit.teams.getMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-membership-legacy",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ list: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/teams"
+ },
+ listChild: {
+ deprecated: "octokit.teams.listChild() has been renamed to octokit.teams.listChildLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/teams"
+ },
+ listChildInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/teams"
+ },
+ listChildLegacy: {
+ deprecated: "octokit.teams.listChildLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-child-teams-legacy",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/teams"
+ },
+ listDiscussionComments: {
+ deprecated: "octokit.teams.listDiscussionComments() has been renamed to octokit.teams.listDiscussionCommentsLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments"
+ },
+ listDiscussionCommentsInOrg: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments"
+ },
+ listDiscussionCommentsLegacy: {
+ deprecated: "octokit.teams.listDiscussionCommentsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#list-comments-legacy",
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments"
+ },
+ listDiscussions: {
+ deprecated: "octokit.teams.listDiscussions() has been renamed to octokit.teams.listDiscussionsLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions"
+ },
+ listDiscussionsInOrg: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions"
+ },
+ listDiscussionsLegacy: {
+ deprecated: "octokit.teams.listDiscussionsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#list-discussions-legacy",
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions"
+ },
+ listForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/teams"
+ },
+ listMembers: {
+ deprecated: "octokit.teams.listMembers() has been renamed to octokit.teams.listMembersLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ role: {
+ enum: ["member", "maintainer", "all"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/members"
+ },
+ listMembersInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ role: {
+ enum: ["member", "maintainer", "all"],
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/members"
+ },
+ listMembersLegacy: {
+ deprecated: "octokit.teams.listMembersLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-team-members-legacy",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ role: {
+ enum: ["member", "maintainer", "all"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/members"
+ },
+ listPendingInvitations: {
+ deprecated: "octokit.teams.listPendingInvitations() has been renamed to octokit.teams.listPendingInvitationsLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/invitations"
+ },
+ listPendingInvitationsInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/invitations"
+ },
+ listPendingInvitationsLegacy: {
+ deprecated: "octokit.teams.listPendingInvitationsLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-pending-team-invitations-legacy",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/invitations"
+ },
+ listProjects: {
+ deprecated: "octokit.teams.listProjects() has been renamed to octokit.teams.listProjectsLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects"
+ },
+ listProjectsInOrg: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/projects"
+ },
+ listProjectsLegacy: {
+ deprecated: "octokit.teams.listProjectsLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-projects-legacy",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects"
+ },
+ listRepos: {
+ deprecated: "octokit.teams.listRepos() has been renamed to octokit.teams.listReposLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos"
+ },
+ listReposInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/repos"
+ },
+ listReposLegacy: {
+ deprecated: "octokit.teams.listReposLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-repos-legacy",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos"
+ },
+ removeMember: {
+ deprecated: "octokit.teams.removeMember() has been renamed to octokit.teams.removeMemberLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ removeMemberLegacy: {
+ deprecated: "octokit.teams.removeMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-member-legacy",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ removeMembership: {
+ deprecated: "octokit.teams.removeMembership() has been renamed to octokit.teams.removeMembershipLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ removeMembershipInOrg: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/memberships/:username"
+ },
+ removeMembershipLegacy: {
+ deprecated: "octokit.teams.removeMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-membership-legacy",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ removeProject: {
+ deprecated: "octokit.teams.removeProject() has been renamed to octokit.teams.removeProjectLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ removeProjectInOrg: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/projects/:project_id"
+ },
+ removeProjectLegacy: {
+ deprecated: "octokit.teams.removeProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-project-legacy",
+ method: "DELETE",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ removeRepo: {
+ deprecated: "octokit.teams.removeRepo() has been renamed to octokit.teams.removeRepoLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ removeRepoInOrg: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo"
+ },
+ removeRepoLegacy: {
+ deprecated: "octokit.teams.removeRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-repository-legacy",
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ reviewProject: {
+ deprecated: "octokit.teams.reviewProject() has been renamed to octokit.teams.reviewProjectLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ reviewProjectInOrg: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/projects/:project_id"
+ },
+ reviewProjectLegacy: {
+ deprecated: "octokit.teams.reviewProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#review-a-team-project-legacy",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ update: {
+ deprecated: "octokit.teams.update() has been renamed to octokit.teams.updateLegacy() (2020-01-16)",
+ method: "PATCH",
+ params: {
+ description: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ parent_team_id: {
+ type: "integer"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ privacy: {
+ enum: ["secret", "closed"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ },
+ updateDiscussion: {
+ deprecated: "octokit.teams.updateDiscussion() has been renamed to octokit.teams.updateDiscussionLegacy() (2020-01-16)",
+ method: "PATCH",
+ params: {
+ body: {
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ updateDiscussionComment: {
+ deprecated: "octokit.teams.updateDiscussionComment() has been renamed to octokit.teams.updateDiscussionCommentLegacy() (2020-01-16)",
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ updateDiscussionCommentInOrg: {
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number"
+ },
+ updateDiscussionCommentLegacy: {
+ deprecated: "octokit.teams.updateDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#edit-a-comment-legacy",
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ updateDiscussionInOrg: {
+ method: "PATCH",
+ params: {
+ body: {
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number"
+ },
+ updateDiscussionLegacy: {
+ deprecated: "octokit.teams.updateDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#edit-a-discussion-legacy",
+ method: "PATCH",
+ params: {
+ body: {
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ updateInOrg: {
+ method: "PATCH",
+ params: {
+ description: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ parent_team_id: {
+ type: "integer"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ privacy: {
+ enum: ["secret", "closed"],
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug"
+ },
+ updateLegacy: {
+ deprecated: "octokit.teams.updateLegacy() is deprecated, see https://developer.github.com/v3/teams/#edit-team-legacy",
+ method: "PATCH",
+ params: {
+ description: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ parent_team_id: {
+ type: "integer"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ privacy: {
+ enum: ["secret", "closed"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ }
+ },
+ users: {
+ addEmails: {
+ method: "POST",
+ params: {
+ emails: {
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/user/emails"
+ },
+ block: {
+ method: "PUT",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/blocks/:username"
+ },
+ checkBlocked: {
+ method: "GET",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/blocks/:username"
+ },
+ checkFollowing: {
+ method: "GET",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/following/:username"
+ },
+ checkFollowingForUser: {
+ method: "GET",
+ params: {
+ target_user: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/following/:target_user"
+ },
+ createGpgKey: {
+ method: "POST",
+ params: {
+ armored_public_key: {
+ type: "string"
+ }
+ },
+ url: "/user/gpg_keys"
+ },
+ createPublicKey: {
+ method: "POST",
+ params: {
+ key: {
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/user/keys"
+ },
+ deleteEmails: {
+ method: "DELETE",
+ params: {
+ emails: {
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/user/emails"
+ },
+ deleteGpgKey: {
+ method: "DELETE",
+ params: {
+ gpg_key_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/gpg_keys/:gpg_key_id"
+ },
+ deletePublicKey: {
+ method: "DELETE",
+ params: {
+ key_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/keys/:key_id"
+ },
+ follow: {
+ method: "PUT",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/following/:username"
+ },
+ getAuthenticated: {
+ method: "GET",
+ params: {},
+ url: "/user"
+ },
+ getByUsername: {
+ method: "GET",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username"
+ },
+ getContextForUser: {
+ method: "GET",
+ params: {
+ subject_id: {
+ type: "string"
+ },
+ subject_type: {
+ enum: ["organization", "repository", "issue", "pull_request"],
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/hovercard"
+ },
+ getGpgKey: {
+ method: "GET",
+ params: {
+ gpg_key_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/gpg_keys/:gpg_key_id"
+ },
+ getPublicKey: {
+ method: "GET",
+ params: {
+ key_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/keys/:key_id"
+ },
+ list: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/users"
+ },
+ listBlocked: {
+ method: "GET",
+ params: {},
+ url: "/user/blocks"
+ },
+ listEmails: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/emails"
+ },
+ listFollowersForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/followers"
+ },
+ listFollowersForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/followers"
+ },
+ listFollowingForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/following"
+ },
+ listFollowingForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/following"
+ },
+ listGpgKeys: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/gpg_keys"
+ },
+ listGpgKeysForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/gpg_keys"
+ },
+ listPublicEmails: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/public_emails"
+ },
+ listPublicKeys: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/keys"
+ },
+ listPublicKeysForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/keys"
+ },
+ togglePrimaryEmailVisibility: {
+ method: "PATCH",
+ params: {
+ email: {
+ required: true,
+ type: "string"
+ },
+ visibility: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/email/visibility"
+ },
+ unblock: {
+ method: "DELETE",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/blocks/:username"
+ },
+ unfollow: {
+ method: "DELETE",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/following/:username"
+ },
+ updateAuthenticated: {
+ method: "PATCH",
+ params: {
+ bio: {
+ type: "string"
+ },
+ blog: {
+ type: "string"
+ },
+ company: {
+ type: "string"
+ },
+ email: {
+ type: "string"
+ },
+ hireable: {
+ type: "boolean"
+ },
+ location: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ }
+ },
+ url: "/user"
+ }
+ }
+};
+
+const VERSION = "2.4.0";
+
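+// Attaches each route definition above to the octokit instance: for every
+// scope/method pair (e.g. `teams.list`, defined as GET /orgs/:org/teams), a
+// request function pre-bound to the route's method, url and headers is created,
+// with the route's `params` used for validation, so it can be called as
+// octokit.teams.list({ org }).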
+function registerEndpoints(octokit, routes) {
+ Object.keys(routes).forEach(namespaceName => {
+ if (!octokit[namespaceName]) {
+ octokit[namespaceName] = {};
+ }
+
+ Object.keys(routes[namespaceName]).forEach(apiName => {
+ const apiOptions = routes[namespaceName][apiName];
+ const endpointDefaults = ["method", "url", "headers"].reduce((map, key) => {
+ if (typeof apiOptions[key] !== "undefined") {
+ map[key] = apiOptions[key];
+ }
+
+ return map;
+ }, {});
+ endpointDefaults.request = {
+ validate: apiOptions.params
+ };
+ let request = octokit.request.defaults(endpointDefaults); // patch request & endpoint methods to support deprecated parameters.
+ // Not the most elegant solution, but we don’t want to move deprecation
+ // logic into octokit/endpoint.js as it’s out of scope
+
+ const hasDeprecatedParam = Object.keys(apiOptions.params || {}).find(key => apiOptions.params[key].deprecated);
+
+ if (hasDeprecatedParam) {
+ const patch = patchForDeprecation.bind(null, octokit, apiOptions);
+ request = patch(octokit.request.defaults(endpointDefaults), `.${namespaceName}.${apiName}()`);
+ request.endpoint = patch(request.endpoint, `.${namespaceName}.${apiName}.endpoint()`);
+ request.endpoint.merge = patch(request.endpoint.merge, `.${namespaceName}.${apiName}.endpoint.merge()`);
+ }
+
+ if (apiOptions.deprecated) {
+ octokit[namespaceName][apiName] = Object.assign(function deprecatedEndpointMethod() {
+ octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] ${apiOptions.deprecated}`));
+ octokit[namespaceName][apiName] = request;
+ return request.apply(null, arguments);
+ }, request);
+ return;
+ }
+
+ octokit[namespaceName][apiName] = request;
+ });
+ });
+}
+
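+// Wraps a request function so that a call using a deprecated parameter name
+// logs a warning and copies the value to the parameter's `alias` (for example,
+// the deprecated `file` parameter above is forwarded as `data`) before the
+// underlying request runs.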
+function patchForDeprecation(octokit, apiOptions, method, methodName) {
+ const patchedMethod = options => {
+ options = Object.assign({}, options);
+ Object.keys(options).forEach(key => {
+ if (apiOptions.params[key] && apiOptions.params[key].deprecated) {
+ const aliasKey = apiOptions.params[key].alias;
+ octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] "${key}" parameter is deprecated for "${methodName}". Use "${aliasKey}" instead`));
+
+ if (!(aliasKey in options)) {
+ options[aliasKey] = options[key];
+ }
+
+ delete options[key];
+ }
+ });
+ return method(options);
+ };
+
+ Object.keys(method).forEach(key => {
+ patchedMethod[key] = method[key];
+ });
+ return patchedMethod;
+}
+
+/**
+ * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary
+ * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is
+ * done, we will remove the registerEndpoints methods and return the methods
+ * directly as with the other plugins. At that point we will also remove the
+ * legacy workarounds and deprecations.
+ *
+ * See the plan at
+ * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1
+ */
+
+function restEndpointMethods(octokit) {
+ // @ts-ignore
+ octokit.registerEndpoints = registerEndpoints.bind(null, octokit);
+ registerEndpoints(octokit, endpointsByScope); // Aliasing scopes for backward compatibility
+ // See https://github.com/octokit/rest.js/pull/1134
+
+ [["gitdata", "git"], ["authorization", "oauthAuthorizations"], ["pullRequests", "pulls"]].forEach(([deprecatedScope, scope]) => {
+ Object.defineProperty(octokit, deprecatedScope, {
+ get() {
+ octokit.log.warn( // @ts-ignore
+ new deprecation.Deprecation(`[@octokit/plugin-rest-endpoint-methods] "octokit.${deprecatedScope}.*" methods are deprecated, use "octokit.${scope}.*" instead`)); // @ts-ignore
+
+ return octokit[scope];
+ }
+
+ });
+ });
+ return {};
+}
+restEndpointMethods.VERSION = VERSION;
+
+exports.restEndpointMethods = restEndpointMethods;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 562:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var osName = _interopDefault(__webpack_require__(2));
+
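+// Builds a User-Agent string such as "Node.js/12.16.1 (Linux 5.4; x64)"; the
+// catch branch handles environments where os-name fails (e.g. `wmic` is
+// unavailable on Windows).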
+function getUserAgent() {
+ try {
+ return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
+ } catch (error) {
+ if (/wmic os get Caption/.test(error.message)) {
+ return "Windows ";
+ }
+
+ return "";
+ }
+}
+
+exports.getUserAgent = getUserAgent;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 563:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = getPreviousPage
+
+const getPage = __webpack_require__(265)
+
+function getPreviousPage (octokit, link, headers) {
+ return getPage(octokit, link, 'prev', headers)
+}
+
+
+/***/ }),
+
+/***/ 568:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const path = __webpack_require__(622);
+const niceTry = __webpack_require__(948);
+const resolveCommand = __webpack_require__(489);
+const escape = __webpack_require__(462);
+const readShebang = __webpack_require__(389);
+const semver = __webpack_require__(48);
+
+const isWin = process.platform === 'win32';
+const isExecutableRegExp = /\.(?:com|exe)$/i;
+const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i;
+
+// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0
+const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false;
+
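+// Resolves the command to a file on disk and, if that file starts with a
+// shebang, re-resolves using the shebang interpreter with the original file
+// prepended to the argument list.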
+function detectShebang(parsed) {
+ parsed.file = resolveCommand(parsed);
+
+ const shebang = parsed.file && readShebang(parsed.file);
+
+ if (shebang) {
+ parsed.args.unshift(parsed.file);
+ parsed.command = shebang;
+
+ return resolveCommand(parsed);
+ }
+
+ return parsed.file;
+}
+
+function parseNonShell(parsed) {
+ if (!isWin) {
+ return parsed;
+ }
+
+ // Detect & add support for shebangs
+ const commandFile = detectShebang(parsed);
+
+ // We don't need a shell if the command filename is an executable
+ const needsShell = !isExecutableRegExp.test(commandFile);
+
+ // If a shell is required, use cmd.exe and take care of escaping everything correctly
+ // Note that `forceShell` is an hidden option used only in tests
+ if (parsed.options.forceShell || needsShell) {
+ // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
+ // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument
+ // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
+ // we need to double escape them
+ const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);
+
+ // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
+ // This is necessary otherwise it will always fail with ENOENT in those cases
+ parsed.command = path.normalize(parsed.command);
+
+ // Escape command & arguments
+ parsed.command = escape.command(parsed.command);
+ parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));
+
+ const shellCommand = [parsed.command].concat(parsed.args).join(' ');
+
+ parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
+ parsed.command = process.env.comspec || 'cmd.exe';
+ parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
+ }
+
+ return parsed;
+}
+
+function parseShell(parsed) {
+ // If node supports the shell option, there's no need to mimic its behavior
+ if (supportsShellOption) {
+ return parsed;
+ }
+
+ // Mimic node shell option
+ // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335
+ const shellCommand = [parsed.command].concat(parsed.args).join(' ');
+
+ if (isWin) {
+ parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe';
+ parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
+ parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
+ } else {
+ if (typeof parsed.options.shell === 'string') {
+ parsed.command = parsed.options.shell;
+ } else if (process.platform === 'android') {
+ parsed.command = '/system/bin/sh';
+ } else {
+ parsed.command = '/bin/sh';
+ }
+
+ parsed.args = ['-c', shellCommand];
+ }
+
+ return parsed;
+}
+
+function parse(command, args, options) {
+ // Normalize arguments, similar to nodejs
+ if (args && !Array.isArray(args)) {
+ options = args;
+ args = null;
+ }
+
+ args = args ? args.slice(0) : []; // Clone array to avoid changing the original
+ options = Object.assign({}, options); // Clone object to avoid changing the original
+
+ // Build our parsed object
+ const parsed = {
+ command,
+ args,
+ options,
+ file: undefined,
+ original: {
+ command,
+ args,
+ },
+ };
+
+ // Delegate further parsing to shell or non-shell
+ return options.shell ? parseShell(parsed) : parseNonShell(parsed);
+}
+
+module.exports = parse;
+
+
+/***/ }),
+
+/***/ 570:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const exec_1 = __webpack_require__(986);
+const io = __importStar(__webpack_require__(1));
+const fs_1 = __webpack_require__(747);
+const path = __importStar(__webpack_require__(622));
+const constants_1 = __webpack_require__(573);
+function isGnuTar() {
+ return __awaiter(this, void 0, void 0, function* () {
+ core.debug("Checking tar --version");
+ let versionOutput = "";
+ yield exec_1.exec("tar --version", [], {
+ ignoreReturnCode: true,
+ silent: true,
+ listeners: {
+ stdout: (data) => (versionOutput += data.toString()),
+ stderr: (data) => (versionOutput += data.toString())
+ }
+ });
+ core.debug(versionOutput.trim());
+ return versionOutput.toUpperCase().includes("GNU TAR");
+ });
+}
+exports.isGnuTar = isGnuTar;
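+// Resolves which tar binary to invoke: on Windows the BSD tar shipped in
+// System32 is preferred; otherwise the first `tar` on PATH is used, adding
+// `--force-local` for GNU tar so paths containing colons (drive letters) are
+// treated as local files.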
+function getTarPath(args) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Explicitly use BSD Tar on Windows
+ const IS_WINDOWS = process.platform === "win32";
+ if (IS_WINDOWS) {
+ const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
+ if (fs_1.existsSync(systemTar)) {
+ return systemTar;
+ }
+            else if (yield isGnuTar()) {
+ args.push("--force-local");
+ }
+ }
+ return yield io.which("tar", true);
+ });
+}
+function execTar(args, cwd) {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ yield exec_1.exec(`"${yield getTarPath(args)}"`, args, { cwd: cwd });
+ }
+ catch (error) {
+ throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+ }
+ });
+}
+function getWorkingDirectory() {
+ var _a;
+ return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd();
+}
+function extractTar(archivePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Create directory to extract tar into
+ const workingDirectory = getWorkingDirectory();
+ yield io.mkdirP(workingDirectory);
+ const args = [
+ "-xz",
+ "-f",
+ archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+ "-P",
+ "-C",
+ workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
+ ];
+ yield execTar(args);
+ });
+}
+exports.extractTar = extractTar;
+function createTar(archiveFolder, sourceDirectories) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Write source directories to manifest.txt to avoid command length limits
+ const manifestFilename = "manifest.txt";
+ fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n"));
+ const workingDirectory = getWorkingDirectory();
+ const args = [
+ "-cz",
+ "-f",
+ constants_1.CacheFilename.replace(new RegExp("\\" + path.sep, "g"), "/"),
+ "-P",
+ "-C",
+ workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
+ "--files-from",
+ manifestFilename
+ ];
+ yield execTar(args, archiveFolder);
+ });
+}
+exports.createTar = createTar;
+
+
+/***/ }),
+
+/***/ 573:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+var Inputs;
+(function (Inputs) {
+ Inputs["Key"] = "key";
+ Inputs["Path"] = "path";
+ Inputs["RestoreKeys"] = "restore-keys";
+})(Inputs = exports.Inputs || (exports.Inputs = {}));
+var Outputs;
+(function (Outputs) {
+ Outputs["CacheHit"] = "cache-hit";
+})(Outputs = exports.Outputs || (exports.Outputs = {}));
+var State;
+(function (State) {
+ State["CacheKey"] = "CACHE_KEY";
+ State["CacheResult"] = "CACHE_RESULT";
+})(State = exports.State || (exports.State = {}));
+var Events;
+(function (Events) {
+ Events["Key"] = "GITHUB_EVENT_NAME";
+ Events["Push"] = "push";
+ Events["PullRequest"] = "pull_request";
+})(Events = exports.Events || (exports.Events = {}));
+exports.CacheFilename = "cache.tgz";
+// Socket timeout in milliseconds during download. If no traffic is received
+// over the socket during this period, the socket is destroyed and the download
+// is aborted.
+exports.SocketTimeout = 5000;
+
+
+/***/ }),
+
+/***/ 577:
+/***/ (function(module) {
+
+module.exports = getPageLinks
+
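+// Parses a Link response header (or an object carrying one) into a map of
+// rel name to URL, e.g. links.next, links.prev, links.last.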
+function getPageLinks (link) {
+ link = link.link || link.headers.link || ''
+
+ const links = {}
+
+ // link format:
+  // '<url>; rel="next", <url>; rel="last"'
+ link.replace(/<([^>]*)>;\s*rel="([\w]*)"/g, (m, uri, type) => {
+ links[type] = uri
+ })
+
+ return links
+}
+
+
+/***/ }),
+
+/***/ 597:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const pathHelper = __webpack_require__(972);
+const internal_match_kind_1 = __webpack_require__(327);
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Given an array of patterns, returns an array of paths to search.
+ * Duplicates and paths under other included paths are filtered out.
+ */
+function getSearchPaths(patterns) {
+ // Ignore negate patterns
+ patterns = patterns.filter(x => !x.negate);
+ // Create a map of all search paths
+ const searchPathMap = {};
+ for (const pattern of patterns) {
+ const key = IS_WINDOWS
+ ? pattern.searchPath.toUpperCase()
+ : pattern.searchPath;
+ searchPathMap[key] = 'candidate';
+ }
+ const result = [];
+ for (const pattern of patterns) {
+ // Check if already included
+ const key = IS_WINDOWS
+ ? pattern.searchPath.toUpperCase()
+ : pattern.searchPath;
+ if (searchPathMap[key] === 'included') {
+ continue;
+ }
+ // Check for an ancestor search path
+ let foundAncestor = false;
+ let tempKey = key;
+ let parent = pathHelper.dirname(tempKey);
+ while (parent !== tempKey) {
+ if (searchPathMap[parent]) {
+ foundAncestor = true;
+ break;
+ }
+ tempKey = parent;
+ parent = pathHelper.dirname(tempKey);
+ }
+ // Include the search pattern in the result
+ if (!foundAncestor) {
+ result.push(pattern.searchPath);
+ searchPathMap[key] = 'included';
+ }
+ }
+ return result;
+}
+exports.getSearchPaths = getSearchPaths;
+/**
+ * Matches the patterns against the path
+ */
+function match(patterns, itemPath) {
+ let result = internal_match_kind_1.MatchKind.None;
+ for (const pattern of patterns) {
+ if (pattern.negate) {
+ result &= ~pattern.match(itemPath);
+ }
+ else {
+ result |= pattern.match(itemPath);
+ }
+ }
+ return result;
+}
+exports.match = match;
+/**
+ * Checks whether to descend further into the directory
+ */
+function partialMatch(patterns, itemPath) {
+ return patterns.some(x => !x.negate && x.partialMatch(itemPath));
+}
+exports.partialMatch = partialMatch;
+//# sourceMappingURL=internal-pattern-helper.js.map
+
+/***/ }),
+
+/***/ 601:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __webpack_require__(470);
+/**
+ * Returns a copy with defaults filled in.
+ */
+function getOptions(copy) {
+ const result = {
+ followSymbolicLinks: true,
+ implicitDescendants: true,
+ omitBrokenSymbolicLinks: true
+ };
+ if (copy) {
+ if (typeof copy.followSymbolicLinks === 'boolean') {
+ result.followSymbolicLinks = copy.followSymbolicLinks;
+ core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
+ }
+ if (typeof copy.implicitDescendants === 'boolean') {
+ result.implicitDescendants = copy.implicitDescendants;
+ core.debug(`implicitDescendants '${result.implicitDescendants}'`);
+ }
+ if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
+ result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
+ core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
+ }
+ }
+ return result;
+}
+exports.getOptions = getOptions;
+//# sourceMappingURL=internal-glob-options-helper.js.map
+
+/***/ }),
+
+/***/ 605:
+/***/ (function(module) {
+
+module.exports = require("http");
+
+/***/ }),
+
+/***/ 614:
+/***/ (function(module) {
+
+module.exports = require("events");
+
+/***/ }),
+
+/***/ 621:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const path = __webpack_require__(622);
+const pathKey = __webpack_require__(39);
+
+module.exports = opts => {
+ opts = Object.assign({
+ cwd: process.cwd(),
+ path: process.env[pathKey()]
+ }, opts);
+
+ let prev;
+ let pth = path.resolve(opts.cwd);
+ const ret = [];
+
+ while (prev !== pth) {
+ ret.push(path.join(pth, 'node_modules/.bin'));
+ prev = pth;
+ pth = path.resolve(pth, '..');
+ }
+
+ // ensure the running `node` binary is used
+ ret.push(path.dirname(process.execPath));
+
+ return ret.concat(opts.path).join(path.delimiter);
+};
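+// For example (hypothetical layout): with cwd '/repo/pkg', the function above returns
+// '/repo/pkg/node_modules/.bin:/repo/node_modules/.bin:/node_modules/.bin:<dir of node binary>:<existing PATH>'
+// (joined with ';' instead of ':' on Windows).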
+
+module.exports.env = opts => {
+ opts = Object.assign({
+ env: process.env
+ }, opts);
+
+ const env = Object.assign({}, opts.env);
+ const path = pathKey({env});
+
+ opts.path = env[path];
+ env[path] = module.exports(opts);
+
+ return env;
+};
+
+
+/***/ }),
+
+/***/ 622:
+/***/ (function(module) {
+
+module.exports = require("path");
+
+/***/ }),
+
+/***/ 631:
+/***/ (function(module) {
+
+module.exports = require("net");
+
+/***/ }),
+
+/***/ 634:
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const tc = __importStar(__webpack_require__(533));
+const path = __importStar(__webpack_require__(622));
+const semver = __importStar(__webpack_require__(864));
+const httpm = __importStar(__webpack_require__(539));
+const sys = __importStar(__webpack_require__(646));
+const core_1 = __webpack_require__(470);
+function downloadGo(versionSpec, stable) {
+ return __awaiter(this, void 0, void 0, function* () {
+ let toolPath;
+ try {
+ let match = yield findMatch(versionSpec, stable);
+ if (match) {
+ // download
+ core_1.debug(`match ${match.version}`);
+ let downloadUrl = `https://storage.googleapis.com/golang/${match.files[0].filename}`;
+ console.log(`Downloading from ${downloadUrl}`);
+ let downloadPath = yield tc.downloadTool(downloadUrl);
+ core_1.debug(`downloaded to ${downloadPath}`);
+ // extract
+ console.log('Extracting ...');
+ let extPath = sys.getPlatform() == 'windows'
+ ? yield tc.extractZip(downloadPath)
+ : yield tc.extractTar(downloadPath);
+ core_1.debug(`extracted to ${extPath}`);
+ // extracts with a root folder that matches the fileName downloaded
+ const toolRoot = path.join(extPath, 'go');
+ toolPath = yield tc.cacheDir(toolRoot, 'go', makeSemver(match.version));
+ }
+ }
+ catch (error) {
+ throw new Error(`Failed to download version ${versionSpec}: ${error}`);
+ }
+ return toolPath;
+ });
+}
+exports.downloadGo = downloadGo;
+function findMatch(versionSpec, stable) {
+ return __awaiter(this, void 0, void 0, function* () {
+ let archFilter = sys.getArch();
+ let platFilter = sys.getPlatform();
+ let result;
+ let match;
+ const dlUrl = 'https://golang.org/dl/?mode=json&include=all';
+ let candidates = yield module.exports.getVersions(dlUrl);
+ if (!candidates) {
+ throw new Error(`golang download url did not return results`);
+ }
+ let goFile;
+ for (let i = 0; i < candidates.length; i++) {
+ let candidate = candidates[i];
+ let version = makeSemver(candidate.version);
+            // 1.13.0 is advertised as 1.13, which prevents matching exactly 1.13.0,
+            // since a semver spec of 1.13 would match the latest 1.13.x release
+ let parts = version.split('.');
+ if (parts.length == 2) {
+ version = version + '.0';
+ }
+ core_1.debug(`check ${version} satisfies ${versionSpec}`);
+ if (semver.satisfies(version, versionSpec) &&
+ (!stable || candidate.stable === stable)) {
+ goFile = candidate.files.find(file => {
+ core_1.debug(`${file.arch}===${archFilter} && ${file.os}===${platFilter}`);
+ return file.arch === archFilter && file.os === platFilter;
+ });
+ if (goFile) {
+ core_1.debug(`matched ${candidate.version}`);
+ match = candidate;
+ break;
+ }
+ }
+ }
+ if (match && goFile) {
+ // clone since we're mutating the file list to be only the file that matches
+ result = Object.assign({}, match);
+ result.files = [goFile];
+ }
+ return result;
+ });
+}
+exports.findMatch = findMatch;
+function getVersions(dlUrl) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // this returns versions descending so latest is first
+ let http = new httpm.HttpClient('setup-go');
+ return (yield http.getJson(dlUrl)).result;
+ });
+}
+exports.getVersions = getVersions;
+//
+// Convert the go version syntax into semver for semver matching
+// 1.13.1 => 1.13.1
+// 1.13 => 1.13.0
+// 1.10beta1 => 1.10.0-beta1, 1.10rc1 => 1.10.0-rc1
+// 1.8.5beta1 => 1.8.5-beta1, 1.8.5rc1 => 1.8.5-rc1
+function makeSemver(version) {
+ version = version.replace('go', '');
+ version = version.replace('beta', '-beta').replace('rc', '-rc');
+ let parts = version.split('-');
+ let verPart = parts[0];
+ let prereleasePart = parts.length > 1 ? `-${parts[1]}` : '';
+ let verParts = verPart.split('.');
+ if (verParts.length == 2) {
+ verPart += '.0';
+ }
+ return `${verPart}${prereleasePart}`;
+}
+exports.makeSemver = makeSemver;
+
+
+/***/ }),
+
+/***/ 644:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+exports.alphasort = alphasort
+exports.alphasorti = alphasorti
+exports.setopts = setopts
+exports.ownProp = ownProp
+exports.makeAbs = makeAbs
+exports.finish = finish
+exports.mark = mark
+exports.isIgnored = isIgnored
+exports.childrenIgnored = childrenIgnored
+
+function ownProp (obj, field) {
+ return Object.prototype.hasOwnProperty.call(obj, field)
+}
+
+var path = __webpack_require__(622)
+var minimatch = __webpack_require__(93)
+var isAbsolute = __webpack_require__(681)
+var Minimatch = minimatch.Minimatch
+
+function alphasorti (a, b) {
+ return a.toLowerCase().localeCompare(b.toLowerCase())
+}
+
+function alphasort (a, b) {
+ return a.localeCompare(b)
+}
+
+function setupIgnores (self, options) {
+ self.ignore = options.ignore || []
+
+ if (!Array.isArray(self.ignore))
+ self.ignore = [self.ignore]
+
+ if (self.ignore.length) {
+ self.ignore = self.ignore.map(ignoreMap)
+ }
+}
+
+// ignore patterns are always in dot:true mode.
+function ignoreMap (pattern) {
+ var gmatcher = null
+ if (pattern.slice(-3) === '/**') {
+ var gpattern = pattern.replace(/(\/\*\*)+$/, '')
+ gmatcher = new Minimatch(gpattern, { dot: true })
+ }
+
+ return {
+ matcher: new Minimatch(pattern, { dot: true }),
+ gmatcher: gmatcher
+ }
+}
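+// For example: an ignore pattern of 'node_modules/**' yields a matcher for everything under
+// node_modules plus a gmatcher built from 'node_modules', so the directory itself and its
+// contents are both treated as ignored (see isIgnored/childrenIgnored below).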
+
+function setopts (self, pattern, options) {
+ if (!options)
+ options = {}
+
+ // base-matching: just use globstar for that.
+ if (options.matchBase && -1 === pattern.indexOf("/")) {
+ if (options.noglobstar) {
+ throw new Error("base matching requires globstar")
+ }
+ pattern = "**/" + pattern
+ }
+
+ self.silent = !!options.silent
+ self.pattern = pattern
+ self.strict = options.strict !== false
+ self.realpath = !!options.realpath
+ self.realpathCache = options.realpathCache || Object.create(null)
+ self.follow = !!options.follow
+ self.dot = !!options.dot
+ self.mark = !!options.mark
+ self.nodir = !!options.nodir
+ if (self.nodir)
+ self.mark = true
+ self.sync = !!options.sync
+ self.nounique = !!options.nounique
+ self.nonull = !!options.nonull
+ self.nosort = !!options.nosort
+ self.nocase = !!options.nocase
+ self.stat = !!options.stat
+ self.noprocess = !!options.noprocess
+ self.absolute = !!options.absolute
+
+ self.maxLength = options.maxLength || Infinity
+ self.cache = options.cache || Object.create(null)
+ self.statCache = options.statCache || Object.create(null)
+ self.symlinks = options.symlinks || Object.create(null)
+
+ setupIgnores(self, options)
+
+ self.changedCwd = false
+ var cwd = process.cwd()
+ if (!ownProp(options, "cwd"))
+ self.cwd = cwd
+ else {
+ self.cwd = path.resolve(options.cwd)
+ self.changedCwd = self.cwd !== cwd
+ }
+
+ self.root = options.root || path.resolve(self.cwd, "/")
+ self.root = path.resolve(self.root)
+ if (process.platform === "win32")
+ self.root = self.root.replace(/\\/g, "/")
+
+ // TODO: is an absolute `cwd` supposed to be resolved against `root`?
+ // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
+ self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
+ if (process.platform === "win32")
+ self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
+ self.nomount = !!options.nomount
+
+ // disable comments and negation in Minimatch.
+ // Note that they are not supported in Glob itself anyway.
+ options.nonegate = true
+ options.nocomment = true
+
+ self.minimatch = new Minimatch(pattern, options)
+ self.options = self.minimatch.options
+}
+
+function finish (self) {
+ var nou = self.nounique
+ var all = nou ? [] : Object.create(null)
+
+ for (var i = 0, l = self.matches.length; i < l; i ++) {
+ var matches = self.matches[i]
+ if (!matches || Object.keys(matches).length === 0) {
+ if (self.nonull) {
+ // do like the shell, and spit out the literal glob
+ var literal = self.minimatch.globSet[i]
+ if (nou)
+ all.push(literal)
+ else
+ all[literal] = true
+ }
+ } else {
+ // had matches
+ var m = Object.keys(matches)
+ if (nou)
+ all.push.apply(all, m)
+ else
+ m.forEach(function (m) {
+ all[m] = true
+ })
+ }
+ }
+
+ if (!nou)
+ all = Object.keys(all)
+
+ if (!self.nosort)
+ all = all.sort(self.nocase ? alphasorti : alphasort)
+
+ // at *some* point we statted all of these
+ if (self.mark) {
+ for (var i = 0; i < all.length; i++) {
+ all[i] = self._mark(all[i])
+ }
+ if (self.nodir) {
+ all = all.filter(function (e) {
+ var notDir = !(/\/$/.test(e))
+ var c = self.cache[e] || self.cache[makeAbs(self, e)]
+ if (notDir && c)
+ notDir = c !== 'DIR' && !Array.isArray(c)
+ return notDir
+ })
+ }
+ }
+
+ if (self.ignore.length)
+ all = all.filter(function(m) {
+ return !isIgnored(self, m)
+ })
+
+ self.found = all
+}
+
+function mark (self, p) {
+ var abs = makeAbs(self, p)
+ var c = self.cache[abs]
+ var m = p
+ if (c) {
+ var isDir = c === 'DIR' || Array.isArray(c)
+ var slash = p.slice(-1) === '/'
+
+ if (isDir && !slash)
+ m += '/'
+ else if (!isDir && slash)
+ m = m.slice(0, -1)
+
+ if (m !== p) {
+ var mabs = makeAbs(self, m)
+ self.statCache[mabs] = self.statCache[abs]
+ self.cache[mabs] = self.cache[abs]
+ }
+ }
+
+ return m
+}
+
+// lotta situps...
+function makeAbs (self, f) {
+ var abs = f
+ if (f.charAt(0) === '/') {
+ abs = path.join(self.root, f)
+ } else if (isAbsolute(f) || f === '') {
+ abs = f
+ } else if (self.changedCwd) {
+ abs = path.resolve(self.cwd, f)
+ } else {
+ abs = path.resolve(f)
+ }
+
+ if (process.platform === 'win32')
+ abs = abs.replace(/\\/g, '/')
+
+ return abs
+}
+
+
+// Returns true if the path matches an ignore pattern. When a pattern ends with globstar '**',
+// its parent directory is treated as ignored too.
+// Ex: if node_modules/** is the pattern, 'node_modules' is added to the ignore list along with its contents.
+function isIgnored (self, path) {
+ if (!self.ignore.length)
+ return false
+
+ return self.ignore.some(function(item) {
+ return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
+ })
+}
+
+function childrenIgnored (self, path) {
+ if (!self.ignore.length)
+ return false
+
+ return self.ignore.some(function(item) {
+ return !!(item.gmatcher && item.gmatcher.match(path))
+ })
+}
+
+
+/***/ }),
+
+/***/ 646:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+let os = __webpack_require__(87);
+function getPlatform() {
+ // darwin and linux match already
+ // freebsd not supported yet but future proofed.
+ // 'aix', 'darwin', 'freebsd', 'linux', 'openbsd', 'sunos', and 'win32'
+ let plat = os.platform();
+ // wants 'darwin', 'freebsd', 'linux', 'windows'
+ if (plat === 'win32') {
+ plat = 'windows';
+ }
+ return plat;
+}
+exports.getPlatform = getPlatform;
+function getArch() {
+ // 'arm', 'arm64', 'ia32', 'mips', 'mipsel', 'ppc', 'ppc64', 's390', 's390x', 'x32', and 'x64'.
+ let arch = os.arch();
+    // wants amd64, 386, arm64, armv6l, ppc64le, s390x
+ // currently not supported by runner but future proofed mapping
+ switch (arch) {
+ case 'x64':
+ arch = 'amd64';
+ break;
+ // case 'ppc':
+ // arch = 'ppc64';
+ // break;
+ case 'x32':
+ arch = '386';
+ break;
+ }
+ return arch;
+}
+exports.getArch = getArch;
+
+
+/***/ }),
+
+/***/ 649:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = getLastPage
+
+const getPage = __webpack_require__(265)
+
+function getLastPage (octokit, link, headers) {
+ return getPage(octokit, link, 'last', headers)
+}
+
+
+/***/ }),
+
+/***/ 654:
+/***/ (function(module) {
+
+// This is not the set of all possible signals.
+//
+// It IS, however, the set of all signals that trigger
+// an exit on either Linux or BSD systems. Linux is a
+// superset of the signal names supported on BSD, and
+// the unknown signals just fail to register, so we can
+// catch that easily enough.
+//
+// Don't bother with SIGKILL. It's uncatchable, which
+// means that we can't fire any callbacks anyway.
+//
+// If a user does happen to register a handler on a non-
+// fatal signal like SIGWINCH or something, and then
+// exit, it'll end up firing `process.emit('exit')`, so
+// the handler will be fired anyway.
+//
+// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised
+// artificially, inherently leave the process in a
+// state from which it is not safe to try and enter JS
+// listeners.
+module.exports = [
+ 'SIGABRT',
+ 'SIGALRM',
+ 'SIGHUP',
+ 'SIGINT',
+ 'SIGTERM'
+]
+
+if (process.platform !== 'win32') {
+ module.exports.push(
+ 'SIGVTALRM',
+ 'SIGXCPU',
+ 'SIGXFSZ',
+ 'SIGUSR2',
+ 'SIGTRAP',
+ 'SIGSYS',
+ 'SIGQUIT',
+ 'SIGIOT'
+ // should detect profiler and enable/disable accordingly.
+ // see #21
+ // 'SIGPROF'
+ )
+}
+
+if (process.platform === 'linux') {
+ module.exports.push(
+ 'SIGIO',
+ 'SIGPOLL',
+ 'SIGPWR',
+ 'SIGSTKFLT',
+ 'SIGUNUSED'
+ )
+}
+
+
+/***/ }),
+
+/***/ 655:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const tc = __importStar(__webpack_require__(533));
+const path_1 = __importDefault(__webpack_require__(622));
+const main_1 = __webpack_require__(514);
+// installLint returns the path to the installed golangci-lint binary.
+function installLint(versionConfig) {
+ return __awaiter(this, void 0, void 0, function* () {
+ core.info(`Installing golangci-lint ${versionConfig.TargetVersion}...`);
+ const startedAt = Date.now();
+ core.info(`Downloading ${versionConfig.AssetURL} ...`);
+ const tarGzPath = yield tc.downloadTool(versionConfig.AssetURL);
+ const extractedDir = yield tc.extractTar(tarGzPath, process.env.HOME);
+ const urlParts = versionConfig.AssetURL.split(`/`);
+ const dirName = urlParts[urlParts.length - 1].replace(/\.tar\.gz$/, ``);
+ const lintPath = path_1.default.join(extractedDir, dirName, `golangci-lint`);
+ core.info(`Installed golangci-lint into ${lintPath} in ${Date.now() - startedAt}ms`);
+ return lintPath;
+ });
+}
+exports.installLint = installLint;
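+// For example (hypothetical asset URL): an AssetURL ending in
+// 'golangci-lint-1.26.0-linux-amd64.tar.gz' is extracted under $HOME, and the returned lintPath
+// is '$HOME/golangci-lint-1.26.0-linux-amd64/golangci-lint'.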
+function installGo() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const startedAt = Date.now();
+ process.env[`INPUT_GO-VERSION`] = `1`;
+ yield main_1.run();
+ core.info(`Installed Go in ${Date.now() - startedAt}ms`);
+ });
+}
+exports.installGo = installGo;
+
+
+/***/ }),
+
+/***/ 669:
+/***/ (function(module) {
+
+module.exports = require("util");
+
+/***/ }),
+
+/***/ 672:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var _a;
+Object.defineProperty(exports, "__esModule", { value: true });
+const assert_1 = __webpack_require__(357);
+const fs = __webpack_require__(747);
+const path = __webpack_require__(622);
+_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
+exports.IS_WINDOWS = process.platform === 'win32';
+function exists(fsPath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ yield exports.stat(fsPath);
+ }
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ return false;
+ }
+ throw err;
+ }
+ return true;
+ });
+}
+exports.exists = exists;
+function isDirectory(fsPath, useStat = false) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
+ return stats.isDirectory();
+ });
+}
+exports.isDirectory = isDirectory;
+/**
+ * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
+ * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
+ */
+function isRooted(p) {
+ p = normalizeSeparators(p);
+ if (!p) {
+ throw new Error('isRooted() parameter "p" cannot be empty');
+ }
+ if (exports.IS_WINDOWS) {
+ return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello
+ ); // e.g. C: or C:\hello
+ }
+ return p.startsWith('/');
+}
+exports.isRooted = isRooted;
+/**
+ * Recursively create a directory at `fsPath`.
+ *
+ * This implementation is optimistic, meaning it attempts to create the full
+ * path first, and backs up the path stack from there.
+ *
+ * @param fsPath The path to create
+ * @param maxDepth The maximum recursion depth
+ * @param depth The current recursion depth
+ */
+function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
+ return __awaiter(this, void 0, void 0, function* () {
+ assert_1.ok(fsPath, 'a path argument must be provided');
+ fsPath = path.resolve(fsPath);
+ if (depth >= maxDepth)
+ return exports.mkdir(fsPath);
+ try {
+ yield exports.mkdir(fsPath);
+ return;
+ }
+ catch (err) {
+ switch (err.code) {
+ case 'ENOENT': {
+ yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
+ yield exports.mkdir(fsPath);
+ return;
+ }
+ default: {
+ let stats;
+ try {
+ stats = yield exports.stat(fsPath);
+ }
+ catch (err2) {
+ throw err;
+ }
+ if (!stats.isDirectory())
+ throw err;
+ }
+ }
+ }
+ });
+}
+exports.mkdirP = mkdirP;
+/**
+ * Best effort attempt to determine whether a file exists and is executable.
+ * @param filePath file path to check
+ * @param extensions additional file extensions to try
+ * @return if file exists and is executable, returns the file path. otherwise empty string.
+ */
+function tryGetExecutablePath(filePath, extensions) {
+ return __awaiter(this, void 0, void 0, function* () {
+ let stats = undefined;
+ try {
+ // test file exists
+ stats = yield exports.stat(filePath);
+ }
+ catch (err) {
+ if (err.code !== 'ENOENT') {
+ // eslint-disable-next-line no-console
+ console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
+ }
+ }
+ if (stats && stats.isFile()) {
+ if (exports.IS_WINDOWS) {
+ // on Windows, test for valid extension
+ const upperExt = path.extname(filePath).toUpperCase();
+ if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
+ return filePath;
+ }
+ }
+ else {
+ if (isUnixExecutable(stats)) {
+ return filePath;
+ }
+ }
+ }
+ // try each extension
+ const originalFilePath = filePath;
+ for (const extension of extensions) {
+ filePath = originalFilePath + extension;
+ stats = undefined;
+ try {
+ stats = yield exports.stat(filePath);
+ }
+ catch (err) {
+ if (err.code !== 'ENOENT') {
+ // eslint-disable-next-line no-console
+ console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
+ }
+ }
+ if (stats && stats.isFile()) {
+ if (exports.IS_WINDOWS) {
+ // preserve the case of the actual file (since an extension was appended)
+ try {
+ const directory = path.dirname(filePath);
+ const upperName = path.basename(filePath).toUpperCase();
+ for (const actualName of yield exports.readdir(directory)) {
+ if (upperName === actualName.toUpperCase()) {
+ filePath = path.join(directory, actualName);
+ break;
+ }
+ }
+ }
+ catch (err) {
+ // eslint-disable-next-line no-console
+ console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
+ }
+ return filePath;
+ }
+ else {
+ if (isUnixExecutable(stats)) {
+ return filePath;
+ }
+ }
+ }
+ }
+ return '';
+ });
+}
+exports.tryGetExecutablePath = tryGetExecutablePath;
+function normalizeSeparators(p) {
+ p = p || '';
+ if (exports.IS_WINDOWS) {
+ // convert slashes on Windows
+ p = p.replace(/\//g, '\\');
+ // remove redundant slashes
+ return p.replace(/\\\\+/g, '\\');
+ }
+ // remove redundant slashes
+ return p.replace(/\/\/+/g, '/');
+}
+// on Mac/Linux, test the execute bit
+// R W X R W X R W X
+// 256 128 64 32 16 8 4 2 1
+function isUnixExecutable(stats) {
+ return ((stats.mode & 1) > 0 ||
+ ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
+ ((stats.mode & 64) > 0 && stats.uid === process.getuid()));
+}
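+// In the check above, mode & 1 is the "execute by others" bit, mode & 8 is "execute by group"
+// (honored only when the file's gid matches the process gid), and mode & 64 is "execute by owner"
+// (honored only when the file's uid matches the process uid).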
+//# sourceMappingURL=io-util.js.map
+
+/***/ }),
+
+/***/ 674:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var wrappy = __webpack_require__(11)
+var reqs = Object.create(null)
+var once = __webpack_require__(969)
+
+module.exports = wrappy(inflight)
+
+function inflight (key, cb) {
+ if (reqs[key]) {
+ reqs[key].push(cb)
+ return null
+ } else {
+ reqs[key] = [cb]
+ return makeres(key)
+ }
+}
+
+function makeres (key) {
+ return once(function RES () {
+ var cbs = reqs[key]
+ var len = cbs.length
+ var args = slice(arguments)
+
+ // XXX It's somewhat ambiguous whether a new callback added in this
+ // pass should be queued for later execution if something in the
+ // list of callbacks throws, or if it should just be discarded.
+ // However, it's such an edge case that it hardly matters, and either
+ // choice is likely as surprising as the other.
+ // As it happens, we do go ahead and schedule it for later execution.
+ try {
+ for (var i = 0; i < len; i++) {
+ cbs[i].apply(null, args)
+ }
+ } finally {
+ if (cbs.length > len) {
+ // added more in the interim.
+ // de-zalgo, just in case, but don't call again.
+ cbs.splice(0, len)
+ process.nextTick(function () {
+ RES.apply(null, args)
+ })
+ } else {
+ delete reqs[key]
+ }
+ }
+ })
+}
+
+function slice (args) {
+ var length = args.length
+ var array = []
+
+ for (var i = 0; i < length; i++) array[i] = args[i]
+ return array
+}
+
+
+/***/ }),
+
+/***/ 675:
+/***/ (function(module) {
+
+module.exports = function btoa(str) {
+  return Buffer.from(str).toString('base64')
+}
+
+
+/***/ }),
+
+/***/ 681:
+/***/ (function(module) {
+
+"use strict";
+
+
+function posix(path) {
+ return path.charAt(0) === '/';
+}
+
+function win32(path) {
+ // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
+ var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
+ var result = splitDeviceRe.exec(path);
+ var device = result[1] || '';
+ var isUnc = Boolean(device && device.charAt(1) !== ':');
+
+ // UNC paths are always absolute
+ return Boolean(result[2] || isUnc);
+}
+
+module.exports = process.platform === 'win32' ? win32 : posix;
+module.exports.posix = posix;
+module.exports.win32 = win32;
+
+
+/***/ }),
+
+/***/ 686:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const os = __importStar(__webpack_require__(87));
+const events = __importStar(__webpack_require__(614));
+const child = __importStar(__webpack_require__(129));
+const path = __importStar(__webpack_require__(622));
+const io = __importStar(__webpack_require__(1));
+const ioUtil = __importStar(__webpack_require__(672));
+/* eslint-disable @typescript-eslint/unbound-method */
+const IS_WINDOWS = process.platform === 'win32';
+/*
+ * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
+ */
+class ToolRunner extends events.EventEmitter {
+ constructor(toolPath, args, options) {
+ super();
+ if (!toolPath) {
+ throw new Error("Parameter 'toolPath' cannot be null or empty.");
+ }
+ this.toolPath = toolPath;
+ this.args = args || [];
+ this.options = options || {};
+ }
+ _debug(message) {
+ if (this.options.listeners && this.options.listeners.debug) {
+ this.options.listeners.debug(message);
+ }
+ }
+ _getCommandString(options, noPrefix) {
+ const toolPath = this._getSpawnFileName();
+ const args = this._getSpawnArgs(options);
+ let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
+ if (IS_WINDOWS) {
+ // Windows + cmd file
+ if (this._isCmdFile()) {
+ cmd += toolPath;
+ for (const a of args) {
+ cmd += ` ${a}`;
+ }
+ }
+ // Windows + verbatim
+ else if (options.windowsVerbatimArguments) {
+ cmd += `"${toolPath}"`;
+ for (const a of args) {
+ cmd += ` ${a}`;
+ }
+ }
+ // Windows (regular)
+ else {
+ cmd += this._windowsQuoteCmdArg(toolPath);
+ for (const a of args) {
+ cmd += ` ${this._windowsQuoteCmdArg(a)}`;
+ }
+ }
+ }
+ else {
+ // OSX/Linux - this can likely be improved with some form of quoting.
+ // creating processes on Unix is fundamentally different than Windows.
+ // on Unix, execvp() takes an arg array.
+ cmd += toolPath;
+ for (const a of args) {
+ cmd += ` ${a}`;
+ }
+ }
+ return cmd;
+ }
+ _processLineBuffer(data, strBuffer, onLine) {
+ try {
+ let s = strBuffer + data.toString();
+ let n = s.indexOf(os.EOL);
+ while (n > -1) {
+ const line = s.substring(0, n);
+ onLine(line);
+ // the rest of the string ...
+ s = s.substring(n + os.EOL.length);
+ n = s.indexOf(os.EOL);
+ }
+ strBuffer = s;
+ }
+ catch (err) {
+ // streaming lines to console is best effort. Don't fail a build.
+ this._debug(`error processing line. Failed with error ${err}`);
+ }
+ }
+ _getSpawnFileName() {
+ if (IS_WINDOWS) {
+ if (this._isCmdFile()) {
+ return process.env['COMSPEC'] || 'cmd.exe';
+ }
+ }
+ return this.toolPath;
+ }
+ _getSpawnArgs(options) {
+ if (IS_WINDOWS) {
+ if (this._isCmdFile()) {
+ let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
+ for (const a of this.args) {
+ argline += ' ';
+ argline += options.windowsVerbatimArguments
+ ? a
+ : this._windowsQuoteCmdArg(a);
+ }
+ argline += '"';
+ return [argline];
+ }
+ }
+ return this.args;
+ }
+ _endsWith(str, end) {
+ return str.endsWith(end);
+ }
+ _isCmdFile() {
+ const upperToolPath = this.toolPath.toUpperCase();
+ return (this._endsWith(upperToolPath, '.CMD') ||
+ this._endsWith(upperToolPath, '.BAT'));
+ }
+ _windowsQuoteCmdArg(arg) {
+ // for .exe, apply the normal quoting rules that libuv applies
+ if (!this._isCmdFile()) {
+ return this._uvQuoteCmdArg(arg);
+ }
+ // otherwise apply quoting rules specific to the cmd.exe command line parser.
+ // the libuv rules are generic and are not designed specifically for cmd.exe
+ // command line parser.
+ //
+ // for a detailed description of the cmd.exe command line parser, refer to
+ // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
+ // need quotes for empty arg
+ if (!arg) {
+ return '""';
+ }
+ // determine whether the arg needs to be quoted
+ const cmdSpecialChars = [
+ ' ',
+ '\t',
+ '&',
+ '(',
+ ')',
+ '[',
+ ']',
+ '{',
+ '}',
+ '^',
+ '=',
+ ';',
+ '!',
+ "'",
+ '+',
+ ',',
+ '`',
+ '~',
+ '|',
+ '<',
+ '>',
+ '"'
+ ];
+ let needsQuotes = false;
+ for (const char of arg) {
+ if (cmdSpecialChars.some(x => x === char)) {
+ needsQuotes = true;
+ break;
+ }
+ }
+ // short-circuit if quotes not needed
+ if (!needsQuotes) {
+ return arg;
+ }
+        // the following quoting rules are very similar to the rules that libuv applies.
+ //
+ // 1) wrap the string in quotes
+ //
+ // 2) double-up quotes - i.e. " => ""
+ //
+ // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
+ // doesn't work well with a cmd.exe command line.
+ //
+ // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
+ // for example, the command line:
+ // foo.exe "myarg:""my val"""
+ // is parsed by a .NET console app into an arg array:
+ // [ "myarg:\"my val\"" ]
+ // which is the same end result when applying libuv quoting rules. although the actual
+ // command line from libuv quoting rules would look like:
+ // foo.exe "myarg:\"my val\""
+ //
+ // 3) double-up slashes that precede a quote,
+ // e.g. hello \world => "hello \world"
+ // hello\"world => "hello\\""world"
+ // hello\\"world => "hello\\\\""world"
+ // hello world\ => "hello world\\"
+ //
+ // technically this is not required for a cmd.exe command line, or the batch argument parser.
+ // the reasons for including this as a .cmd quoting rule are:
+ //
+ // a) this is optimized for the scenario where the argument is passed from the .cmd file to an
+ // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
+ //
+ // b) it's what we've been doing previously (by deferring to node default behavior) and we
+ // haven't heard any complaints about that aspect.
+ //
+ // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
+ // escaped when used on the command line directly - even though within a .cmd file % can be escaped
+ // by using %%.
+ //
+ // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
+ // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
+ //
+ // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
+ // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
+ // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
+ // to an external program.
+ //
+ // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
+ // % can be escaped within a .cmd file.
+ let reverse = '"';
+ let quoteHit = true;
+ for (let i = arg.length; i > 0; i--) {
+ // walk the string in reverse
+ reverse += arg[i - 1];
+ if (quoteHit && arg[i - 1] === '\\') {
+ reverse += '\\'; // double the slash
+ }
+ else if (arg[i - 1] === '"') {
+ quoteHit = true;
+ reverse += '"'; // double the quote
+ }
+ else {
+ quoteHit = false;
+ }
+ }
+ reverse += '"';
+ return reverse
+ .split('')
+ .reverse()
+ .join('');
+ }
+ _uvQuoteCmdArg(arg) {
+ // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
+ // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
+ // is used.
+ //
+ // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
+ // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
+ // pasting copyright notice from Node within this function:
+ //
+ // Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+ //
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
+ // of this software and associated documentation files (the "Software"), to
+ // deal in the Software without restriction, including without limitation the
+ // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ // sell copies of the Software, and to permit persons to whom the Software is
+ // furnished to do so, subject to the following conditions:
+ //
+ // The above copyright notice and this permission notice shall be included in
+ // all copies or substantial portions of the Software.
+ //
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ // IN THE SOFTWARE.
+ if (!arg) {
+ // Need double quotation for empty argument
+ return '""';
+ }
+ if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
+ // No quotation needed
+ return arg;
+ }
+ if (!arg.includes('"') && !arg.includes('\\')) {
+ // No embedded double quotes or backslashes, so I can just wrap
+ // quote marks around the whole thing.
+ return `"${arg}"`;
+ }
+ // Expected input/output:
+ // input : hello"world
+ // output: "hello\"world"
+ // input : hello""world
+ // output: "hello\"\"world"
+ // input : hello\world
+ // output: hello\world
+ // input : hello\\world
+ // output: hello\\world
+ // input : hello\"world
+ // output: "hello\\\"world"
+ // input : hello\\"world
+ // output: "hello\\\\\"world"
+ // input : hello world\
+ // output: "hello world\\" - note the comment in libuv actually reads "hello world\"
+ // but it appears the comment is wrong, it should be "hello world\\"
+ let reverse = '"';
+ let quoteHit = true;
+ for (let i = arg.length; i > 0; i--) {
+ // walk the string in reverse
+ reverse += arg[i - 1];
+ if (quoteHit && arg[i - 1] === '\\') {
+ reverse += '\\';
+ }
+ else if (arg[i - 1] === '"') {
+ quoteHit = true;
+ reverse += '\\';
+ }
+ else {
+ quoteHit = false;
+ }
+ }
+ reverse += '"';
+ return reverse
+ .split('')
+ .reverse()
+ .join('');
+ }
+ _cloneExecOptions(options) {
+ options = options || {};
+ const result = {
+ cwd: options.cwd || process.cwd(),
+ env: options.env || process.env,
+ silent: options.silent || false,
+ windowsVerbatimArguments: options.windowsVerbatimArguments || false,
+ failOnStdErr: options.failOnStdErr || false,
+ ignoreReturnCode: options.ignoreReturnCode || false,
+ delay: options.delay || 10000
+ };
+ result.outStream = options.outStream || process.stdout;
+ result.errStream = options.errStream || process.stderr;
+ return result;
+ }
+ _getSpawnOptions(options, toolPath) {
+ options = options || {};
+ const result = {};
+ result.cwd = options.cwd;
+ result.env = options.env;
+ result['windowsVerbatimArguments'] =
+ options.windowsVerbatimArguments || this._isCmdFile();
+ if (options.windowsVerbatimArguments) {
+ result.argv0 = `"${toolPath}"`;
+ }
+ return result;
+ }
+ /**
+ * Exec a tool.
+ * Output will be streamed to the live console.
+ * Returns promise with return code
+ *
+ * @param tool path to tool to exec
+ * @param options optional exec options. See ExecOptions
+ * @returns number
+ */
+ exec() {
+ return __awaiter(this, void 0, void 0, function* () {
+ // root the tool path if it is unrooted and contains relative pathing
+ if (!ioUtil.isRooted(this.toolPath) &&
+ (this.toolPath.includes('/') ||
+ (IS_WINDOWS && this.toolPath.includes('\\')))) {
+ // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
+ this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
+ }
+ // if the tool is only a file name, then resolve it from the PATH
+ // otherwise verify it exists (add extension on Windows if necessary)
+ this.toolPath = yield io.which(this.toolPath, true);
+ return new Promise((resolve, reject) => {
+ this._debug(`exec tool: ${this.toolPath}`);
+ this._debug('arguments:');
+ for (const arg of this.args) {
+ this._debug(` ${arg}`);
+ }
+ const optionsNonNull = this._cloneExecOptions(this.options);
+ if (!optionsNonNull.silent && optionsNonNull.outStream) {
+ optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
+ }
+ const state = new ExecState(optionsNonNull, this.toolPath);
+ state.on('debug', (message) => {
+ this._debug(message);
+ });
+ const fileName = this._getSpawnFileName();
+ const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
+ const stdbuffer = '';
+ if (cp.stdout) {
+ cp.stdout.on('data', (data) => {
+ if (this.options.listeners && this.options.listeners.stdout) {
+ this.options.listeners.stdout(data);
+ }
+ if (!optionsNonNull.silent && optionsNonNull.outStream) {
+ optionsNonNull.outStream.write(data);
+ }
+ this._processLineBuffer(data, stdbuffer, (line) => {
+ if (this.options.listeners && this.options.listeners.stdline) {
+ this.options.listeners.stdline(line);
+ }
+ });
+ });
+ }
+ const errbuffer = '';
+ if (cp.stderr) {
+ cp.stderr.on('data', (data) => {
+ state.processStderr = true;
+ if (this.options.listeners && this.options.listeners.stderr) {
+ this.options.listeners.stderr(data);
+ }
+ if (!optionsNonNull.silent &&
+ optionsNonNull.errStream &&
+ optionsNonNull.outStream) {
+ const s = optionsNonNull.failOnStdErr
+ ? optionsNonNull.errStream
+ : optionsNonNull.outStream;
+ s.write(data);
+ }
+ this._processLineBuffer(data, errbuffer, (line) => {
+ if (this.options.listeners && this.options.listeners.errline) {
+ this.options.listeners.errline(line);
+ }
+ });
+ });
+ }
+ cp.on('error', (err) => {
+ state.processError = err.message;
+ state.processExited = true;
+ state.processClosed = true;
+ state.CheckComplete();
+ });
+ cp.on('exit', (code) => {
+ state.processExitCode = code;
+ state.processExited = true;
+ this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
+ state.CheckComplete();
+ });
+ cp.on('close', (code) => {
+ state.processExitCode = code;
+ state.processExited = true;
+ state.processClosed = true;
+ this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
+ state.CheckComplete();
+ });
+ state.on('done', (error, exitCode) => {
+ if (stdbuffer.length > 0) {
+ this.emit('stdline', stdbuffer);
+ }
+ if (errbuffer.length > 0) {
+ this.emit('errline', errbuffer);
+ }
+ cp.removeAllListeners();
+ if (error) {
+ reject(error);
+ }
+ else {
+ resolve(exitCode);
+ }
+ });
+ if (this.options.input) {
+ if (!cp.stdin) {
+ throw new Error('child process missing stdin');
+ }
+ cp.stdin.end(this.options.input);
+ }
+ });
+ });
+ }
+}
+exports.ToolRunner = ToolRunner;
+/**
+ * Convert an arg string to an array of args. Handles escaping
+ *
+ * @param argString string of arguments
+ * @returns string[] array of arguments
+ */
+function argStringToArray(argString) {
+ const args = [];
+ let inQuotes = false;
+ let escaped = false;
+ let arg = '';
+ function append(c) {
+ // we only escape double quotes.
+ if (escaped && c !== '"') {
+ arg += '\\';
+ }
+ arg += c;
+ escaped = false;
+ }
+ for (let i = 0; i < argString.length; i++) {
+ const c = argString.charAt(i);
+ if (c === '"') {
+ if (!escaped) {
+ inQuotes = !inQuotes;
+ }
+ else {
+ append(c);
+ }
+ continue;
+ }
+ if (c === '\\' && escaped) {
+ append(c);
+ continue;
+ }
+ if (c === '\\' && inQuotes) {
+ escaped = true;
+ continue;
+ }
+ if (c === ' ' && !inQuotes) {
+ if (arg.length > 0) {
+ args.push(arg);
+ arg = '';
+ }
+ continue;
+ }
+ append(c);
+ }
+ if (arg.length > 0) {
+ args.push(arg.trim());
+ }
+ return args;
+}
+exports.argStringToArray = argStringToArray;
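+// For example: argStringToArray('--foo "bar baz" --qux') returns ['--foo', 'bar baz', '--qux'];
+// double quotes group words, and a backslash escapes a double quote inside quotes.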
+class ExecState extends events.EventEmitter {
+ constructor(options, toolPath) {
+ super();
+ this.processClosed = false; // tracks whether the process has exited and stdio is closed
+ this.processError = '';
+ this.processExitCode = 0;
+ this.processExited = false; // tracks whether the process has exited
+ this.processStderr = false; // tracks whether stderr was written to
+ this.delay = 10000; // 10 seconds
+ this.done = false;
+ this.timeout = null;
+ if (!toolPath) {
+ throw new Error('toolPath must not be empty');
+ }
+ this.options = options;
+ this.toolPath = toolPath;
+ if (options.delay) {
+ this.delay = options.delay;
+ }
+ }
+ CheckComplete() {
+ if (this.done) {
+ return;
+ }
+ if (this.processClosed) {
+ this._setResult();
+ }
+ else if (this.processExited) {
+ this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
+ }
+ }
+ _debug(message) {
+ this.emit('debug', message);
+ }
+ _setResult() {
+ // determine whether there is an error
+ let error;
+ if (this.processExited) {
+ if (this.processError) {
+ error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
+ }
+ else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
+ error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
+ }
+ else if (this.processStderr && this.options.failOnStdErr) {
+ error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
+ }
+ }
+ // clear the timeout
+ if (this.timeout) {
+ clearTimeout(this.timeout);
+ this.timeout = null;
+ }
+ this.done = true;
+ this.emit('done', error, this.processExitCode);
+ }
+ static HandleTimeout(state) {
+ if (state.done) {
+ return;
+ }
+ if (!state.processClosed && state.processExited) {
+ const message = `The STDIO streams did not close within ${state.delay /
+ 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
+ state._debug(message);
+ }
+ state._setResult();
+ }
+}
+//# sourceMappingURL=toolrunner.js.map
+
+/***/ }),
+
+/***/ 689:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+try {
+ var util = __webpack_require__(669);
+ /* istanbul ignore next */
+ if (typeof util.inherits !== 'function') throw '';
+ module.exports = util.inherits;
+} catch (e) {
+ /* istanbul ignore next */
+ module.exports = __webpack_require__(315);
+}
+
+
+/***/ }),
+
+/***/ 692:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+class Deprecation extends Error {
+ constructor(message) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = 'Deprecation';
+ }
+
+}
+
+exports.Deprecation = Deprecation;
+
+
+/***/ }),
+
+/***/ 696:
+/***/ (function(module) {
+
+"use strict";
+
+
+/*!
+ * isobject
+ *
+ * Copyright (c) 2014-2017, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+function isObject(val) {
+ return val != null && typeof val === 'object' && Array.isArray(val) === false;
+}
+
+/*!
+ * is-plain-object
+ *
+ * Copyright (c) 2014-2017, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+function isObjectObject(o) {
+ return isObject(o) === true
+ && Object.prototype.toString.call(o) === '[object Object]';
+}
+
+function isPlainObject(o) {
+ var ctor,prot;
+
+ if (isObjectObject(o) === false) return false;
+
+ // If has modified constructor
+ ctor = o.constructor;
+ if (typeof ctor !== 'function') return false;
+
+ // If has modified prototype
+ prot = ctor.prototype;
+ if (isObjectObject(prot) === false) return false;
+
+ // If constructor does not have an Object-specific method
+ if (prot.hasOwnProperty('isPrototypeOf') === false) {
+ return false;
+ }
+
+ // Most likely a plain Object
+ return true;
+}
+
+module.exports = isPlainObject;
+
+
+/***/ }),
+
+/***/ 697:
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = (promise, onFinally) => {
+ onFinally = onFinally || (() => {});
+
+ return promise.then(
+ val => new Promise(resolve => {
+ resolve(onFinally());
+ }).then(() => val),
+ err => new Promise(resolve => {
+ resolve(onFinally());
+ }).then(() => {
+ throw err;
+ })
+ );
+};
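+// Usage sketch (with `pFinally` as a hypothetical name for this export):
+// pFinally(doWork(), () => cleanup()) runs cleanup() once doWork() settles, then passes the
+// original value through (or rethrows the original error).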
+
+
+/***/ }),
+
+/***/ 722:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const restore_1 = __importDefault(__webpack_require__(206));
+const save_1 = __importDefault(__webpack_require__(781));
+const crypto = __importStar(__webpack_require__(417));
+const fs = __importStar(__webpack_require__(747));
+function checksumFile(hashName, path) {
+ return new Promise((resolve, reject) => {
+ const hash = crypto.createHash(hashName);
+ const stream = fs.createReadStream(path);
+ stream.on("error", err => reject(err));
+ stream.on("data", chunk => hash.update(chunk));
+ stream.on("end", () => resolve(hash.digest("hex")));
+ });
+}
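+// For example: checksumFile('sha1', 'go.mod') resolves to the hex SHA-1 digest of go.mod,
+// computed by streaming the file from disk.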
+const pathExists = (path) => __awaiter(void 0, void 0, void 0, function* () { return !!(yield fs.promises.stat(path).catch(() => false)); });
+const getLintCacheDir = () => `${process.env.HOME}/.cache/golangci-lint`;
+const getCacheDirs = () => {
+    // Directories that don't exist are fine here: caching still works.
+ return [getLintCacheDir(), `${process.env.HOME}/.cache/go-build`, `${process.env.HOME}/go/pkg`];
+};
+const getIntervalKey = (invalidationIntervalDays) => {
+ const now = new Date();
+ const secondsSinceEpoch = now.getTime() / 1000;
+ const intervalNumber = Math.floor(secondsSinceEpoch / (invalidationIntervalDays * 86400));
+ return intervalNumber.toString();
+};
+function buildCacheKeys() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const keys = [];
+ let cacheKey = `golangci-lint.cache-`;
+ keys.push(cacheKey);
+        // Periodically invalidate the cache because new code is being added.
+ // TODO: configure it via inputs.
+ cacheKey += `${getIntervalKey(7)}-`;
+ keys.push(cacheKey);
+ if (yield pathExists(`go.mod`)) {
+ // Add checksum to key to invalidate a cache when dependencies change.
+ cacheKey += yield checksumFile(`sha1`, `go.mod`);
+ }
+ else {
+ cacheKey += `nogomod`;
+ }
+ keys.push(cacheKey);
+ return keys;
+ });
+}
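+// For example (hypothetical values): with an interval key of '2628' and a go.mod present, the keys are
+// ['golangci-lint.cache-', 'golangci-lint.cache-2628-', 'golangci-lint.cache-2628-<sha1 of go.mod>'];
+// restoreCache() below uses the last (most specific) key as the primary key and the others,
+// most specific first, as restore keys.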
+function restoreCache() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const startedAt = Date.now();
+ const keys = yield buildCacheKeys();
+ const primaryKey = keys.pop();
+ const restoreKeys = keys.reverse();
+ core.info(`Primary analysis cache key is '${primaryKey}', restore keys are '${restoreKeys.join(` | `)}'`);
+ process.env[`INPUT_RESTORE-KEYS`] = restoreKeys.join(`\n`);
+ process.env.INPUT_KEY = primaryKey;
+ process.env.INPUT_PATH = getCacheDirs().join(`\n`);
+ // Tell golangci-lint to use our cache directory.
+ process.env.GOLANGCI_LINT_CACHE = getLintCacheDir();
+ yield restore_1.default();
+ core.info(`Restored cache for golangci-lint from key '${primaryKey}' in ${Date.now() - startedAt}ms`);
+ });
+}
+exports.restoreCache = restoreCache;
+function saveCache() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const startedAt = Date.now();
+ const cacheDirs = getCacheDirs();
+ process.env.INPUT_PATH = cacheDirs.join(`\n`);
+ yield save_1.default();
+ core.info(`Saved cache for golangci-lint from paths '${cacheDirs.join(`, `)}' in ${Date.now() - startedAt}ms`);
+ });
+}
+exports.saveCache = saveCache;
+
+
+/***/ }),
+
+/***/ 728:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+class SearchState {
+ constructor(path, level) {
+ this.path = path;
+ this.level = level;
+ }
+}
+exports.SearchState = SearchState;
+//# sourceMappingURL=internal-search-state.js.map
+
+/***/ }),
+
+/***/ 741:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const { requestLog } = __webpack_require__(916);
+const {
+ restEndpointMethods
+} = __webpack_require__(559);
+
+const Core = __webpack_require__(37);
+
+const CORE_PLUGINS = [
+ __webpack_require__(890),
+ __webpack_require__(953), // deprecated: remove in v17
+ requestLog,
+ __webpack_require__(786),
+ restEndpointMethods,
+ __webpack_require__(341),
+
+ __webpack_require__(850) // deprecated: remove in v17
+];
+
+const OctokitRest = Core.plugin(CORE_PLUGINS);
+
+function DeprecatedOctokit(options) {
+ const warn =
+ options && options.log && options.log.warn
+ ? options.log.warn
+ : console.warn;
+ warn(
+ '[@octokit/rest] `const Octokit = require("@octokit/rest")` is deprecated. Use `const { Octokit } = require("@octokit/rest")` instead'
+ );
+ return new OctokitRest(options);
+}
+
+const Octokit = Object.assign(DeprecatedOctokit, {
+ Octokit: OctokitRest
+});
+
+Object.keys(OctokitRest).forEach(key => {
+ /* istanbul ignore else */
+ if (OctokitRest.hasOwnProperty(key)) {
+ Octokit[key] = OctokitRest[key];
+ }
+});
+
+module.exports = Octokit;
+
+
+/***/ }),
+
+/***/ 742:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var fs = __webpack_require__(747)
+var core
+if (process.platform === 'win32' || global.TESTING_WINDOWS) {
+ core = __webpack_require__(818)
+} else {
+ core = __webpack_require__(197)
+}
+
+module.exports = isexe
+isexe.sync = sync
+
+function isexe (path, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = {}
+ }
+
+ if (!cb) {
+ if (typeof Promise !== 'function') {
+ throw new TypeError('callback not provided')
+ }
+
+ return new Promise(function (resolve, reject) {
+ isexe(path, options || {}, function (er, is) {
+ if (er) {
+ reject(er)
+ } else {
+ resolve(is)
+ }
+ })
+ })
+ }
+
+ core(path, options || {}, function (er, is) {
+ // ignore EACCES because that just means we aren't allowed to run it
+ if (er) {
+ if (er.code === 'EACCES' || options && options.ignoreErrors) {
+ er = null
+ is = false
+ }
+ }
+ cb(er, is)
+ })
+}
+
+function sync (path, options) {
+ // my kingdom for a filtered catch
+ try {
+ return core.sync(path, options || {})
+ } catch (er) {
+ if (options && options.ignoreErrors || er.code === 'EACCES') {
+ return false
+ } else {
+ throw er
+ }
+ }
+}
+
+
+/***/ }),
+
+/***/ 747:
+/***/ (function(module) {
+
+module.exports = require("fs");
+
+/***/ }),
+
+/***/ 753:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var endpoint = __webpack_require__(385);
+var universalUserAgent = __webpack_require__(211);
+var isPlainObject = _interopDefault(__webpack_require__(696));
+var nodeFetch = _interopDefault(__webpack_require__(454));
+var requestError = __webpack_require__(463);
+
+const VERSION = "5.4.2";
+
+function getBufferResponse(response) {
+ return response.arrayBuffer();
+}
+
+function fetchWrapper(requestOptions) {
+ if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
+ requestOptions.body = JSON.stringify(requestOptions.body);
+ }
+
+ let headers = {};
+ let status;
+ let url;
+ const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
+ return fetch(requestOptions.url, Object.assign({
+ method: requestOptions.method,
+ body: requestOptions.body,
+ headers: requestOptions.headers,
+ redirect: requestOptions.redirect
+ }, requestOptions.request)).then(response => {
+ url = response.url;
+ status = response.status;
+
+ for (const keyAndValue of response.headers) {
+ headers[keyAndValue[0]] = keyAndValue[1];
+ }
+
+ if (status === 204 || status === 205) {
+ return;
+ } // GitHub API returns 200 for HEAD requests
+
+
+ if (requestOptions.method === "HEAD") {
+ if (status < 400) {
+ return;
+ }
+
+ throw new requestError.RequestError(response.statusText, status, {
+ headers,
+ request: requestOptions
+ });
+ }
+
+ if (status === 304) {
+ throw new requestError.RequestError("Not modified", status, {
+ headers,
+ request: requestOptions
+ });
+ }
+
+ if (status >= 400) {
+ return response.text().then(message => {
+ const error = new requestError.RequestError(message, status, {
+ headers,
+ request: requestOptions
+ });
+
+ try {
+ let responseBody = JSON.parse(error.message);
+ Object.assign(error, responseBody);
+ let errors = responseBody.errors; // Assumption `errors` would always be in Array format
+
+ error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
+ } catch (e) {// ignore, see octokit/rest.js#684
+ }
+
+ throw error;
+ });
+ }
+
+ const contentType = response.headers.get("content-type");
+
+ if (/application\/json/.test(contentType)) {
+ return response.json();
+ }
+
+ if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
+ return response.text();
+ }
+
+ return getBufferResponse(response);
+ }).then(data => {
+ return {
+ status,
+ url,
+ headers,
+ data
+ };
+ }).catch(error => {
+ if (error instanceof requestError.RequestError) {
+ throw error;
+ }
+
+ throw new requestError.RequestError(error.message, 500, {
+ headers,
+ request: requestOptions
+ });
+ });
+}
+
+function withDefaults(oldEndpoint, newDefaults) {
+ const endpoint = oldEndpoint.defaults(newDefaults);
+
+ const newApi = function (route, parameters) {
+ const endpointOptions = endpoint.merge(route, parameters);
+
+ if (!endpointOptions.request || !endpointOptions.request.hook) {
+ return fetchWrapper(endpoint.parse(endpointOptions));
+ }
+
+ const request = (route, parameters) => {
+ return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
+ };
+
+ Object.assign(request, {
+ endpoint,
+ defaults: withDefaults.bind(null, endpoint)
+ });
+ return endpointOptions.request.hook(request, endpointOptions);
+ };
+
+ return Object.assign(newApi, {
+ endpoint,
+ defaults: withDefaults.bind(null, endpoint)
+ });
+}
+
+const request = withDefaults(endpoint.endpoint, {
+ headers: {
+ "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
+ }
+});
+
+exports.request = request;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 760:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var m = o[Symbol.asyncIterator], i;
+ return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+ function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+ function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
+var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var g = generator.apply(thisArg, _arguments || []), i, q = [];
+ return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
+ function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
+ function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+ function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+ function fulfill(value) { resume("next", value); }
+ function reject(value) { resume("throw", value); }
+ function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __webpack_require__(470);
+const fs = __webpack_require__(747);
+const globOptionsHelper = __webpack_require__(601);
+const path = __webpack_require__(622);
+const patternHelper = __webpack_require__(597);
+const internal_match_kind_1 = __webpack_require__(327);
+const internal_pattern_1 = __webpack_require__(923);
+const internal_search_state_1 = __webpack_require__(728);
+const IS_WINDOWS = process.platform === 'win32';
+class DefaultGlobber {
+ constructor(options) {
+ this.patterns = [];
+ this.searchPaths = [];
+ this.options = globOptionsHelper.getOptions(options);
+ }
+ getSearchPaths() {
+ // Return a copy
+ return this.searchPaths.slice();
+ }
+ glob() {
+ var e_1, _a;
+ return __awaiter(this, void 0, void 0, function* () {
+ const result = [];
+ try {
+ for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
+ const itemPath = _c.value;
+ result.push(itemPath);
+ }
+ }
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
+ finally {
+ try {
+ if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
+ }
+ finally { if (e_1) throw e_1.error; }
+ }
+ return result;
+ });
+ }
+ globGenerator() {
+ return __asyncGenerator(this, arguments, function* globGenerator_1() {
+            // Fill in default options
+ const options = globOptionsHelper.getOptions(this.options);
+ // Implicit descendants?
+ const patterns = [];
+ for (const pattern of this.patterns) {
+ patterns.push(pattern);
+ if (options.implicitDescendants &&
+ (pattern.trailingSeparator ||
+ pattern.segments[pattern.segments.length - 1] !== '**')) {
+ patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
+ }
+ }
+ // Push the search paths
+ const stack = [];
+ for (const searchPath of patternHelper.getSearchPaths(patterns)) {
+ core.debug(`Search path '${searchPath}'`);
+ // Exists?
+ try {
+ // Intentionally using lstat. Detection for broken symlink
+ // will be performed later (if following symlinks).
+ yield __await(fs.promises.lstat(searchPath));
+ }
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ continue;
+ }
+ throw err;
+ }
+ stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
+ }
+ // Search
+ const traversalChain = []; // used to detect cycles
+ while (stack.length) {
+ // Pop
+ const item = stack.pop();
+ // Match?
+ const match = patternHelper.match(patterns, item.path);
+ const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
+ if (!match && !partialMatch) {
+ continue;
+ }
+ // Stat
+ const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)
+ // Broken symlink, or symlink cycle detected, or no longer exists
+ );
+ // Broken symlink, or symlink cycle detected, or no longer exists
+ if (!stats) {
+ continue;
+ }
+ // Directory
+ if (stats.isDirectory()) {
+ // Matched
+ if (match & internal_match_kind_1.MatchKind.Directory) {
+ yield yield __await(item.path);
+ }
+ // Descend?
+ else if (!partialMatch) {
+ continue;
+ }
+ // Push the child items in reverse
+ const childLevel = item.level + 1;
+ const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
+ stack.push(...childItems.reverse());
+ }
+ // File
+ else if (match & internal_match_kind_1.MatchKind.File) {
+ yield yield __await(item.path);
+ }
+ }
+ });
+ }
+ /**
+ * Constructs a DefaultGlobber
+ */
+ static create(patterns, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const result = new DefaultGlobber(options);
+ if (IS_WINDOWS) {
+ patterns = patterns.replace(/\r\n/g, '\n');
+ patterns = patterns.replace(/\r/g, '\n');
+ }
+ const lines = patterns.split('\n').map(x => x.trim());
+ for (const line of lines) {
+ // Empty or comment
+ if (!line || line.startsWith('#')) {
+ continue;
+ }
+ // Pattern
+ else {
+ result.patterns.push(new internal_pattern_1.Pattern(line));
+ }
+ }
+ result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
+ return result;
+ });
+ }
+ static stat(item, options, traversalChain) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Note:
+ // `stat` returns info about the target of a symlink (or symlink chain)
+ // `lstat` returns info about a symlink itself
+ let stats;
+ if (options.followSymbolicLinks) {
+ try {
+ // Use `stat` (following symlinks)
+ stats = yield fs.promises.stat(item.path);
+ }
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ if (options.omitBrokenSymbolicLinks) {
+ core.debug(`Broken symlink '${item.path}'`);
+ return undefined;
+ }
+ throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
+ }
+ throw err;
+ }
+ }
+ else {
+ // Use `lstat` (not following symlinks)
+ stats = yield fs.promises.lstat(item.path);
+ }
+ // Note, isDirectory() returns false for the lstat of a symlink
+ if (stats.isDirectory() && options.followSymbolicLinks) {
+ // Get the realpath
+ const realPath = yield fs.promises.realpath(item.path);
+ // Fixup the traversal chain to match the item level
+ while (traversalChain.length >= item.level) {
+ traversalChain.pop();
+ }
+ // Test for a cycle
+ if (traversalChain.some((x) => x === realPath)) {
+ core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
+ return undefined;
+ }
+ // Update the traversal chain
+ traversalChain.push(realPath);
+ }
+ return stats;
+ });
+ }
+}
+exports.DefaultGlobber = DefaultGlobber;
+//# sourceMappingURL=internal-globber.js.map
+
+/***/ }),
+
+/***/ 761:
+/***/ (function(module) {
+
+module.exports = require("zlib");
+
+/***/ }),
+
+/***/ 763:
+/***/ (function(module) {
+
+module.exports = removeHook
+
+function removeHook (state, name, method) {
+ if (!state.registry[name]) {
+ return
+ }
+
+ var index = state.registry[name]
+ .map(function (registered) { return registered.orig })
+ .indexOf(method)
+
+ if (index === -1) {
+ return
+ }
+
+ state.registry[name].splice(index, 1)
+}
+
+
+/***/ }),
+
+/***/ 768:
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = function (x) {
+ var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt();
+ var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt();
+
+ if (x[x.length - 1] === lf) {
+ x = x.slice(0, x.length - 1);
+ }
+
+ if (x[x.length - 1] === cr) {
+ x = x.slice(0, x.length - 1);
+ }
+
+ return x;
+};
+
+
+/***/ }),
+
+/***/ 777:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = getFirstPage
+
+const getPage = __webpack_require__(265)
+
+function getFirstPage (octokit, link, headers) {
+ return getPage(octokit, link, 'first', headers)
+}
+
+
+/***/ }),
+
+/***/ 781:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const path = __importStar(__webpack_require__(622));
+const cacheHttpClient = __importStar(__webpack_require__(342));
+const constants_1 = __webpack_require__(573);
+const tar_1 = __webpack_require__(570);
+const utils = __importStar(__webpack_require__(867));
+function run() {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ if (!utils.isValidEvent()) {
+ utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils
+ .getSupportedEvents()
+ .join(", ")} events are supported at this time.`);
+ return;
+ }
+ const state = utils.getCacheState();
+            // Inputs are re-evaluated before the post action, so we want the original key used for restore
+ const primaryKey = core.getState(constants_1.State.CacheKey);
+ if (!primaryKey) {
+ utils.logWarning(`Error retrieving key from state.`);
+ return;
+ }
+ if (utils.isExactKeyMatch(primaryKey, state)) {
+ core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
+ return;
+ }
+ core.debug("Reserving Cache");
+ const cacheId = yield cacheHttpClient.reserveCache(primaryKey);
+ if (cacheId == -1) {
+ core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
+ return;
+ }
+ core.debug(`Cache ID: ${cacheId}`);
+ const cachePaths = yield utils.resolvePaths(core
+ .getInput(constants_1.Inputs.Path, { required: true })
+ .split("\n")
+ .filter(x => x !== ""));
+ core.debug("Cache Paths:");
+ core.debug(`${JSON.stringify(cachePaths)}`);
+ const archiveFolder = yield utils.createTempDirectory();
+ const archivePath = path.join(archiveFolder, constants_1.CacheFilename);
+ core.debug(`Archive Path: ${archivePath}`);
+ yield tar_1.createTar(archiveFolder, cachePaths);
+ const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
+ const archiveFileSize = utils.getArchiveFileSize(archivePath);
+ core.debug(`File Size: ${archiveFileSize}`);
+ if (archiveFileSize > fileSizeLimit) {
+ utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
+ return;
+ }
+ core.debug(`Saving Cache (ID: ${cacheId})`);
+ yield cacheHttpClient.saveCache(cacheId, archivePath);
+ }
+ catch (error) {
+ utils.logWarning(error.message);
+ }
+ });
+}
+exports.default = run;
+
+
+/***/ }),
+
+/***/ 786:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = paginatePlugin;
+
+const { paginateRest } = __webpack_require__(322);
+
+function paginatePlugin(octokit) {
+ Object.assign(octokit, paginateRest(octokit));
+}
+
+
+/***/ }),
+
+/***/ 790:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticate;
+
+const { Deprecation } = __webpack_require__(692);
+const once = __webpack_require__(969);
+
+const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation));
+
+function authenticate(state, options) {
+ deprecateAuthenticate(
+ state.octokit.log,
+ new Deprecation(
+ '[@octokit/rest] octokit.authenticate() is deprecated. Use "auth" constructor option instead.'
+ )
+ );
+
+ if (!options) {
+ state.auth = false;
+ return;
+ }
+
+ switch (options.type) {
+ case "basic":
+ if (!options.username || !options.password) {
+ throw new Error(
+ "Basic authentication requires both a username and password to be set"
+ );
+ }
+ break;
+
+ case "oauth":
+ if (!options.token && !(options.key && options.secret)) {
+ throw new Error(
+ "OAuth2 authentication requires a token or key & secret to be set"
+ );
+ }
+ break;
+
+ case "token":
+ case "app":
+ if (!options.token) {
+ throw new Error("Token authentication requires a token to be set");
+ }
+ break;
+
+ default:
+ throw new Error(
+ "Invalid authentication type, must be 'basic', 'oauth', 'token' or 'app'"
+ );
+ }
+
+ state.auth = options;
+}
+
+
+/***/ }),
+
+/***/ 796:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var osName = _interopDefault(__webpack_require__(2));
+
+function getUserAgent() {
+ try {
+ return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
+ } catch (error) {
+ if (/wmic os get Caption/.test(error.message)) {
+ return "Windows ";
+ }
+
+ throw error;
+ }
+}
+
+exports.getUserAgent = getUserAgent;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 813:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
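+// Infer the token type from its shape: a JWT (three dot-separated segments)
+// is treated as an "app" token, a "v1."-prefixed value as an "installation"
+// token, and anything else as a plain "oauth" token.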
+async function auth(token) {
+ const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth";
+ return {
+ type: "token",
+ token: token,
+ tokenType
+ };
+}
+
+/**
+ * Prefix token for usage in the Authorization header
+ *
+ * @param token OAuth token or JSON Web Token
+ */
+function withAuthorizationPrefix(token) {
+ if (token.split(/\./).length === 3) {
+ return `bearer ${token}`;
+ }
+
+ return `token ${token}`;
+}
+
+async function hook(token, request, route, parameters) {
+ const endpoint = request.endpoint.merge(route, parameters);
+ endpoint.headers.authorization = withAuthorizationPrefix(token);
+ return request(endpoint);
+}
+
+const createTokenAuth = function createTokenAuth(token) {
+ if (!token) {
+ throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
+ }
+
+ if (typeof token !== "string") {
+ throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
+ }
+
+ token = token.replace(/^(token|bearer) +/i, "");
+ return Object.assign(auth.bind(null, token), {
+ hook: hook.bind(null, token)
+ });
+};
+
+exports.createTokenAuth = createTokenAuth;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 814:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = which
+which.sync = whichSync
+
+var isWindows = process.platform === 'win32' ||
+ process.env.OSTYPE === 'cygwin' ||
+ process.env.OSTYPE === 'msys'
+
+var path = __webpack_require__(622)
+var COLON = isWindows ? ';' : ':'
+var isexe = __webpack_require__(742)
+
+function getNotFoundError (cmd) {
+ var er = new Error('not found: ' + cmd)
+ er.code = 'ENOENT'
+
+ return er
+}
+
+function getPathInfo (cmd, opt) {
+ var colon = opt.colon || COLON
+ var pathEnv = opt.path || process.env.PATH || ''
+ var pathExt = ['']
+
+ pathEnv = pathEnv.split(colon)
+
+ var pathExtExe = ''
+ if (isWindows) {
+ pathEnv.unshift(process.cwd())
+ pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM')
+ pathExt = pathExtExe.split(colon)
+
+
+ // Always test the cmd itself first. isexe will check to make sure
+ // it's found in the pathExt set.
+ if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
+ pathExt.unshift('')
+ }
+
+ // If it has a slash, then we don't bother searching the pathenv.
+ // just check the file itself, and that's it.
+ if (cmd.match(/\//) || isWindows && cmd.match(/\\/))
+ pathEnv = ['']
+
+ return {
+ env: pathEnv,
+ ext: pathExt,
+ extExe: pathExtExe
+ }
+}
+
+function which (cmd, opt, cb) {
+ if (typeof opt === 'function') {
+ cb = opt
+ opt = {}
+ }
+
+ var info = getPathInfo(cmd, opt)
+ var pathEnv = info.env
+ var pathExt = info.ext
+ var pathExtExe = info.extExe
+ var found = []
+
+ ;(function F (i, l) {
+ if (i === l) {
+ if (opt.all && found.length)
+ return cb(null, found)
+ else
+ return cb(getNotFoundError(cmd))
+ }
+
+ var pathPart = pathEnv[i]
+ if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
+ pathPart = pathPart.slice(1, -1)
+
+ var p = path.join(pathPart, cmd)
+ if (!pathPart && (/^\.[\\\/]/).test(cmd)) {
+ p = cmd.slice(0, 2) + p
+ }
+ ;(function E (ii, ll) {
+ if (ii === ll) return F(i + 1, l)
+ var ext = pathExt[ii]
+ isexe(p + ext, { pathExt: pathExtExe }, function (er, is) {
+ if (!er && is) {
+ if (opt.all)
+ found.push(p + ext)
+ else
+ return cb(null, p + ext)
+ }
+ return E(ii + 1, ll)
+ })
+ })(0, pathExt.length)
+ })(0, pathEnv.length)
+}
+
+function whichSync (cmd, opt) {
+ opt = opt || {}
+
+ var info = getPathInfo(cmd, opt)
+ var pathEnv = info.env
+ var pathExt = info.ext
+ var pathExtExe = info.extExe
+ var found = []
+
+ for (var i = 0, l = pathEnv.length; i < l; i ++) {
+ var pathPart = pathEnv[i]
+ if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
+ pathPart = pathPart.slice(1, -1)
+
+ var p = path.join(pathPart, cmd)
+ if (!pathPart && /^\.[\\\/]/.test(cmd)) {
+ p = cmd.slice(0, 2) + p
+ }
+ for (var j = 0, ll = pathExt.length; j < ll; j ++) {
+ var cur = p + pathExt[j]
+ var is
+ try {
+ is = isexe.sync(cur, { pathExt: pathExtExe })
+ if (is) {
+ if (opt.all)
+ found.push(cur)
+ else
+ return cur
+ }
+ } catch (ex) {}
+ }
+ }
+
+ if (opt.all && found.length)
+ return found
+
+ if (opt.nothrow)
+ return null
+
+ throw getNotFoundError(cmd)
+}
+
+
+/***/ }),
+
+/***/ 816:
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = /^#!.*/;
+
+
+/***/ }),
+
+/***/ 818:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = isexe
+isexe.sync = sync
+
+var fs = __webpack_require__(747)
+
+function checkPathExt (path, options) {
+ var pathext = options.pathExt !== undefined ?
+ options.pathExt : process.env.PATHEXT
+
+ if (!pathext) {
+ return true
+ }
+
+ pathext = pathext.split(';')
+ if (pathext.indexOf('') !== -1) {
+ return true
+ }
+ for (var i = 0; i < pathext.length; i++) {
+ var p = pathext[i].toLowerCase()
+ if (p && path.substr(-p.length).toLowerCase() === p) {
+ return true
+ }
+ }
+ return false
+}
+
+function checkStat (stat, path, options) {
+ if (!stat.isSymbolicLink() && !stat.isFile()) {
+ return false
+ }
+ return checkPathExt(path, options)
+}
+
+function isexe (path, options, cb) {
+ fs.stat(path, function (er, stat) {
+ cb(er, er ? false : checkStat(stat, path, options))
+ })
+}
+
+function sync (path, options) {
+ return checkStat(fs.statSync(path), path, options)
+}
+
+
+/***/ }),
+
+/***/ 826:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var rng = __webpack_require__(139);
+var bytesToUuid = __webpack_require__(105);
+
+function v4(options, buf, offset) {
+ var i = buf && offset || 0;
+
+ if (typeof(options) == 'string') {
+ buf = options === 'binary' ? new Array(16) : null;
+ options = null;
+ }
+ options = options || {};
+
+ var rnds = options.random || (options.rng || rng)();
+
+ // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
+ rnds[6] = (rnds[6] & 0x0f) | 0x40;
+ rnds[8] = (rnds[8] & 0x3f) | 0x80;
+
+ // Copy bytes to buffer, if provided
+ if (buf) {
+ for (var ii = 0; ii < 16; ++ii) {
+ buf[i + ii] = rnds[ii];
+ }
+ }
+
+ return buf || bytesToUuid(rnds);
+}
+
+module.exports = v4;
+
+
+/***/ }),
+
+/***/ 835:
+/***/ (function(module) {
+
+module.exports = require("url");
+
+/***/ }),
+
+/***/ 850:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = paginationMethodsPlugin
+
+function paginationMethodsPlugin (octokit) {
+ octokit.getFirstPage = __webpack_require__(777).bind(null, octokit)
+ octokit.getLastPage = __webpack_require__(649).bind(null, octokit)
+ octokit.getNextPage = __webpack_require__(550).bind(null, octokit)
+ octokit.getPreviousPage = __webpack_require__(563).bind(null, octokit)
+ octokit.hasFirstPage = __webpack_require__(536)
+ octokit.hasLastPage = __webpack_require__(336)
+ octokit.hasNextPage = __webpack_require__(929)
+ octokit.hasPreviousPage = __webpack_require__(558)
+}
+
+
+/***/ }),
+
+/***/ 854:
+/***/ (function(module) {
+
+/**
+ * lodash (Custom Build)
+ * Build: `lodash modularize exports="npm" -o ./`
+ * Copyright jQuery Foundation and other contributors
+ * Released under MIT license
+ * Based on Underscore.js 1.8.3
+ * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+ */
+
+/** Used as the `TypeError` message for "Functions" methods. */
+var FUNC_ERROR_TEXT = 'Expected a function';
+
+/** Used to stand-in for `undefined` hash values. */
+var HASH_UNDEFINED = '__lodash_hash_undefined__';
+
+/** Used as references for various `Number` constants. */
+var INFINITY = 1 / 0;
+
+/** `Object#toString` result references. */
+var funcTag = '[object Function]',
+ genTag = '[object GeneratorFunction]',
+ symbolTag = '[object Symbol]';
+
+/** Used to match property names within property paths. */
+var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,
+ reIsPlainProp = /^\w*$/,
+ reLeadingDot = /^\./,
+ rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g;
+
+/**
+ * Used to match `RegExp`
+ * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
+ */
+var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
+
+/** Used to match backslashes in property paths. */
+var reEscapeChar = /\\(\\)?/g;
+
+/** Used to detect host constructors (Safari). */
+var reIsHostCtor = /^\[object .+?Constructor\]$/;
+
+/** Detect free variable `global` from Node.js. */
+var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
+
+/** Detect free variable `self`. */
+var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
+
+/** Used as a reference to the global object. */
+var root = freeGlobal || freeSelf || Function('return this')();
+
+/**
+ * Gets the value at `key` of `object`.
+ *
+ * @private
+ * @param {Object} [object] The object to query.
+ * @param {string} key The key of the property to get.
+ * @returns {*} Returns the property value.
+ */
+function getValue(object, key) {
+ return object == null ? undefined : object[key];
+}
+
+/**
+ * Checks if `value` is a host object in IE < 9.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a host object, else `false`.
+ */
+function isHostObject(value) {
+ // Many host objects are `Object` objects that can coerce to strings
+ // despite having improperly defined `toString` methods.
+ var result = false;
+ if (value != null && typeof value.toString != 'function') {
+ try {
+ result = !!(value + '');
+ } catch (e) {}
+ }
+ return result;
+}
+
+/** Used for built-in method references. */
+var arrayProto = Array.prototype,
+ funcProto = Function.prototype,
+ objectProto = Object.prototype;
+
+/** Used to detect overreaching core-js shims. */
+var coreJsData = root['__core-js_shared__'];
+
+/** Used to detect methods masquerading as native. */
+var maskSrcKey = (function() {
+ var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
+ return uid ? ('Symbol(src)_1.' + uid) : '';
+}());
+
+/** Used to resolve the decompiled source of functions. */
+var funcToString = funcProto.toString;
+
+/** Used to check objects for own properties. */
+var hasOwnProperty = objectProto.hasOwnProperty;
+
+/**
+ * Used to resolve the
+ * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
+ * of values.
+ */
+var objectToString = objectProto.toString;
+
+/** Used to detect if a method is native. */
+var reIsNative = RegExp('^' +
+ funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
+ .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
+);
+
+/** Built-in value references. */
+var Symbol = root.Symbol,
+ splice = arrayProto.splice;
+
+/* Built-in method references that are verified to be native. */
+var Map = getNative(root, 'Map'),
+ nativeCreate = getNative(Object, 'create');
+
+/** Used to convert symbols to primitives and strings. */
+var symbolProto = Symbol ? Symbol.prototype : undefined,
+ symbolToString = symbolProto ? symbolProto.toString : undefined;
+
+/**
+ * Creates a hash object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function Hash(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the hash.
+ *
+ * @private
+ * @name clear
+ * @memberOf Hash
+ */
+function hashClear() {
+ this.__data__ = nativeCreate ? nativeCreate(null) : {};
+}
+
+/**
+ * Removes `key` and its value from the hash.
+ *
+ * @private
+ * @name delete
+ * @memberOf Hash
+ * @param {Object} hash The hash to modify.
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function hashDelete(key) {
+ return this.has(key) && delete this.__data__[key];
+}
+
+/**
+ * Gets the hash value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf Hash
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function hashGet(key) {
+ var data = this.__data__;
+ if (nativeCreate) {
+ var result = data[key];
+ return result === HASH_UNDEFINED ? undefined : result;
+ }
+ return hasOwnProperty.call(data, key) ? data[key] : undefined;
+}
+
+/**
+ * Checks if a hash value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf Hash
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function hashHas(key) {
+ var data = this.__data__;
+ return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key);
+}
+
+/**
+ * Sets the hash `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf Hash
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the hash instance.
+ */
+function hashSet(key, value) {
+ var data = this.__data__;
+ data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
+ return this;
+}
+
+// Add methods to `Hash`.
+Hash.prototype.clear = hashClear;
+Hash.prototype['delete'] = hashDelete;
+Hash.prototype.get = hashGet;
+Hash.prototype.has = hashHas;
+Hash.prototype.set = hashSet;
+
+/**
+ * Creates a list cache object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function ListCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the list cache.
+ *
+ * @private
+ * @name clear
+ * @memberOf ListCache
+ */
+function listCacheClear() {
+ this.__data__ = [];
+}
+
+/**
+ * Removes `key` and its value from the list cache.
+ *
+ * @private
+ * @name delete
+ * @memberOf ListCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function listCacheDelete(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ return false;
+ }
+ var lastIndex = data.length - 1;
+ if (index == lastIndex) {
+ data.pop();
+ } else {
+ splice.call(data, index, 1);
+ }
+ return true;
+}
+
+/**
+ * Gets the list cache value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf ListCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function listCacheGet(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ return index < 0 ? undefined : data[index][1];
+}
+
+/**
+ * Checks if a list cache value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf ListCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function listCacheHas(key) {
+ return assocIndexOf(this.__data__, key) > -1;
+}
+
+/**
+ * Sets the list cache `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf ListCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the list cache instance.
+ */
+function listCacheSet(key, value) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ data.push([key, value]);
+ } else {
+ data[index][1] = value;
+ }
+ return this;
+}
+
+// Add methods to `ListCache`.
+ListCache.prototype.clear = listCacheClear;
+ListCache.prototype['delete'] = listCacheDelete;
+ListCache.prototype.get = listCacheGet;
+ListCache.prototype.has = listCacheHas;
+ListCache.prototype.set = listCacheSet;
+
+/**
+ * Creates a map cache object to store key-value pairs.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function MapCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the map.
+ *
+ * @private
+ * @name clear
+ * @memberOf MapCache
+ */
+function mapCacheClear() {
+ this.__data__ = {
+ 'hash': new Hash,
+ 'map': new (Map || ListCache),
+ 'string': new Hash
+ };
+}
+
+/**
+ * Removes `key` and its value from the map.
+ *
+ * @private
+ * @name delete
+ * @memberOf MapCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function mapCacheDelete(key) {
+ return getMapData(this, key)['delete'](key);
+}
+
+/**
+ * Gets the map value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf MapCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function mapCacheGet(key) {
+ return getMapData(this, key).get(key);
+}
+
+/**
+ * Checks if a map value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf MapCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function mapCacheHas(key) {
+ return getMapData(this, key).has(key);
+}
+
+/**
+ * Sets the map `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf MapCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the map cache instance.
+ */
+function mapCacheSet(key, value) {
+ getMapData(this, key).set(key, value);
+ return this;
+}
+
+// Add methods to `MapCache`.
+MapCache.prototype.clear = mapCacheClear;
+MapCache.prototype['delete'] = mapCacheDelete;
+MapCache.prototype.get = mapCacheGet;
+MapCache.prototype.has = mapCacheHas;
+MapCache.prototype.set = mapCacheSet;
+
+/**
+ * Gets the index at which the `key` is found in `array` of key-value pairs.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {*} key The key to search for.
+ * @returns {number} Returns the index of the matched value, else `-1`.
+ */
+function assocIndexOf(array, key) {
+ var length = array.length;
+ while (length--) {
+ if (eq(array[length][0], key)) {
+ return length;
+ }
+ }
+ return -1;
+}
+
+/**
+ * The base implementation of `_.get` without support for default values.
+ *
+ * @private
+ * @param {Object} object The object to query.
+ * @param {Array|string} path The path of the property to get.
+ * @returns {*} Returns the resolved value.
+ */
+function baseGet(object, path) {
+ path = isKey(path, object) ? [path] : castPath(path);
+
+ var index = 0,
+ length = path.length;
+
+ while (object != null && index < length) {
+ object = object[toKey(path[index++])];
+ }
+ return (index && index == length) ? object : undefined;
+}
+
+/**
+ * The base implementation of `_.isNative` without bad shim checks.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a native function,
+ * else `false`.
+ */
+function baseIsNative(value) {
+ if (!isObject(value) || isMasked(value)) {
+ return false;
+ }
+ var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor;
+ return pattern.test(toSource(value));
+}
+
+/**
+ * The base implementation of `_.toString` which doesn't convert nullish
+ * values to empty strings.
+ *
+ * @private
+ * @param {*} value The value to process.
+ * @returns {string} Returns the string.
+ */
+function baseToString(value) {
+ // Exit early for strings to avoid a performance hit in some environments.
+ if (typeof value == 'string') {
+ return value;
+ }
+ if (isSymbol(value)) {
+ return symbolToString ? symbolToString.call(value) : '';
+ }
+ var result = (value + '');
+ return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
+}
+
+/**
+ * Casts `value` to a path array if it's not one.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {Array} Returns the cast property path array.
+ */
+function castPath(value) {
+ return isArray(value) ? value : stringToPath(value);
+}
+
+/**
+ * Gets the data for `map`.
+ *
+ * @private
+ * @param {Object} map The map to query.
+ * @param {string} key The reference key.
+ * @returns {*} Returns the map data.
+ */
+function getMapData(map, key) {
+ var data = map.__data__;
+ return isKeyable(key)
+ ? data[typeof key == 'string' ? 'string' : 'hash']
+ : data.map;
+}
+
+/**
+ * Gets the native function at `key` of `object`.
+ *
+ * @private
+ * @param {Object} object The object to query.
+ * @param {string} key The key of the method to get.
+ * @returns {*} Returns the function if it's native, else `undefined`.
+ */
+function getNative(object, key) {
+ var value = getValue(object, key);
+ return baseIsNative(value) ? value : undefined;
+}
+
+/**
+ * Checks if `value` is a property name and not a property path.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @param {Object} [object] The object to query keys on.
+ * @returns {boolean} Returns `true` if `value` is a property name, else `false`.
+ */
+function isKey(value, object) {
+ if (isArray(value)) {
+ return false;
+ }
+ var type = typeof value;
+ if (type == 'number' || type == 'symbol' || type == 'boolean' ||
+ value == null || isSymbol(value)) {
+ return true;
+ }
+ return reIsPlainProp.test(value) || !reIsDeepProp.test(value) ||
+ (object != null && value in Object(object));
+}
+
+/**
+ * Checks if `value` is suitable for use as a unique object key.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is suitable, else `false`.
+ */
+function isKeyable(value) {
+ var type = typeof value;
+ return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
+ ? (value !== '__proto__')
+ : (value === null);
+}
+
+/**
+ * Checks if `func` has its source masked.
+ *
+ * @private
+ * @param {Function} func The function to check.
+ * @returns {boolean} Returns `true` if `func` is masked, else `false`.
+ */
+function isMasked(func) {
+ return !!maskSrcKey && (maskSrcKey in func);
+}
+
+/**
+ * Converts `string` to a property path array.
+ *
+ * @private
+ * @param {string} string The string to convert.
+ * @returns {Array} Returns the property path array.
+ */
+var stringToPath = memoize(function(string) {
+ string = toString(string);
+
+ var result = [];
+ if (reLeadingDot.test(string)) {
+ result.push('');
+ }
+ string.replace(rePropName, function(match, number, quote, string) {
+ result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match));
+ });
+ return result;
+});
+
+/**
+ * Converts `value` to a string key if it's not a string or symbol.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {string|symbol} Returns the key.
+ */
+function toKey(value) {
+ if (typeof value == 'string' || isSymbol(value)) {
+ return value;
+ }
+ var result = (value + '');
+ return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
+}
+
+/**
+ * Converts `func` to its source code.
+ *
+ * @private
+ * @param {Function} func The function to process.
+ * @returns {string} Returns the source code.
+ */
+function toSource(func) {
+ if (func != null) {
+ try {
+ return funcToString.call(func);
+ } catch (e) {}
+ try {
+ return (func + '');
+ } catch (e) {}
+ }
+ return '';
+}
+
+/**
+ * Creates a function that memoizes the result of `func`. If `resolver` is
+ * provided, it determines the cache key for storing the result based on the
+ * arguments provided to the memoized function. By default, the first argument
+ * provided to the memoized function is used as the map cache key. The `func`
+ * is invoked with the `this` binding of the memoized function.
+ *
+ * **Note:** The cache is exposed as the `cache` property on the memoized
+ * function. Its creation may be customized by replacing the `_.memoize.Cache`
+ * constructor with one whose instances implement the
+ * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object)
+ * method interface of `delete`, `get`, `has`, and `set`.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Function
+ * @param {Function} func The function to have its output memoized.
+ * @param {Function} [resolver] The function to resolve the cache key.
+ * @returns {Function} Returns the new memoized function.
+ * @example
+ *
+ * var object = { 'a': 1, 'b': 2 };
+ * var other = { 'c': 3, 'd': 4 };
+ *
+ * var values = _.memoize(_.values);
+ * values(object);
+ * // => [1, 2]
+ *
+ * values(other);
+ * // => [3, 4]
+ *
+ * object.a = 2;
+ * values(object);
+ * // => [1, 2]
+ *
+ * // Modify the result cache.
+ * values.cache.set(object, ['a', 'b']);
+ * values(object);
+ * // => ['a', 'b']
+ *
+ * // Replace `_.memoize.Cache`.
+ * _.memoize.Cache = WeakMap;
+ */
+function memoize(func, resolver) {
+ if (typeof func != 'function' || (resolver && typeof resolver != 'function')) {
+ throw new TypeError(FUNC_ERROR_TEXT);
+ }
+ var memoized = function() {
+ var args = arguments,
+ key = resolver ? resolver.apply(this, args) : args[0],
+ cache = memoized.cache;
+
+ if (cache.has(key)) {
+ return cache.get(key);
+ }
+ var result = func.apply(this, args);
+ memoized.cache = cache.set(key, result);
+ return result;
+ };
+ memoized.cache = new (memoize.Cache || MapCache);
+ return memoized;
+}
+
+// Assign cache to `_.memoize`.
+memoize.Cache = MapCache;
+
+/**
+ * Performs a
+ * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * comparison between two values to determine if they are equivalent.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to compare.
+ * @param {*} other The other value to compare.
+ * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
+ * @example
+ *
+ * var object = { 'a': 1 };
+ * var other = { 'a': 1 };
+ *
+ * _.eq(object, object);
+ * // => true
+ *
+ * _.eq(object, other);
+ * // => false
+ *
+ * _.eq('a', 'a');
+ * // => true
+ *
+ * _.eq('a', Object('a'));
+ * // => false
+ *
+ * _.eq(NaN, NaN);
+ * // => true
+ */
+function eq(value, other) {
+ return value === other || (value !== value && other !== other);
+}
+
+/**
+ * Checks if `value` is classified as an `Array` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an array, else `false`.
+ * @example
+ *
+ * _.isArray([1, 2, 3]);
+ * // => true
+ *
+ * _.isArray(document.body.children);
+ * // => false
+ *
+ * _.isArray('abc');
+ * // => false
+ *
+ * _.isArray(_.noop);
+ * // => false
+ */
+var isArray = Array.isArray;
+
+/**
+ * Checks if `value` is classified as a `Function` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a function, else `false`.
+ * @example
+ *
+ * _.isFunction(_);
+ * // => true
+ *
+ * _.isFunction(/abc/);
+ * // => false
+ */
+function isFunction(value) {
+ // The use of `Object#toString` avoids issues with the `typeof` operator
+ // in Safari 8-9 which returns 'object' for typed array and other constructors.
+ var tag = isObject(value) ? objectToString.call(value) : '';
+ return tag == funcTag || tag == genTag;
+}
+
+/**
+ * Checks if `value` is the
+ * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
+ * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an object, else `false`.
+ * @example
+ *
+ * _.isObject({});
+ * // => true
+ *
+ * _.isObject([1, 2, 3]);
+ * // => true
+ *
+ * _.isObject(_.noop);
+ * // => true
+ *
+ * _.isObject(null);
+ * // => false
+ */
+function isObject(value) {
+ var type = typeof value;
+ return !!value && (type == 'object' || type == 'function');
+}
+
+/**
+ * Checks if `value` is object-like. A value is object-like if it's not `null`
+ * and has a `typeof` result of "object".
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is object-like, else `false`.
+ * @example
+ *
+ * _.isObjectLike({});
+ * // => true
+ *
+ * _.isObjectLike([1, 2, 3]);
+ * // => true
+ *
+ * _.isObjectLike(_.noop);
+ * // => false
+ *
+ * _.isObjectLike(null);
+ * // => false
+ */
+function isObjectLike(value) {
+ return !!value && typeof value == 'object';
+}
+
+/**
+ * Checks if `value` is classified as a `Symbol` primitive or object.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a symbol, else `false`.
+ * @example
+ *
+ * _.isSymbol(Symbol.iterator);
+ * // => true
+ *
+ * _.isSymbol('abc');
+ * // => false
+ */
+function isSymbol(value) {
+ return typeof value == 'symbol' ||
+ (isObjectLike(value) && objectToString.call(value) == symbolTag);
+}
+
+/**
+ * Converts `value` to a string. An empty string is returned for `null`
+ * and `undefined` values. The sign of `-0` is preserved.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to process.
+ * @returns {string} Returns the string.
+ * @example
+ *
+ * _.toString(null);
+ * // => ''
+ *
+ * _.toString(-0);
+ * // => '-0'
+ *
+ * _.toString([1, 2, 3]);
+ * // => '1,2,3'
+ */
+function toString(value) {
+ return value == null ? '' : baseToString(value);
+}
+
+/**
+ * Gets the value at `path` of `object`. If the resolved value is
+ * `undefined`, the `defaultValue` is returned in its place.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.7.0
+ * @category Object
+ * @param {Object} object The object to query.
+ * @param {Array|string} path The path of the property to get.
+ * @param {*} [defaultValue] The value returned for `undefined` resolved values.
+ * @returns {*} Returns the resolved value.
+ * @example
+ *
+ * var object = { 'a': [{ 'b': { 'c': 3 } }] };
+ *
+ * _.get(object, 'a[0].b.c');
+ * // => 3
+ *
+ * _.get(object, ['a', '0', 'b', 'c']);
+ * // => 3
+ *
+ * _.get(object, 'a.b.c', 'default');
+ * // => 'default'
+ */
+function get(object, path, defaultValue) {
+ var result = object == null ? undefined : baseGet(object, path);
+ return result === undefined ? defaultValue : result;
+}
+
+module.exports = get;
+
+
+/***/ }),
+
+/***/ 856:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = __webpack_require__(141);
+
+
+/***/ }),
+
+/***/ 861:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = registerPlugin;
+
+const factory = __webpack_require__(513);
+
+function registerPlugin(plugins, pluginFunction) {
+ return factory(
+ plugins.includes(pluginFunction) ? plugins : plugins.concat(pluginFunction)
+ );
+}
+
+
+/***/ }),
+
+/***/ 864:
/***/ (function(module, exports) {
exports = module.exports = SemVer
@@ -4240,3396 +33111,31 @@ function coerce (version, options) {
/***/ }),
-/***/ 281:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const internal_globber_1 = __webpack_require__(297);
-/**
- * Constructs a globber
- *
- * @param patterns Patterns separated by newlines
- * @param options Glob options
- */
-function create(patterns, options) {
- return __awaiter(this, void 0, void 0, function* () {
- return yield internal_globber_1.DefaultGlobber.create(patterns, options);
- });
-}
-exports.create = create;
-//# sourceMappingURL=glob.js.map
-
-/***/ }),
-
-/***/ 297:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __asyncValues = (this && this.__asyncValues) || function (o) {
- if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
- var m = o[Symbol.asyncIterator], i;
- return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
- function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
- function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
-};
-var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
-var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
- if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
- var g = generator.apply(thisArg, _arguments || []), i, q = [];
- return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
- function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
- function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
- function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
- function fulfill(value) { resume("next", value); }
- function reject(value) { resume("throw", value); }
- function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __webpack_require__(470);
-const fs = __webpack_require__(747);
-const globOptionsHelper = __webpack_require__(601);
-const path = __webpack_require__(622);
-const patternHelper = __webpack_require__(597);
-const internal_match_kind_1 = __webpack_require__(327);
-const internal_pattern_1 = __webpack_require__(923);
-const internal_search_state_1 = __webpack_require__(728);
-const IS_WINDOWS = process.platform === 'win32';
-class DefaultGlobber {
- constructor(options) {
- this.patterns = [];
- this.searchPaths = [];
- this.options = globOptionsHelper.getOptions(options);
- }
- getSearchPaths() {
- // Return a copy
- return this.searchPaths.slice();
- }
- glob() {
- var e_1, _a;
- return __awaiter(this, void 0, void 0, function* () {
- const result = [];
- try {
- for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
- const itemPath = _c.value;
- result.push(itemPath);
- }
- }
- catch (e_1_1) { e_1 = { error: e_1_1 }; }
- finally {
- try {
- if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
- }
- finally { if (e_1) throw e_1.error; }
- }
- return result;
- });
- }
- globGenerator() {
- return __asyncGenerator(this, arguments, function* globGenerator_1() {
- // Fill in defaults options
- const options = globOptionsHelper.getOptions(this.options);
- // Implicit descendants?
- const patterns = [];
- for (const pattern of this.patterns) {
- patterns.push(pattern);
- if (options.implicitDescendants &&
- (pattern.trailingSeparator ||
- pattern.segments[pattern.segments.length - 1] !== '**')) {
- patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
- }
- }
- // Push the search paths
- const stack = [];
- for (const searchPath of patternHelper.getSearchPaths(patterns)) {
- core.debug(`Search path '${searchPath}'`);
- // Exists?
- try {
- // Intentionally using lstat. Detection for broken symlink
- // will be performed later (if following symlinks).
- yield __await(fs.promises.lstat(searchPath));
- }
- catch (err) {
- if (err.code === 'ENOENT') {
- continue;
- }
- throw err;
- }
- stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
- }
- // Search
- const traversalChain = []; // used to detect cycles
- while (stack.length) {
- // Pop
- const item = stack.pop();
- // Match?
- const match = patternHelper.match(patterns, item.path);
- const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
- if (!match && !partialMatch) {
- continue;
- }
- // Stat
- const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)
- // Broken symlink, or symlink cycle detected, or no longer exists
- );
- // Broken symlink, or symlink cycle detected, or no longer exists
- if (!stats) {
- continue;
- }
- // Directory
- if (stats.isDirectory()) {
- // Matched
- if (match & internal_match_kind_1.MatchKind.Directory) {
- yield yield __await(item.path);
- }
- // Descend?
- else if (!partialMatch) {
- continue;
- }
- // Push the child items in reverse
- const childLevel = item.level + 1;
- const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
- stack.push(...childItems.reverse());
- }
- // File
- else if (match & internal_match_kind_1.MatchKind.File) {
- yield yield __await(item.path);
- }
- }
- });
- }
- /**
- * Constructs a DefaultGlobber
- */
- static create(patterns, options) {
- return __awaiter(this, void 0, void 0, function* () {
- const result = new DefaultGlobber(options);
- if (IS_WINDOWS) {
- patterns = patterns.replace(/\r\n/g, '\n');
- patterns = patterns.replace(/\r/g, '\n');
- }
- const lines = patterns.split('\n').map(x => x.trim());
- for (const line of lines) {
- // Empty or comment
- if (!line || line.startsWith('#')) {
- continue;
- }
- // Pattern
- else {
- result.patterns.push(new internal_pattern_1.Pattern(line));
- }
- }
- result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
- return result;
- });
- }
- static stat(item, options, traversalChain) {
- return __awaiter(this, void 0, void 0, function* () {
- // Note:
- // `stat` returns info about the target of a symlink (or symlink chain)
- // `lstat` returns info about a symlink itself
- let stats;
- if (options.followSymbolicLinks) {
- try {
- // Use `stat` (following symlinks)
- stats = yield fs.promises.stat(item.path);
- }
- catch (err) {
- if (err.code === 'ENOENT') {
- if (options.omitBrokenSymbolicLinks) {
- core.debug(`Broken symlink '${item.path}'`);
- return undefined;
- }
- throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
- }
- throw err;
- }
- }
- else {
- // Use `lstat` (not following symlinks)
- stats = yield fs.promises.lstat(item.path);
- }
- // Note, isDirectory() returns false for the lstat of a symlink
- if (stats.isDirectory() && options.followSymbolicLinks) {
- // Get the realpath
- const realPath = yield fs.promises.realpath(item.path);
- // Fixup the traversal chain to match the item level
- while (traversalChain.length >= item.level) {
- traversalChain.pop();
- }
- // Test for a cycle
- if (traversalChain.some((x) => x === realPath)) {
- core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
- return undefined;
- }
- // Update the traversal chain
- traversalChain.push(realPath);
- }
- return stats;
- });
- }
-}
-exports.DefaultGlobber = DefaultGlobber;
-//# sourceMappingURL=internal-globber.js.map
-
-/***/ }),
-
-/***/ 306:
+/***/ 866:
/***/ (function(module, __unusedexports, __webpack_require__) {
-var concatMap = __webpack_require__(896);
-var balanced = __webpack_require__(621);
-
-module.exports = expandTop;
-
-var escSlash = '\0SLASH'+Math.random()+'\0';
-var escOpen = '\0OPEN'+Math.random()+'\0';
-var escClose = '\0CLOSE'+Math.random()+'\0';
-var escComma = '\0COMMA'+Math.random()+'\0';
-var escPeriod = '\0PERIOD'+Math.random()+'\0';
-
-function numeric(str) {
- return parseInt(str, 10) == str
- ? parseInt(str, 10)
- : str.charCodeAt(0);
-}
-
-function escapeBraces(str) {
- return str.split('\\\\').join(escSlash)
- .split('\\{').join(escOpen)
- .split('\\}').join(escClose)
- .split('\\,').join(escComma)
- .split('\\.').join(escPeriod);
-}
-
-function unescapeBraces(str) {
- return str.split(escSlash).join('\\')
- .split(escOpen).join('{')
- .split(escClose).join('}')
- .split(escComma).join(',')
- .split(escPeriod).join('.');
-}
-
-
-// Basically just str.split(","), but handling cases
-// where we have nested braced sections, which should be
-// treated as individual members, like {a,{b,c},d}
-function parseCommaParts(str) {
- if (!str)
- return [''];
-
- var parts = [];
- var m = balanced('{', '}', str);
-
- if (!m)
- return str.split(',');
-
- var pre = m.pre;
- var body = m.body;
- var post = m.post;
- var p = pre.split(',');
-
- p[p.length-1] += '{' + body + '}';
- var postParts = parseCommaParts(post);
- if (post.length) {
- p[p.length-1] += postParts.shift();
- p.push.apply(p, postParts);
- }
-
- parts.push.apply(parts, p);
-
- return parts;
-}
-
-function expandTop(str) {
- if (!str)
- return [];
-
- // I don't know why Bash 4.3 does this, but it does.
- // Anything starting with {} will have the first two bytes preserved
- // but *only* at the top level, so {},a}b will not expand to anything,
- // but a{},b}c will be expanded to [a}c,abc].
- // One could argue that this is a bug in Bash, but since the goal of
- // this module is to match Bash's rules, we escape a leading {}
- if (str.substr(0, 2) === '{}') {
- str = '\\{\\}' + str.substr(2);
- }
-
- return expand(escapeBraces(str), true).map(unescapeBraces);
-}
-
-function identity(e) {
- return e;
-}
-
-function embrace(str) {
- return '{' + str + '}';
-}
-function isPadded(el) {
- return /^-?0\d/.test(el);
-}
-
-function lte(i, y) {
- return i <= y;
-}
-function gte(i, y) {
- return i >= y;
-}
-
-function expand(str, isTop) {
- var expansions = [];
-
- var m = balanced('{', '}', str);
- if (!m || /\$$/.test(m.pre)) return [str];
-
- var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
- var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
- var isSequence = isNumericSequence || isAlphaSequence;
- var isOptions = m.body.indexOf(',') >= 0;
- if (!isSequence && !isOptions) {
- // {a},b}
- if (m.post.match(/,.*\}/)) {
- str = m.pre + '{' + m.body + escClose + m.post;
- return expand(str);
- }
- return [str];
- }
-
- var n;
- if (isSequence) {
- n = m.body.split(/\.\./);
- } else {
- n = parseCommaParts(m.body);
- if (n.length === 1) {
- // x{{a,b}}y ==> x{a}y x{b}y
- n = expand(n[0], false).map(embrace);
- if (n.length === 1) {
- var post = m.post.length
- ? expand(m.post, false)
- : [''];
- return post.map(function(p) {
- return m.pre + n[0] + p;
- });
- }
- }
- }
-
- // at this point, n is the parts, and we know it's not a comma set
- // with a single entry.
-
- // no need to expand pre, since it is guaranteed to be free of brace-sets
- var pre = m.pre;
- var post = m.post.length
- ? expand(m.post, false)
- : [''];
-
- var N;
-
- if (isSequence) {
- var x = numeric(n[0]);
- var y = numeric(n[1]);
- var width = Math.max(n[0].length, n[1].length)
- var incr = n.length == 3
- ? Math.abs(numeric(n[2]))
- : 1;
- var test = lte;
- var reverse = y < x;
- if (reverse) {
- incr *= -1;
- test = gte;
- }
- var pad = n.some(isPadded);
-
- N = [];
-
- for (var i = x; test(i, y); i += incr) {
- var c;
- if (isAlphaSequence) {
- c = String.fromCharCode(i);
- if (c === '\\')
- c = '';
- } else {
- c = String(i);
- if (pad) {
- var need = width - c.length;
- if (need > 0) {
- var z = new Array(need + 1).join('0');
- if (i < 0)
- c = '-' + z + c.slice(1);
- else
- c = z + c;
- }
- }
- }
- N.push(c);
- }
- } else {
- N = concatMap(n, function(el) { return expand(el, false) });
- }
-
- for (var j = 0; j < N.length; j++) {
- for (var k = 0; k < post.length; k++) {
- var expansion = pre + N[j] + post[k];
- if (!isTop || isSequence || expansion)
- expansions.push(expansion);
- }
- }
-
- return expansions;
-}
-
-
-
-/***/ }),
-
-/***/ 327:
-/***/ (function(__unusedmodule, exports) {
-
"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-/**
- * Indicates whether a pattern matches a path
- */
-var MatchKind;
-(function (MatchKind) {
- /** Not matched */
- MatchKind[MatchKind["None"] = 0] = "None";
- /** Matched if the path is a directory */
- MatchKind[MatchKind["Directory"] = 1] = "Directory";
- /** Matched if the path is a regular file */
- MatchKind[MatchKind["File"] = 2] = "File";
- /** Matched */
- MatchKind[MatchKind["All"] = 3] = "All";
-})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
-//# sourceMappingURL=internal-match-kind.js.map
+var shebangRegex = __webpack_require__(816);
-/***/ }),
+module.exports = function (str) {
+ var match = str.match(shebangRegex);
-/***/ 342:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+ if (!match) {
+ return null;
+ }
-"use strict";
+ var arr = match[0].replace(/#! ?/, '').split(' ');
+ var bin = arr[0].split('/').pop();
+ var arg = arr[1];
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
+ return (bin === 'env' ?
+ arg :
+ bin + (arg ? ' ' + arg : '')
+ );
};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const http_client_1 = __webpack_require__(539);
-const auth_1 = __webpack_require__(226);
-const crypto = __importStar(__webpack_require__(417));
-const fs = __importStar(__webpack_require__(747));
-const stream = __importStar(__webpack_require__(794));
-const util = __importStar(__webpack_require__(669));
-const constants_1 = __webpack_require__(929);
-const utils = __importStar(__webpack_require__(867));
-const versionSalt = "1.0";
-function isSuccessStatusCode(statusCode) {
- if (!statusCode) {
- return false;
- }
- return statusCode >= 200 && statusCode < 300;
-}
-function isRetryableStatusCode(statusCode) {
- if (!statusCode) {
- return false;
- }
- const retryableStatusCodes = [
- http_client_1.HttpCodes.BadGateway,
- http_client_1.HttpCodes.ServiceUnavailable,
- http_client_1.HttpCodes.GatewayTimeout
- ];
- return retryableStatusCodes.includes(statusCode);
-}
-function getCacheApiUrl(resource) {
- // Ideally we just use ACTIONS_CACHE_URL
- const baseUrl = (process.env["ACTIONS_CACHE_URL"] ||
- process.env["ACTIONS_RUNTIME_URL"] ||
- "").replace("pipelines", "artifactcache");
- if (!baseUrl) {
- throw new Error("Cache Service Url not found, unable to restore cache.");
- }
- const url = `${baseUrl}_apis/artifactcache/${resource}`;
- core.debug(`Resource Url: ${url}`);
- return url;
-}
-function createAcceptHeader(type, apiVersion) {
- return `${type};api-version=${apiVersion}`;
-}
-function getRequestOptions() {
- const requestOptions = {
- headers: {
- Accept: createAcceptHeader("application/json", "6.0-preview.1")
- }
- };
- return requestOptions;
-}
-function createHttpClient() {
- const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
- const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
- return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions());
-}
-function getCacheVersion() {
- // Add salt to cache version to support breaking changes in cache entry
- const components = [
- core.getInput(constants_1.Inputs.Path, { required: true }),
- versionSalt
- ];
- return crypto
- .createHash("sha256")
- .update(components.join("|"))
- .digest("hex");
-}
-exports.getCacheVersion = getCacheVersion;
-function getCacheEntry(keys) {
- return __awaiter(this, void 0, void 0, function* () {
- const httpClient = createHttpClient();
- const version = getCacheVersion();
- const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`;
- const response = yield httpClient.getJson(getCacheApiUrl(resource));
- if (response.statusCode === 204) {
- return null;
- }
- if (!isSuccessStatusCode(response.statusCode)) {
- throw new Error(`Cache service responded with ${response.statusCode}`);
- }
- const cacheResult = response.result;
- const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
- if (!cacheDownloadUrl) {
- throw new Error("Cache not found.");
- }
- core.setSecret(cacheDownloadUrl);
- core.debug(`Cache Result:`);
- core.debug(JSON.stringify(cacheResult));
- return cacheResult;
- });
-}
-exports.getCacheEntry = getCacheEntry;
-function pipeResponseToStream(response, output) {
- return __awaiter(this, void 0, void 0, function* () {
- const pipeline = util.promisify(stream.pipeline);
- yield pipeline(response.message, output);
- });
-}
-function downloadCache(archiveLocation, archivePath) {
- return __awaiter(this, void 0, void 0, function* () {
- const stream = fs.createWriteStream(archivePath);
- const httpClient = new http_client_1.HttpClient("actions/cache");
- const downloadResponse = yield httpClient.get(archiveLocation);
- // Abort download if no traffic received over the socket.
- downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
- downloadResponse.message.destroy();
- core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
- });
- yield pipeResponseToStream(downloadResponse, stream);
- // Validate download size.
- const contentLengthHeader = downloadResponse.message.headers["content-length"];
- if (contentLengthHeader) {
- const expectedLength = parseInt(contentLengthHeader);
- const actualLength = utils.getArchiveFileSize(archivePath);
- if (actualLength != expectedLength) {
- throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
- }
- }
- else {
- core.debug("Unable to validate download, no Content-Length header");
- }
- });
-}
-exports.downloadCache = downloadCache;
-// Reserve Cache
-function reserveCache(key) {
- var _a, _b;
- return __awaiter(this, void 0, void 0, function* () {
- const httpClient = createHttpClient();
- const version = getCacheVersion();
- const reserveCacheRequest = {
- key,
- version
- };
- const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest);
- return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1;
- });
-}
-exports.reserveCache = reserveCache;
-function getContentRange(start, end) {
- // Format: `bytes start-end/filesize
- // start and end are inclusive
- // filesize can be *
- // For a 200 byte chunk starting at byte 0:
- // Content-Range: bytes 0-199/*
- return `bytes ${start}-${end}/*`;
-}
-function uploadChunk(httpClient, resourceUrl, data, start, end) {
- return __awaiter(this, void 0, void 0, function* () {
- core.debug(`Uploading chunk of size ${end -
- start +
- 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
- const additionalHeaders = {
- "Content-Type": "application/octet-stream",
- "Content-Range": getContentRange(start, end)
- };
- const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
- return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders);
- });
- const response = yield uploadChunkRequest();
- if (isSuccessStatusCode(response.message.statusCode)) {
- return;
- }
- if (isRetryableStatusCode(response.message.statusCode)) {
- core.debug(`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`);
- const retryResponse = yield uploadChunkRequest();
- if (isSuccessStatusCode(retryResponse.message.statusCode)) {
- return;
- }
- }
- throw new Error(`Cache service responded with ${response.message.statusCode} during chunk upload.`);
- });
-}
-function parseEnvNumber(key) {
- const value = Number(process.env[key]);
- if (Number.isNaN(value) || value < 0) {
- return undefined;
- }
- return value;
-}
-function uploadFile(httpClient, cacheId, archivePath) {
- var _a, _b;
- return __awaiter(this, void 0, void 0, function* () {
- // Upload Chunks
- const fileSize = fs.statSync(archivePath).size;
- const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
- const fd = fs.openSync(archivePath, "r");
- const concurrency = (_a = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY")) !== null && _a !== void 0 ? _a : 4; // # of HTTP requests in parallel
- const MAX_CHUNK_SIZE = (_b = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE")) !== null && _b !== void 0 ? _b : 32 * 1024 * 1024; // 32 MB Chunks
- core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
- const parallelUploads = [...new Array(concurrency).keys()];
- core.debug("Awaiting all uploads");
- let offset = 0;
- try {
- yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
- while (offset < fileSize) {
- const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
- const start = offset;
- const end = offset + chunkSize - 1;
- offset += MAX_CHUNK_SIZE;
- const chunk = fs.createReadStream(archivePath, {
- fd,
- start,
- end,
- autoClose: false
- });
- yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
- }
- })));
- }
- finally {
- fs.closeSync(fd);
- }
- return;
- });
-}
-function commitCache(httpClient, cacheId, filesize) {
- return __awaiter(this, void 0, void 0, function* () {
- const commitCacheRequest = { size: filesize };
- return yield httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
- });
-}
-function saveCache(cacheId, archivePath) {
- return __awaiter(this, void 0, void 0, function* () {
- const httpClient = createHttpClient();
- core.debug("Upload cache");
- yield uploadFile(httpClient, cacheId, archivePath);
- // Commit Cache
- core.debug("Commiting cache");
- const cacheSize = utils.getArchiveFileSize(archivePath);
- const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
- if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
- throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
- }
- core.info("Cache saved successfully");
- });
-}
-exports.saveCache = saveCache;
-/***/ }),
-
-/***/ 357:
-/***/ (function(module) {
-
-module.exports = require("assert");
-
-/***/ }),
-
-/***/ 383:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-const assert = __webpack_require__(357);
-const path = __webpack_require__(622);
-const pathHelper = __webpack_require__(972);
-const IS_WINDOWS = process.platform === 'win32';
-/**
- * Helper class for parsing paths into segments
- */
-class Path {
- /**
- * Constructs a Path
- * @param itemPath Path or array of segments
- */
- constructor(itemPath) {
- this.segments = [];
- // String
- if (typeof itemPath === 'string') {
- assert(itemPath, `Parameter 'itemPath' must not be empty`);
- // Normalize slashes and trim unnecessary trailing slash
- itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
- // Not rooted
- if (!pathHelper.hasRoot(itemPath)) {
- this.segments = itemPath.split(path.sep);
- }
- // Rooted
- else {
- // Add all segments, while not at the root
- let remaining = itemPath;
- let dir = pathHelper.dirname(remaining);
- while (dir !== remaining) {
- // Add the segment
- const basename = path.basename(remaining);
- this.segments.unshift(basename);
- // Truncate the last segment
- remaining = dir;
- dir = pathHelper.dirname(remaining);
- }
- // Remainder is the root
- this.segments.unshift(remaining);
- }
- }
- // Array
- else {
- // Must not be empty
- assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
- // Each segment
- for (let i = 0; i < itemPath.length; i++) {
- let segment = itemPath[i];
- // Must not be empty
- assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
- // Normalize slashes
- segment = pathHelper.normalizeSeparators(itemPath[i]);
- // Root segment
- if (i === 0 && pathHelper.hasRoot(segment)) {
- segment = pathHelper.safeTrimTrailingSeparator(segment);
- assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
- this.segments.push(segment);
- }
- // All other segments
- else {
- // Must not contain slash
- assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
- this.segments.push(segment);
- }
- }
- }
- }
- /**
- * Converts the path to it's string representation
- */
- toString() {
- // First segment
- let result = this.segments[0];
- // All others
- let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
- for (let i = 1; i < this.segments.length; i++) {
- if (skipSlash) {
- skipSlash = false;
- }
- else {
- result += path.sep;
- }
- result += this.segments[i];
- }
- return result;
- }
-}
-exports.Path = Path;
-//# sourceMappingURL=internal-path.js.map
-
-/***/ }),
-
-/***/ 413:
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-module.exports = __webpack_require__(141);
-
-
-/***/ }),
-
-/***/ 417:
-/***/ (function(module) {
-
-module.exports = require("crypto");
-
-/***/ }),
-
-/***/ 431:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const os = __importStar(__webpack_require__(87));
-/**
- * Commands
- *
- * Command Format:
- * ::name key=value,key=value::message
- *
- * Examples:
- * ::warning::This is the message
- * ::set-env name=MY_VAR::some value
- */
-function issueCommand(command, properties, message) {
- const cmd = new Command(command, properties, message);
- process.stdout.write(cmd.toString() + os.EOL);
-}
-exports.issueCommand = issueCommand;
-function issue(name, message = '') {
- issueCommand(name, {}, message);
-}
-exports.issue = issue;
-const CMD_STRING = '::';
-class Command {
- constructor(command, properties, message) {
- if (!command) {
- command = 'missing.command';
- }
- this.command = command;
- this.properties = properties;
- this.message = message;
- }
- toString() {
- let cmdStr = CMD_STRING + this.command;
- if (this.properties && Object.keys(this.properties).length > 0) {
- cmdStr += ' ';
- let first = true;
- for (const key in this.properties) {
- if (this.properties.hasOwnProperty(key)) {
- const val = this.properties[key];
- if (val) {
- if (first) {
- first = false;
- }
- else {
- cmdStr += ',';
- }
- cmdStr += `${key}=${escapeProperty(val)}`;
- }
- }
- }
- }
- cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
- return cmdStr;
- }
-}
-/**
- * Sanitizes an input into a string so it can be passed into issueCommand safely
- * @param input input to sanitize into a string
- */
-function toCommandValue(input) {
- if (input === null || input === undefined) {
- return '';
- }
- else if (typeof input === 'string' || input instanceof String) {
- return input;
- }
- return JSON.stringify(input);
-}
-exports.toCommandValue = toCommandValue;
-function escapeData(s) {
- return toCommandValue(s)
- .replace(/%/g, '%25')
- .replace(/\r/g, '%0D')
- .replace(/\n/g, '%0A');
-}
-function escapeProperty(s) {
- return toCommandValue(s)
- .replace(/%/g, '%25')
- .replace(/\r/g, '%0D')
- .replace(/\n/g, '%0A')
- .replace(/:/g, '%3A')
- .replace(/,/g, '%2C');
-}
-//# sourceMappingURL=command.js.map
-
-/***/ }),
-
-/***/ 470:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const command_1 = __webpack_require__(431);
-const os = __importStar(__webpack_require__(87));
-const path = __importStar(__webpack_require__(622));
-/**
- * The code to exit an action
- */
-var ExitCode;
-(function (ExitCode) {
- /**
- * A code indicating that the action was successful
- */
- ExitCode[ExitCode["Success"] = 0] = "Success";
- /**
- * A code indicating that the action was a failure
- */
- ExitCode[ExitCode["Failure"] = 1] = "Failure";
-})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
-//-----------------------------------------------------------------------
-// Variables
-//-----------------------------------------------------------------------
-/**
- * Sets env variable for this action and future actions in the job
- * @param name the name of the variable to set
- * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function exportVariable(name, val) {
- const convertedVal = command_1.toCommandValue(val);
- process.env[name] = convertedVal;
- command_1.issueCommand('set-env', { name }, convertedVal);
-}
-exports.exportVariable = exportVariable;
-/**
- * Registers a secret which will get masked from logs
- * @param secret value of the secret
- */
-function setSecret(secret) {
- command_1.issueCommand('add-mask', {}, secret);
-}
-exports.setSecret = setSecret;
-/**
- * Prepends inputPath to the PATH (for this action and future actions)
- * @param inputPath
- */
-function addPath(inputPath) {
- command_1.issueCommand('add-path', {}, inputPath);
- process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
-}
-exports.addPath = addPath;
-/**
- * Gets the value of an input. The value is also trimmed.
- *
- * @param name name of the input to get
- * @param options optional. See InputOptions.
- * @returns string
- */
-function getInput(name, options) {
- const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
- if (options && options.required && !val) {
- throw new Error(`Input required and not supplied: ${name}`);
- }
- return val.trim();
-}
-exports.getInput = getInput;
-/**
- * Sets the value of an output.
- *
- * @param name name of the output to set
- * @param value value to store. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function setOutput(name, value) {
- command_1.issueCommand('set-output', { name }, value);
-}
-exports.setOutput = setOutput;
-/**
- * Enables or disables the echoing of commands into stdout for the rest of the step.
- * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
- *
- */
-function setCommandEcho(enabled) {
- command_1.issue('echo', enabled ? 'on' : 'off');
-}
-exports.setCommandEcho = setCommandEcho;
-//-----------------------------------------------------------------------
-// Results
-//-----------------------------------------------------------------------
-/**
- * Sets the action status to failed.
- * When the action exits it will be with an exit code of 1
- * @param message add error issue message
- */
-function setFailed(message) {
- process.exitCode = ExitCode.Failure;
- error(message);
-}
-exports.setFailed = setFailed;
-//-----------------------------------------------------------------------
-// Logging Commands
-//-----------------------------------------------------------------------
-/**
- * Gets whether Actions Step Debug is on or not
- */
-function isDebug() {
- return process.env['RUNNER_DEBUG'] === '1';
-}
-exports.isDebug = isDebug;
-/**
- * Writes debug message to user log
- * @param message debug message
- */
-function debug(message) {
- command_1.issueCommand('debug', {}, message);
-}
-exports.debug = debug;
-/**
- * Adds an error issue
- * @param message error issue message. Errors will be converted to string via toString()
- */
-function error(message) {
- command_1.issue('error', message instanceof Error ? message.toString() : message);
-}
-exports.error = error;
-/**
- * Adds an warning issue
- * @param message warning issue message. Errors will be converted to string via toString()
- */
-function warning(message) {
- command_1.issue('warning', message instanceof Error ? message.toString() : message);
-}
-exports.warning = warning;
-/**
- * Writes info to log with console.log.
- * @param message info message
- */
-function info(message) {
- process.stdout.write(message + os.EOL);
-}
-exports.info = info;
-/**
- * Begin an output group.
- *
- * Output until the next `groupEnd` will be foldable in this group
- *
- * @param name The name of the output group
- */
-function startGroup(name) {
- command_1.issue('group', name);
-}
-exports.startGroup = startGroup;
-/**
- * End an output group.
- */
-function endGroup() {
- command_1.issue('endgroup');
-}
-exports.endGroup = endGroup;
-/**
- * Wrap an asynchronous function call in a group.
- *
- * Returns the same type as the function itself.
- *
- * @param name The name of the group
- * @param fn The function to wrap in the group
- */
-function group(name, fn) {
- return __awaiter(this, void 0, void 0, function* () {
- startGroup(name);
- let result;
- try {
- result = yield fn();
- }
- finally {
- endGroup();
- }
- return result;
- });
-}
-exports.group = group;
-//-----------------------------------------------------------------------
-// Wrapper action state
-//-----------------------------------------------------------------------
-/**
- * Saves state for current action, the state can only be retrieved by this action's post job execution.
- *
- * @param name name of the state to store
- * @param value value to store. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function saveState(name, value) {
- command_1.issueCommand('save-state', { name }, value);
-}
-exports.saveState = saveState;
-/**
- * Gets the value of an state set by this action's main execution.
- *
- * @param name name of the state to get
- * @returns string
- */
-function getState(name) {
- return process.env[`STATE_${name}`] || '';
-}
-exports.getState = getState;
-//# sourceMappingURL=core.js.map
-
-/***/ }),
-
-/***/ 514:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const io = __importStar(__webpack_require__(1));
-const tc = __importStar(__webpack_require__(533));
-const installer = __importStar(__webpack_require__(634));
-const path_1 = __importDefault(__webpack_require__(622));
-const child_process_1 = __importDefault(__webpack_require__(129));
-const fs_1 = __importDefault(__webpack_require__(747));
-function run() {
- return __awaiter(this, void 0, void 0, function* () {
- try {
- //
- // versionSpec is optional. If supplied, install / use from the tool cache
- // If not supplied then problem matchers will still be setup. Useful for self-hosted.
- //
- let versionSpec = core.getInput('go-version');
- // stable will be true unless false is the exact input
- // since getting unstable versions should be explicit
- let stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE';
- console.log(`Setup go ${stable ? 'stable' : ''} version spec ${versionSpec}`);
- if (versionSpec) {
- let installDir = tc.find('go', versionSpec);
- if (!installDir) {
- console.log(`A version satisfying ${versionSpec} not found locally, attempting to download ...`);
- installDir = yield installer.downloadGo(versionSpec, stable);
- console.log('Installed');
- }
- if (installDir) {
- core.exportVariable('GOROOT', installDir);
- core.addPath(path_1.default.join(installDir, 'bin'));
- console.log('Added go to the path');
- let added = addBinToPath();
- core.debug(`add bin ${added}`);
- }
- else {
- throw new Error(`Could not find a version that satisfied version spec: ${versionSpec}`);
- }
- }
- // add problem matchers
- const matchersPath = path_1.default.join(__dirname, '..', 'matchers.json');
- console.log(`##[add-matcher]${matchersPath}`);
- // output the version actually being used
- let goPath = yield io.which('go');
- let goVersion = (child_process_1.default.execSync(`${goPath} version`) || '').toString();
- console.log(goVersion);
- core.startGroup('go env');
- let goEnv = (child_process_1.default.execSync(`${goPath} env`) || '').toString();
- console.log(goEnv);
- core.endGroup();
- }
- catch (error) {
- core.setFailed(error.message);
- }
- });
-}
-exports.run = run;
-function addBinToPath() {
- return __awaiter(this, void 0, void 0, function* () {
- let added = false;
- let g = yield io.which('go');
- _debug(`which go :${g}:`);
- if (!g) {
- _debug('go not in the path');
- return added;
- }
- let buf = child_process_1.default.execSync('go env GOPATH');
- if (buf) {
- let gp = buf.toString().trim();
- _debug(`go env GOPATH :${gp}:`);
- if (!fs_1.default.existsSync(gp)) {
- // some of the hosted images have go install but not profile dir
- _debug(`creating ${gp}`);
- io.mkdirP(gp);
- }
- let bp = path_1.default.join(gp, 'bin');
- if (!fs_1.default.existsSync(bp)) {
- _debug(`creating ${bp}`);
- io.mkdirP(bp);
- }
- core.addPath(bp);
- added = true;
- }
- return added;
- });
-}
-exports.addBinToPath = addBinToPath;
-function _debug(message) {
- core.debug(message);
-}
-exports._debug = _debug;
-
-
-/***/ }),
-
-/***/ 533:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const io = __importStar(__webpack_require__(1));
-const fs = __importStar(__webpack_require__(747));
-const os = __importStar(__webpack_require__(87));
-const path = __importStar(__webpack_require__(622));
-const httpm = __importStar(__webpack_require__(539));
-const semver = __importStar(__webpack_require__(280));
-const stream = __importStar(__webpack_require__(794));
-const util = __importStar(__webpack_require__(669));
-const v4_1 = __importDefault(__webpack_require__(826));
-const exec_1 = __webpack_require__(986);
-const assert_1 = __webpack_require__(357);
-const retry_helper_1 = __webpack_require__(979);
-class HTTPError extends Error {
- constructor(httpStatusCode) {
- super(`Unexpected HTTP response: ${httpStatusCode}`);
- this.httpStatusCode = httpStatusCode;
- Object.setPrototypeOf(this, new.target.prototype);
- }
-}
-exports.HTTPError = HTTPError;
-const IS_WINDOWS = process.platform === 'win32';
-const userAgent = 'actions/tool-cache';
-/**
- * Download a tool from an url and stream it into a file
- *
- * @param url url of tool to download
- * @param dest path to download tool
- * @returns path to downloaded tool
- */
-function downloadTool(url, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- dest = dest || path.join(_getTempDirectory(), v4_1.default());
- yield io.mkdirP(path.dirname(dest));
- core.debug(`Downloading ${url}`);
- core.debug(`Destination ${dest}`);
- const maxAttempts = 3;
- const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
- const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
- const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
- return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
- return yield downloadToolAttempt(url, dest || '');
- }), (err) => {
- if (err instanceof HTTPError && err.httpStatusCode) {
- // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
- if (err.httpStatusCode < 500 &&
- err.httpStatusCode !== 408 &&
- err.httpStatusCode !== 429) {
- return false;
- }
- }
- // Otherwise retry
- return true;
- });
- });
-}
-exports.downloadTool = downloadTool;
-function downloadToolAttempt(url, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- if (fs.existsSync(dest)) {
- throw new Error(`Destination file path ${dest} already exists`);
- }
- // Get the response headers
- const http = new httpm.HttpClient(userAgent, [], {
- allowRetries: false
- });
- const response = yield http.get(url);
- if (response.message.statusCode !== 200) {
- const err = new HTTPError(response.message.statusCode);
- core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
- throw err;
- }
- // Download the response body
- const pipeline = util.promisify(stream.pipeline);
- const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
- const readStream = responseMessageFactory();
- let succeeded = false;
- try {
- yield pipeline(readStream, fs.createWriteStream(dest));
- core.debug('download complete');
- succeeded = true;
- return dest;
- }
- finally {
- // Error, delete dest before retry
- if (!succeeded) {
- core.debug('download failed');
- try {
- yield io.rmRF(dest);
- }
- catch (err) {
- core.debug(`Failed to delete '${dest}'. ${err.message}`);
- }
- }
- }
- });
-}
-/**
- * Extract a .7z file
- *
- * @param file path to the .7z file
- * @param dest destination directory. Optional.
- * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
- * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
- * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
- * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line
- * interface, it is smaller than the full command line interface, and it does support long paths. At the
- * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
- * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
- * to 7zr.exe can be pass to this function.
- * @returns path to the destination directory
- */
-function extract7z(file, dest, _7zPath) {
- return __awaiter(this, void 0, void 0, function* () {
- assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
- assert_1.ok(file, 'parameter "file" is required');
- dest = yield _createExtractFolder(dest);
- const originalCwd = process.cwd();
- process.chdir(dest);
- if (_7zPath) {
- try {
- const args = [
- 'x',
- '-bb1',
- '-bd',
- '-sccUTF-8',
- file
- ];
- const options = {
- silent: true
- };
- yield exec_1.exec(`"${_7zPath}"`, args, options);
- }
- finally {
- process.chdir(originalCwd);
- }
- }
- else {
- const escapedScript = path
- .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
- .replace(/'/g, "''")
- .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
- const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
- const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
- const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
- const args = [
- '-NoLogo',
- '-Sta',
- '-NoProfile',
- '-NonInteractive',
- '-ExecutionPolicy',
- 'Unrestricted',
- '-Command',
- command
- ];
- const options = {
- silent: true
- };
- try {
- const powershellPath = yield io.which('powershell', true);
- yield exec_1.exec(`"${powershellPath}"`, args, options);
- }
- finally {
- process.chdir(originalCwd);
- }
- }
- return dest;
- });
-}
-exports.extract7z = extract7z;
-/**
- * Extract a compressed tar archive
- *
- * @param file path to the tar
- * @param dest destination directory. Optional.
- * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
- * @returns path to the destination directory
- */
-function extractTar(file, dest, flags = 'xz') {
- return __awaiter(this, void 0, void 0, function* () {
- if (!file) {
- throw new Error("parameter 'file' is required");
- }
- // Create dest
- dest = yield _createExtractFolder(dest);
- // Determine whether GNU tar
- core.debug('Checking tar --version');
- let versionOutput = '';
- yield exec_1.exec('tar --version', [], {
- ignoreReturnCode: true,
- silent: true,
- listeners: {
- stdout: (data) => (versionOutput += data.toString()),
- stderr: (data) => (versionOutput += data.toString())
- }
- });
- core.debug(versionOutput.trim());
- const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
- // Initialize args
- const args = [flags];
- let destArg = dest;
- let fileArg = file;
- if (IS_WINDOWS && isGnuTar) {
- args.push('--force-local');
- destArg = dest.replace(/\\/g, '/');
- // Technically only the dest needs to have `/` but for aesthetic consistency
- // convert slashes in the file arg too.
- fileArg = file.replace(/\\/g, '/');
- }
- if (isGnuTar) {
- // Suppress warnings when using GNU tar to extract archives created by BSD tar
- args.push('--warning=no-unknown-keyword');
- }
- args.push('-C', destArg, '-f', fileArg);
- yield exec_1.exec(`tar`, args);
- return dest;
- });
-}
-exports.extractTar = extractTar;
-/**
- * Extract a zip
- *
- * @param file path to the zip
- * @param dest destination directory. Optional.
- * @returns path to the destination directory
- */
-function extractZip(file, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- if (!file) {
- throw new Error("parameter 'file' is required");
- }
- dest = yield _createExtractFolder(dest);
- if (IS_WINDOWS) {
- yield extractZipWin(file, dest);
- }
- else {
- yield extractZipNix(file, dest);
- }
- return dest;
- });
-}
-exports.extractZip = extractZip;
-function extractZipWin(file, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- // build the powershell command
- const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
- const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
- const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
- // run powershell
- const powershellPath = yield io.which('powershell');
- const args = [
- '-NoLogo',
- '-Sta',
- '-NoProfile',
- '-NonInteractive',
- '-ExecutionPolicy',
- 'Unrestricted',
- '-Command',
- command
- ];
- yield exec_1.exec(`"${powershellPath}"`, args);
- });
-}
-function extractZipNix(file, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- const unzipPath = yield io.which('unzip');
- yield exec_1.exec(`"${unzipPath}"`, [file], { cwd: dest });
- });
-}
-/**
- * Caches a directory and installs it into the tool cacheDir
- *
- * @param sourceDir the directory to cache into tools
- * @param tool tool name
- * @param version version of the tool. semver format
- * @param arch architecture of the tool. Optional. Defaults to machine architecture
- */
-function cacheDir(sourceDir, tool, version, arch) {
- return __awaiter(this, void 0, void 0, function* () {
- version = semver.clean(version) || version;
- arch = arch || os.arch();
- core.debug(`Caching tool ${tool} ${version} ${arch}`);
- core.debug(`source dir: ${sourceDir}`);
- if (!fs.statSync(sourceDir).isDirectory()) {
- throw new Error('sourceDir is not a directory');
- }
- // Create the tool dir
- const destPath = yield _createToolPath(tool, version, arch);
- // copy each child item. do not move. move can fail on Windows
- // due to anti-virus software having an open handle on a file.
- for (const itemName of fs.readdirSync(sourceDir)) {
- const s = path.join(sourceDir, itemName);
- yield io.cp(s, destPath, { recursive: true });
- }
- // write .complete
- _completeToolPath(tool, version, arch);
- return destPath;
- });
-}
-exports.cacheDir = cacheDir;
-/**
- * Caches a downloaded file (GUID) and installs it
- * into the tool cache with a given targetName
- *
- * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
- * @param targetFile the name of the file name in the tools directory
- * @param tool tool name
- * @param version version of the tool. semver format
- * @param arch architecture of the tool. Optional. Defaults to machine architecture
- */
-function cacheFile(sourceFile, targetFile, tool, version, arch) {
- return __awaiter(this, void 0, void 0, function* () {
- version = semver.clean(version) || version;
- arch = arch || os.arch();
- core.debug(`Caching tool ${tool} ${version} ${arch}`);
- core.debug(`source file: ${sourceFile}`);
- if (!fs.statSync(sourceFile).isFile()) {
- throw new Error('sourceFile is not a file');
- }
- // create the tool dir
- const destFolder = yield _createToolPath(tool, version, arch);
- // copy instead of move. move can fail on Windows due to
- // anti-virus software having an open handle on a file.
- const destPath = path.join(destFolder, targetFile);
- core.debug(`destination file ${destPath}`);
- yield io.cp(sourceFile, destPath);
- // write .complete
- _completeToolPath(tool, version, arch);
- return destFolder;
- });
-}
-exports.cacheFile = cacheFile;
-/**
- * Finds the path to a tool version in the local installed tool cache
- *
- * @param toolName name of the tool
- * @param versionSpec version of the tool
- * @param arch optional arch. defaults to arch of computer
- */
-function find(toolName, versionSpec, arch) {
- if (!toolName) {
- throw new Error('toolName parameter is required');
- }
- if (!versionSpec) {
- throw new Error('versionSpec parameter is required');
- }
- arch = arch || os.arch();
- // attempt to resolve an explicit version
- if (!_isExplicitVersion(versionSpec)) {
- const localVersions = findAllVersions(toolName, arch);
- const match = _evaluateVersions(localVersions, versionSpec);
- versionSpec = match;
- }
- // check for the explicit version in the cache
- let toolPath = '';
- if (versionSpec) {
- versionSpec = semver.clean(versionSpec) || '';
- const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);
- core.debug(`checking cache: ${cachePath}`);
- if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
- core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
- toolPath = cachePath;
- }
- else {
- core.debug('not found');
- }
- }
- return toolPath;
-}
-exports.find = find;
-/**
- * Finds the paths to all versions of a tool that are installed in the local tool cache
- *
- * @param toolName name of the tool
- * @param arch optional arch. defaults to arch of computer
- */
-function findAllVersions(toolName, arch) {
- const versions = [];
- arch = arch || os.arch();
- const toolPath = path.join(_getCacheDirectory(), toolName);
- if (fs.existsSync(toolPath)) {
- const children = fs.readdirSync(toolPath);
- for (const child of children) {
- if (_isExplicitVersion(child)) {
- const fullPath = path.join(toolPath, child, arch || '');
- if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
- versions.push(child);
- }
- }
- }
- }
- return versions;
-}
-exports.findAllVersions = findAllVersions;
-function _createExtractFolder(dest) {
- return __awaiter(this, void 0, void 0, function* () {
- if (!dest) {
- // create a temp dir
- dest = path.join(_getTempDirectory(), v4_1.default());
- }
- yield io.mkdirP(dest);
- return dest;
- });
-}
-function _createToolPath(tool, version, arch) {
- return __awaiter(this, void 0, void 0, function* () {
- const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
- core.debug(`destination ${folderPath}`);
- const markerPath = `${folderPath}.complete`;
- yield io.rmRF(folderPath);
- yield io.rmRF(markerPath);
- yield io.mkdirP(folderPath);
- return folderPath;
- });
-}
-function _completeToolPath(tool, version, arch) {
- const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
- const markerPath = `${folderPath}.complete`;
- fs.writeFileSync(markerPath, '');
- core.debug('finished caching tool');
-}
-function _isExplicitVersion(versionSpec) {
- const c = semver.clean(versionSpec) || '';
- core.debug(`isExplicit: ${c}`);
- const valid = semver.valid(c) != null;
- core.debug(`explicit? ${valid}`);
- return valid;
-}
-function _evaluateVersions(versions, versionSpec) {
- let version = '';
- core.debug(`evaluating ${versions.length} versions`);
- versions = versions.sort((a, b) => {
- if (semver.gt(a, b)) {
- return 1;
- }
- return -1;
- });
- for (let i = versions.length - 1; i >= 0; i--) {
- const potential = versions[i];
- const satisfied = semver.satisfies(potential, versionSpec);
- if (satisfied) {
- version = potential;
- break;
- }
- }
- if (version) {
- core.debug(`matched: ${version}`);
- }
- else {
- core.debug('match not found');
- }
- return version;
-}
-/**
- * Gets RUNNER_TOOL_CACHE
- */
-function _getCacheDirectory() {
- const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';
- assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');
- return cacheDirectory;
-}
-/**
- * Gets RUNNER_TEMP
- */
-function _getTempDirectory() {
- const tempDirectory = process.env['RUNNER_TEMP'] || '';
- assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
- return tempDirectory;
-}
-/**
- * Gets a global variable
- */
-function _getGlobal(key, defaultValue) {
- /* eslint-disable @typescript-eslint/no-explicit-any */
- const value = global[key];
- /* eslint-enable @typescript-eslint/no-explicit-any */
- return value !== undefined ? value : defaultValue;
-}
-//# sourceMappingURL=tool-cache.js.map
-
-/***/ }),
-
-/***/ 539:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-const url = __webpack_require__(835);
-const http = __webpack_require__(605);
-const https = __webpack_require__(211);
-const pm = __webpack_require__(950);
-let tunnel;
-var HttpCodes;
-(function (HttpCodes) {
- HttpCodes[HttpCodes["OK"] = 200] = "OK";
- HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
- HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
- HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
- HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
- HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
- HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
- HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
- HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
- HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
- HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
- HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
- HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
- HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
- HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
- HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
- HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
- HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
- HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
- HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
- HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
- HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
- HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
- HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
- HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
- HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
- HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
-})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
-var Headers;
-(function (Headers) {
- Headers["Accept"] = "accept";
- Headers["ContentType"] = "content-type";
-})(Headers = exports.Headers || (exports.Headers = {}));
-var MediaTypes;
-(function (MediaTypes) {
- MediaTypes["ApplicationJson"] = "application/json";
-})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
-/**
- * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
- * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
- */
-function getProxyUrl(serverUrl) {
- let proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
- return proxyUrl ? proxyUrl.href : '';
-}
-exports.getProxyUrl = getProxyUrl;
-const HttpRedirectCodes = [
- HttpCodes.MovedPermanently,
- HttpCodes.ResourceMoved,
- HttpCodes.SeeOther,
- HttpCodes.TemporaryRedirect,
- HttpCodes.PermanentRedirect
-];
-const HttpResponseRetryCodes = [
- HttpCodes.BadGateway,
- HttpCodes.ServiceUnavailable,
- HttpCodes.GatewayTimeout
-];
-const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
-const ExponentialBackoffCeiling = 10;
-const ExponentialBackoffTimeSlice = 5;
-class HttpClientResponse {
- constructor(message) {
- this.message = message;
- }
- readBody() {
- return new Promise(async (resolve, reject) => {
- let output = Buffer.alloc(0);
- this.message.on('data', (chunk) => {
- output = Buffer.concat([output, chunk]);
- });
- this.message.on('end', () => {
- resolve(output.toString());
- });
- });
- }
-}
-exports.HttpClientResponse = HttpClientResponse;
-function isHttps(requestUrl) {
- let parsedUrl = url.parse(requestUrl);
- return parsedUrl.protocol === 'https:';
-}
-exports.isHttps = isHttps;
-class HttpClient {
- constructor(userAgent, handlers, requestOptions) {
- this._ignoreSslError = false;
- this._allowRedirects = true;
- this._allowRedirectDowngrade = false;
- this._maxRedirects = 50;
- this._allowRetries = false;
- this._maxRetries = 1;
- this._keepAlive = false;
- this._disposed = false;
- this.userAgent = userAgent;
- this.handlers = handlers || [];
- this.requestOptions = requestOptions;
- if (requestOptions) {
- if (requestOptions.ignoreSslError != null) {
- this._ignoreSslError = requestOptions.ignoreSslError;
- }
- this._socketTimeout = requestOptions.socketTimeout;
- if (requestOptions.allowRedirects != null) {
- this._allowRedirects = requestOptions.allowRedirects;
- }
- if (requestOptions.allowRedirectDowngrade != null) {
- this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
- }
- if (requestOptions.maxRedirects != null) {
- this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
- }
- if (requestOptions.keepAlive != null) {
- this._keepAlive = requestOptions.keepAlive;
- }
- if (requestOptions.allowRetries != null) {
- this._allowRetries = requestOptions.allowRetries;
- }
- if (requestOptions.maxRetries != null) {
- this._maxRetries = requestOptions.maxRetries;
- }
- }
- }
- options(requestUrl, additionalHeaders) {
- return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
- }
- get(requestUrl, additionalHeaders) {
- return this.request('GET', requestUrl, null, additionalHeaders || {});
- }
- del(requestUrl, additionalHeaders) {
- return this.request('DELETE', requestUrl, null, additionalHeaders || {});
- }
- post(requestUrl, data, additionalHeaders) {
- return this.request('POST', requestUrl, data, additionalHeaders || {});
- }
- patch(requestUrl, data, additionalHeaders) {
- return this.request('PATCH', requestUrl, data, additionalHeaders || {});
- }
- put(requestUrl, data, additionalHeaders) {
- return this.request('PUT', requestUrl, data, additionalHeaders || {});
- }
- head(requestUrl, additionalHeaders) {
- return this.request('HEAD', requestUrl, null, additionalHeaders || {});
- }
- sendStream(verb, requestUrl, stream, additionalHeaders) {
- return this.request(verb, requestUrl, stream, additionalHeaders);
- }
- /**
- * Gets a typed object from an endpoint
- * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
- */
- async getJson(requestUrl, additionalHeaders = {}) {
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- let res = await this.get(requestUrl, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- }
- async postJson(requestUrl, obj, additionalHeaders = {}) {
- let data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
- let res = await this.post(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- }
- async putJson(requestUrl, obj, additionalHeaders = {}) {
- let data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
- let res = await this.put(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- }
- async patchJson(requestUrl, obj, additionalHeaders = {}) {
- let data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
- let res = await this.patch(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- }
- /**
- * Makes a raw http request.
- * All other methods such as get, post, patch, and request ultimately call this.
- * Prefer get, del, post and patch
- */
- async request(verb, requestUrl, data, headers) {
- if (this._disposed) {
- throw new Error('Client has already been disposed.');
- }
- let parsedUrl = url.parse(requestUrl);
- let info = this._prepareRequest(verb, parsedUrl, headers);
- // Only perform retries on reads since writes may not be idempotent.
- let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
- ? this._maxRetries + 1
- : 1;
- let numTries = 0;
- let response;
- while (numTries < maxTries) {
- response = await this.requestRaw(info, data);
- // Check if it's an authentication challenge
- if (response &&
- response.message &&
- response.message.statusCode === HttpCodes.Unauthorized) {
- let authenticationHandler;
- for (let i = 0; i < this.handlers.length; i++) {
- if (this.handlers[i].canHandleAuthentication(response)) {
- authenticationHandler = this.handlers[i];
- break;
- }
- }
- if (authenticationHandler) {
- return authenticationHandler.handleAuthentication(this, info, data);
- }
- else {
- // We have received an unauthorized response but have no handlers to handle it.
- // Let the response return to the caller.
- return response;
- }
- }
- let redirectsRemaining = this._maxRedirects;
- while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
- this._allowRedirects &&
- redirectsRemaining > 0) {
- const redirectUrl = response.message.headers['location'];
- if (!redirectUrl) {
- // if there's no location to redirect to, we won't follow the redirect
- break;
- }
- let parsedRedirectUrl = url.parse(redirectUrl);
- if (parsedUrl.protocol == 'https:' &&
- parsedUrl.protocol != parsedRedirectUrl.protocol &&
- !this._allowRedirectDowngrade) {
- throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
- }
- // we need to finish reading the response before reassigning it,
- // otherwise the open socket would leak.
- await response.readBody();
- // strip authorization header if redirected to a different hostname
- if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
- for (let header in headers) {
- // header names are case insensitive
- if (header.toLowerCase() === 'authorization') {
- delete headers[header];
- }
- }
- }
- // let's make the request with the new redirectUrl
- info = this._prepareRequest(verb, parsedRedirectUrl, headers);
- response = await this.requestRaw(info, data);
- redirectsRemaining--;
- }
- if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
- // If not a retry code, return immediately instead of retrying
- return response;
- }
- numTries += 1;
- if (numTries < maxTries) {
- await response.readBody();
- await this._performExponentialBackoff(numTries);
- }
- }
- return response;
- }
- /**
- * Needs to be called if keepAlive is set to true in request options.
- */
- dispose() {
- if (this._agent) {
- this._agent.destroy();
- }
- this._disposed = true;
- }
- /**
- * Raw request.
- * @param info
- * @param data
- */
- requestRaw(info, data) {
- return new Promise((resolve, reject) => {
- let callbackForResult = function (err, res) {
- if (err) {
- reject(err);
- }
- resolve(res);
- };
- this.requestRawWithCallback(info, data, callbackForResult);
- });
- }
- /**
- * Raw request with callback.
- * @param info
- * @param data
- * @param onResult
- */
- requestRawWithCallback(info, data, onResult) {
- let socket;
- if (typeof data === 'string') {
- info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
- }
- let callbackCalled = false;
- let handleResult = (err, res) => {
- if (!callbackCalled) {
- callbackCalled = true;
- onResult(err, res);
- }
- };
- let req = info.httpModule.request(info.options, (msg) => {
- let res = new HttpClientResponse(msg);
- handleResult(null, res);
- });
- req.on('socket', sock => {
- socket = sock;
- });
- // If we ever get disconnected, we want the socket to timeout eventually
- req.setTimeout(this._socketTimeout || 3 * 60000, () => {
- if (socket) {
- socket.end();
- }
- handleResult(new Error('Request timeout: ' + info.options.path), null);
- });
- req.on('error', function (err) {
- // err has statusCode property
- // res should have headers
- handleResult(err, null);
- });
- if (data && typeof data === 'string') {
- req.write(data, 'utf8');
- }
- if (data && typeof data !== 'string') {
- data.on('close', function () {
- req.end();
- });
- data.pipe(req);
- }
- else {
- req.end();
- }
- }
- /**
- * Gets an http agent. This function is useful when you need an http agent that handles
- * routing through a proxy server - depending upon the url and proxy environment variables.
- * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
- */
- getAgent(serverUrl) {
- let parsedUrl = url.parse(serverUrl);
- return this._getAgent(parsedUrl);
- }
- _prepareRequest(method, requestUrl, headers) {
- const info = {};
- info.parsedUrl = requestUrl;
- const usingSsl = info.parsedUrl.protocol === 'https:';
- info.httpModule = usingSsl ? https : http;
- const defaultPort = usingSsl ? 443 : 80;
- info.options = {};
- info.options.host = info.parsedUrl.hostname;
- info.options.port = info.parsedUrl.port
- ? parseInt(info.parsedUrl.port)
- : defaultPort;
- info.options.path =
- (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
- info.options.method = method;
- info.options.headers = this._mergeHeaders(headers);
- if (this.userAgent != null) {
- info.options.headers['user-agent'] = this.userAgent;
- }
- info.options.agent = this._getAgent(info.parsedUrl);
- // gives handlers an opportunity to participate
- if (this.handlers) {
- this.handlers.forEach(handler => {
- handler.prepareRequest(info.options);
- });
- }
- return info;
- }
- _mergeHeaders(headers) {
- const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
- if (this.requestOptions && this.requestOptions.headers) {
- return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
- }
- return lowercaseKeys(headers || {});
- }
- _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
- const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
- let clientHeader;
- if (this.requestOptions && this.requestOptions.headers) {
- clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
- }
- return additionalHeaders[header] || clientHeader || _default;
- }
- _getAgent(parsedUrl) {
- let agent;
- let proxyUrl = pm.getProxyUrl(parsedUrl);
- let useProxy = proxyUrl && proxyUrl.hostname;
- if (this._keepAlive && useProxy) {
- agent = this._proxyAgent;
- }
- if (this._keepAlive && !useProxy) {
- agent = this._agent;
- }
- // if agent is already assigned use that agent.
- if (!!agent) {
- return agent;
- }
- const usingSsl = parsedUrl.protocol === 'https:';
- let maxSockets = 100;
- if (!!this.requestOptions) {
- maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
- }
- if (useProxy) {
- // If using proxy, need tunnel
- if (!tunnel) {
- tunnel = __webpack_require__(413);
- }
- const agentOptions = {
- maxSockets: maxSockets,
- keepAlive: this._keepAlive,
- proxy: {
- proxyAuth: proxyUrl.auth,
- host: proxyUrl.hostname,
- port: proxyUrl.port
- }
- };
- let tunnelAgent;
- const overHttps = proxyUrl.protocol === 'https:';
- if (usingSsl) {
- tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
- }
- else {
- tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
- }
- agent = tunnelAgent(agentOptions);
- this._proxyAgent = agent;
- }
- // if reusing agent across request and tunneling agent isn't assigned create a new agent
- if (this._keepAlive && !agent) {
- const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
- agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
- this._agent = agent;
- }
- // if not using private agent and tunnel agent isn't setup then use global agent
- if (!agent) {
- agent = usingSsl ? https.globalAgent : http.globalAgent;
- }
- if (usingSsl && this._ignoreSslError) {
- // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that would affect requests for the entire process
- // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
- // we have to cast it to any and change it directly
- agent.options = Object.assign(agent.options || {}, {
- rejectUnauthorized: false
- });
- }
- return agent;
- }
- _performExponentialBackoff(retryNumber) {
- retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
- const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
- return new Promise(resolve => setTimeout(() => resolve(), ms));
- }
- static dateTimeDeserializer(key, value) {
- if (typeof value === 'string') {
- let a = new Date(value);
- if (!isNaN(a.valueOf())) {
- return a;
- }
- }
- return value;
- }
- async _processResponse(res, options) {
- return new Promise(async (resolve, reject) => {
- const statusCode = res.message.statusCode;
- const response = {
- statusCode: statusCode,
- result: null,
- headers: {}
- };
- // a 404 Not Found response resolves with a null result object
- if (statusCode == HttpCodes.NotFound) {
- resolve(response);
- }
- let obj;
- let contents;
- // get the result from the body
- try {
- contents = await res.readBody();
- if (contents && contents.length > 0) {
- if (options && options.deserializeDates) {
- obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
- }
- else {
- obj = JSON.parse(contents);
- }
- response.result = obj;
- }
- response.headers = res.message.headers;
- }
- catch (err) {
- // Invalid resource (contents not json); leaving result obj null
- }
- // note that 3xx redirects are handled by the http layer.
- if (statusCode > 299) {
- let msg;
- // if exception/error in body, attempt to get better error
- if (obj && obj.message) {
- msg = obj.message;
- }
- else if (contents && contents.length > 0) {
- // it may be the case that the exception is in the body message as string
- msg = contents;
- }
- else {
- msg = 'Failed request: (' + statusCode + ')';
- }
- let err = new Error(msg);
- // attach statusCode and body obj (if available) to the error object
- err['statusCode'] = statusCode;
- if (response.result) {
- err['result'] = response.result;
- }
- reject(err);
- }
- else {
- resolve(response);
- }
- });
- }
-}
-exports.HttpClient = HttpClient;
-
-
-/***/ }),
-
-/***/ 570:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const exec_1 = __webpack_require__(986);
-const io = __importStar(__webpack_require__(1));
-const fs_1 = __webpack_require__(747);
-const path = __importStar(__webpack_require__(622));
-const constants_1 = __webpack_require__(929);
-function isGnuTar() {
- return __awaiter(this, void 0, void 0, function* () {
- core.debug("Checking tar --version");
- let versionOutput = "";
- yield exec_1.exec("tar --version", [], {
- ignoreReturnCode: true,
- silent: true,
- listeners: {
- stdout: (data) => (versionOutput += data.toString()),
- stderr: (data) => (versionOutput += data.toString())
- }
- });
- core.debug(versionOutput.trim());
- return versionOutput.toUpperCase().includes("GNU TAR");
- });
-}
-exports.isGnuTar = isGnuTar;
-function getTarPath(args) {
- return __awaiter(this, void 0, void 0, function* () {
- // Explicitly use BSD Tar on Windows
- const IS_WINDOWS = process.platform === "win32";
- if (IS_WINDOWS) {
- const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
- if (fs_1.existsSync(systemTar)) {
- return systemTar;
- }
- else if (yield isGnuTar()) { // isGnuTar is async; await its result instead of testing the promise
- args.push("--force-local");
- }
- }
- return yield io.which("tar", true);
- });
-}
-function execTar(args, cwd) {
- return __awaiter(this, void 0, void 0, function* () {
- try {
- yield exec_1.exec(`"${yield getTarPath(args)}"`, args, { cwd: cwd });
- }
- catch (error) {
- throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
- }
- });
-}
-function getWorkingDirectory() {
- var _a;
- return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd();
-}
-function extractTar(archivePath) {
- return __awaiter(this, void 0, void 0, function* () {
- // Create directory to extract tar into
- const workingDirectory = getWorkingDirectory();
- yield io.mkdirP(workingDirectory);
- const args = [
- "-xz",
- "-f",
- archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
- "-P",
- "-C",
- workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
- ];
- yield execTar(args);
- });
-}
-exports.extractTar = extractTar;
-function createTar(archiveFolder, sourceDirectories) {
- return __awaiter(this, void 0, void 0, function* () {
- // Write source directories to manifest.txt to avoid command length limits
- const manifestFilename = "manifest.txt";
- fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n"));
- const workingDirectory = getWorkingDirectory();
- const args = [
- "-cz",
- "-f",
- constants_1.CacheFilename.replace(new RegExp("\\" + path.sep, "g"), "/"),
- "-P",
- "-C",
- workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
- "--files-from",
- manifestFilename
- ];
- yield execTar(args, archiveFolder);
- });
-}
-exports.createTar = createTar;
-
-
-/***/ }),
-
-/***/ 597:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-const pathHelper = __webpack_require__(972);
-const internal_match_kind_1 = __webpack_require__(327);
-const IS_WINDOWS = process.platform === 'win32';
-/**
- * Given an array of patterns, returns an array of paths to search.
- * Duplicates and paths under other included paths are filtered out.
- */
-function getSearchPaths(patterns) {
- // Ignore negate patterns
- patterns = patterns.filter(x => !x.negate);
- // Create a map of all search paths
- const searchPathMap = {};
- for (const pattern of patterns) {
- const key = IS_WINDOWS
- ? pattern.searchPath.toUpperCase()
- : pattern.searchPath;
- searchPathMap[key] = 'candidate';
- }
- const result = [];
- for (const pattern of patterns) {
- // Check if already included
- const key = IS_WINDOWS
- ? pattern.searchPath.toUpperCase()
- : pattern.searchPath;
- if (searchPathMap[key] === 'included') {
- continue;
- }
- // Check for an ancestor search path
- let foundAncestor = false;
- let tempKey = key;
- let parent = pathHelper.dirname(tempKey);
- while (parent !== tempKey) {
- if (searchPathMap[parent]) {
- foundAncestor = true;
- break;
- }
- tempKey = parent;
- parent = pathHelper.dirname(tempKey);
- }
- // Include the search pattern in the result
- if (!foundAncestor) {
- result.push(pattern.searchPath);
- searchPathMap[key] = 'included';
- }
- }
- return result;
-}
-exports.getSearchPaths = getSearchPaths;
-/**
- * Matches the patterns against the path
- */
-function match(patterns, itemPath) {
- let result = internal_match_kind_1.MatchKind.None;
- for (const pattern of patterns) {
- if (pattern.negate) {
- result &= ~pattern.match(itemPath);
- }
- else {
- result |= pattern.match(itemPath);
- }
- }
- return result;
-}
-exports.match = match;
-/**
- * Checks whether to descend further into the directory
- */
-function partialMatch(patterns, itemPath) {
- return patterns.some(x => !x.negate && x.partialMatch(itemPath));
-}
-exports.partialMatch = partialMatch;
-//# sourceMappingURL=internal-pattern-helper.js.map
-
-/***/ }),
-
-/***/ 601:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __webpack_require__(470);
-/**
- * Returns a copy with defaults filled in.
- */
-function getOptions(copy) {
- const result = {
- followSymbolicLinks: true,
- implicitDescendants: true,
- omitBrokenSymbolicLinks: true
- };
- if (copy) {
- if (typeof copy.followSymbolicLinks === 'boolean') {
- result.followSymbolicLinks = copy.followSymbolicLinks;
- core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
- }
- if (typeof copy.implicitDescendants === 'boolean') {
- result.implicitDescendants = copy.implicitDescendants;
- core.debug(`implicitDescendants '${result.implicitDescendants}'`);
- }
- if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
- result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
- core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
- }
- }
- return result;
-}
-exports.getOptions = getOptions;
-//# sourceMappingURL=internal-glob-options-helper.js.map
-
-/***/ }),
-
-/***/ 605:
-/***/ (function(module) {
-
-module.exports = require("http");
-
-/***/ }),
-
-/***/ 614:
-/***/ (function(module) {
-
-module.exports = require("events");
-
-/***/ }),
-
-/***/ 621:
-/***/ (function(module) {
-
-"use strict";
-
-module.exports = balanced;
-function balanced(a, b, str) {
- if (a instanceof RegExp) a = maybeMatch(a, str);
- if (b instanceof RegExp) b = maybeMatch(b, str);
-
- var r = range(a, b, str);
-
- return r && {
- start: r[0],
- end: r[1],
- pre: str.slice(0, r[0]),
- body: str.slice(r[0] + a.length, r[1]),
- post: str.slice(r[1] + b.length)
- };
-}
-
-function maybeMatch(reg, str) {
- var m = str.match(reg);
- return m ? m[0] : null;
-}
-
-balanced.range = range;
-function range(a, b, str) {
- var begs, beg, left, right, result;
- var ai = str.indexOf(a);
- var bi = str.indexOf(b, ai + 1);
- var i = ai;
-
- if (ai >= 0 && bi > 0) {
- begs = [];
- left = str.length;
-
- while (i >= 0 && !result) {
- if (i == ai) {
- begs.push(i);
- ai = str.indexOf(a, i + 1);
- } else if (begs.length == 1) {
- result = [ begs.pop(), bi ];
- } else {
- beg = begs.pop();
- if (beg < left) {
- left = beg;
- right = bi;
- }
-
- bi = str.indexOf(b, i + 1);
- }
-
- i = ai < bi && ai >= 0 ? ai : bi;
- }
-
- if (begs.length) {
- result = [ left, right ];
- }
- }
-
- return result;
-}
-
-
-/***/ }),
-
-/***/ 622:
-/***/ (function(module) {
-
-module.exports = require("path");
-
-/***/ }),
-
-/***/ 631:
-/***/ (function(module) {
-
-module.exports = require("net");
-
-/***/ }),
-
-/***/ 634:
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const tc = __importStar(__webpack_require__(533));
-const path = __importStar(__webpack_require__(622));
-const semver = __importStar(__webpack_require__(280));
-const httpm = __importStar(__webpack_require__(539));
-const sys = __importStar(__webpack_require__(646));
-const core_1 = __webpack_require__(470);
-function downloadGo(versionSpec, stable) {
- return __awaiter(this, void 0, void 0, function* () {
- let toolPath;
- try {
- let match = yield findMatch(versionSpec, stable);
- if (match) {
- // download
- core_1.debug(`match ${match.version}`);
- let downloadUrl = `https://storage.googleapis.com/golang/${match.files[0].filename}`;
- console.log(`Downloading from ${downloadUrl}`);
- let downloadPath = yield tc.downloadTool(downloadUrl);
- core_1.debug(`downloaded to ${downloadPath}`);
- // extract
- console.log('Extracting ...');
- let extPath = sys.getPlatform() == 'windows'
- ? yield tc.extractZip(downloadPath)
- : yield tc.extractTar(downloadPath);
- core_1.debug(`extracted to ${extPath}`);
- // extracts with a root folder that matches the fileName downloaded
- const toolRoot = path.join(extPath, 'go');
- toolPath = yield tc.cacheDir(toolRoot, 'go', makeSemver(match.version));
- }
- }
- catch (error) {
- throw new Error(`Failed to download version ${versionSpec}: ${error}`);
- }
- return toolPath;
- });
-}
-exports.downloadGo = downloadGo;
-function findMatch(versionSpec, stable) {
- return __awaiter(this, void 0, void 0, function* () {
- let archFilter = sys.getArch();
- let platFilter = sys.getPlatform();
- let result;
- let match;
- const dlUrl = 'https://golang.org/dl/?mode=json&include=all';
- let candidates = yield module.exports.getVersions(dlUrl);
- if (!candidates) {
- throw new Error(`golang download url did not return results`);
- }
- let goFile;
- for (let i = 0; i < candidates.length; i++) {
- let candidate = candidates[i];
- let version = makeSemver(candidate.version);
- // 1.13.0 is advertised as 1.13, which prevents matching exactly 1.13.0,
- // since a semver spec of 1.13 would match the latest 1.13.x
- let parts = version.split('.');
- if (parts.length == 2) {
- version = version + '.0';
- }
- core_1.debug(`check ${version} satisfies ${versionSpec}`);
- if (semver.satisfies(version, versionSpec) &&
- (!stable || candidate.stable === stable)) {
- goFile = candidate.files.find(file => {
- core_1.debug(`${file.arch}===${archFilter} && ${file.os}===${platFilter}`);
- return file.arch === archFilter && file.os === platFilter;
- });
- if (goFile) {
- core_1.debug(`matched ${candidate.version}`);
- match = candidate;
- break;
- }
- }
- }
- if (match && goFile) {
- // clone since we're mutating the file list to be only the file that matches
- result = Object.assign({}, match);
- result.files = [goFile];
- }
- return result;
- });
-}
-exports.findMatch = findMatch;
-function getVersions(dlUrl) {
- return __awaiter(this, void 0, void 0, function* () {
- // this returns versions descending so latest is first
- let http = new httpm.HttpClient('setup-go');
- return (yield http.getJson(dlUrl)).result;
- });
-}
-exports.getVersions = getVersions;
-//
-// Convert the go version syntax into semver for semver matching
-// 1.13.1 => 1.13.1
-// 1.13 => 1.13.0
-// 1.10beta1 => 1.10.0-beta1, 1.10rc1 => 1.10.0-rc1
-// 1.8.5beta1 => 1.8.5-beta1, 1.8.5rc1 => 1.8.5-rc1
-function makeSemver(version) {
- version = version.replace('go', '');
- version = version.replace('beta', '-beta').replace('rc', '-rc');
- let parts = version.split('-');
- let verPart = parts[0];
- let prereleasePart = parts.length > 1 ? `-${parts[1]}` : '';
- let verParts = verPart.split('.');
- if (verParts.length == 2) {
- verPart += '.0';
- }
- return `${verPart}${prereleasePart}`;
-}
-exports.makeSemver = makeSemver;
-
-
-/***/ }),
-
-/***/ 646:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-let os = __webpack_require__(87);
-function getPlatform() {
- // darwin and linux match already
- // freebsd not supported yet but future proofed.
- // 'aix', 'darwin', 'freebsd', 'linux', 'openbsd', 'sunos', and 'win32'
- let plat = os.platform();
- // wants 'darwin', 'freebsd', 'linux', 'windows'
- if (plat === 'win32') {
- plat = 'windows';
- }
- return plat;
-}
-exports.getPlatform = getPlatform;
-function getArch() {
- // 'arm', 'arm64', 'ia32', 'mips', 'mipsel', 'ppc', 'ppc64', 's390', 's390x', 'x32', and 'x64'.
- let arch = os.arch();
- // wants amd64, 386, arm64, armv6l, ppc64le, s390x
- // some of these are not yet supported by the runner, but the mapping is future-proofed
- switch (arch) {
- case 'x64':
- arch = 'amd64';
- break;
- // case 'ppc':
- // arch = 'ppc64';
- // break;
- case 'x32':
- arch = '386';
- break;
- }
- return arch;
-}
-exports.getArch = getArch;
-
-
-/***/ }),
-
-/***/ 655:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const tc = __importStar(__webpack_require__(533));
-const path_1 = __importDefault(__webpack_require__(622));
-const main_1 = __webpack_require__(514);
-// installLint returns the path to the installed golangci-lint binary.
-function installLint(versionConfig) {
- return __awaiter(this, void 0, void 0, function* () {
- core.info(`Installing golangci-lint ${versionConfig.TargetVersion}...`);
- const startedAt = Date.now();
- core.info(`Downloading ${versionConfig.AssetURL} ...`);
- const tarGzPath = yield tc.downloadTool(versionConfig.AssetURL);
- const extractedDir = yield tc.extractTar(tarGzPath, process.env.HOME);
- const urlParts = versionConfig.AssetURL.split(`/`);
- const dirName = urlParts[urlParts.length - 1].replace(/\.tar\.gz$/, ``);
- const lintPath = path_1.default.join(extractedDir, dirName, `golangci-lint`);
- core.info(`Installed golangci-lint into ${lintPath} in ${Date.now() - startedAt}ms`);
- return lintPath;
- });
-}
-exports.installLint = installLint;
-function installGo() {
- return __awaiter(this, void 0, void 0, function* () {
- const startedAt = Date.now();
- process.env[`INPUT_GO-VERSION`] = `1`;
- yield main_1.run();
- core.info(`Installed Go in ${Date.now() - startedAt}ms`);
- });
-}
-exports.installGo = installGo;
-
-
-/***/ }),
-
-/***/ 669:
-/***/ (function(module) {
-
-module.exports = require("util");
-
-/***/ }),
-
-/***/ 672:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var _a;
-Object.defineProperty(exports, "__esModule", { value: true });
-const assert_1 = __webpack_require__(357);
-const fs = __webpack_require__(747);
-const path = __webpack_require__(622);
-_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
-exports.IS_WINDOWS = process.platform === 'win32';
-function exists(fsPath) {
- return __awaiter(this, void 0, void 0, function* () {
- try {
- yield exports.stat(fsPath);
- }
- catch (err) {
- if (err.code === 'ENOENT') {
- return false;
- }
- throw err;
- }
- return true;
- });
-}
-exports.exists = exists;
-function isDirectory(fsPath, useStat = false) {
- return __awaiter(this, void 0, void 0, function* () {
- const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
- return stats.isDirectory();
- });
-}
-exports.isDirectory = isDirectory;
-/**
- * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
- * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
- */
-function isRooted(p) {
- p = normalizeSeparators(p);
- if (!p) {
- throw new Error('isRooted() parameter "p" cannot be empty');
- }
- if (exports.IS_WINDOWS) {
- return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello
- ); // e.g. C: or C:\hello
- }
- return p.startsWith('/');
-}
-exports.isRooted = isRooted;
-/**
- * Recursively create a directory at `fsPath`.
- *
- * This implementation is optimistic, meaning it attempts to create the full
- * path first, and backs up the path stack from there.
- *
- * @param fsPath The path to create
- * @param maxDepth The maximum recursion depth
- * @param depth The current recursion depth
- */
-function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
- return __awaiter(this, void 0, void 0, function* () {
- assert_1.ok(fsPath, 'a path argument must be provided');
- fsPath = path.resolve(fsPath);
- if (depth >= maxDepth)
- return exports.mkdir(fsPath);
- try {
- yield exports.mkdir(fsPath);
- return;
- }
- catch (err) {
- switch (err.code) {
- case 'ENOENT': {
- yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
- yield exports.mkdir(fsPath);
- return;
- }
- default: {
- let stats;
- try {
- stats = yield exports.stat(fsPath);
- }
- catch (err2) {
- throw err;
- }
- if (!stats.isDirectory())
- throw err;
- }
- }
- }
- });
-}
-exports.mkdirP = mkdirP;
-/**
- * Best effort attempt to determine whether a file exists and is executable.
- * @param filePath file path to check
- * @param extensions additional file extensions to try
- * @return if file exists and is executable, returns the file path. otherwise empty string.
- */
-function tryGetExecutablePath(filePath, extensions) {
- return __awaiter(this, void 0, void 0, function* () {
- let stats = undefined;
- try {
- // test file exists
- stats = yield exports.stat(filePath);
- }
- catch (err) {
- if (err.code !== 'ENOENT') {
- // eslint-disable-next-line no-console
- console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
- }
- }
- if (stats && stats.isFile()) {
- if (exports.IS_WINDOWS) {
- // on Windows, test for valid extension
- const upperExt = path.extname(filePath).toUpperCase();
- if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
- return filePath;
- }
- }
- else {
- if (isUnixExecutable(stats)) {
- return filePath;
- }
- }
- }
- // try each extension
- const originalFilePath = filePath;
- for (const extension of extensions) {
- filePath = originalFilePath + extension;
- stats = undefined;
- try {
- stats = yield exports.stat(filePath);
- }
- catch (err) {
- if (err.code !== 'ENOENT') {
- // eslint-disable-next-line no-console
- console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
- }
- }
- if (stats && stats.isFile()) {
- if (exports.IS_WINDOWS) {
- // preserve the case of the actual file (since an extension was appended)
- try {
- const directory = path.dirname(filePath);
- const upperName = path.basename(filePath).toUpperCase();
- for (const actualName of yield exports.readdir(directory)) {
- if (upperName === actualName.toUpperCase()) {
- filePath = path.join(directory, actualName);
- break;
- }
- }
- }
- catch (err) {
- // eslint-disable-next-line no-console
- console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
- }
- return filePath;
- }
- else {
- if (isUnixExecutable(stats)) {
- return filePath;
- }
- }
- }
- }
- return '';
- });
-}
-exports.tryGetExecutablePath = tryGetExecutablePath;
-function normalizeSeparators(p) {
- p = p || '';
- if (exports.IS_WINDOWS) {
- // convert slashes on Windows
- p = p.replace(/\//g, '\\');
- // remove redundant slashes
- return p.replace(/\\\\+/g, '\\');
- }
- // remove redundant slashes
- return p.replace(/\/\/+/g, '/');
-}
-// on Mac/Linux, test the execute bit
-// R W X R W X R W X
-// 256 128 64 32 16 8 4 2 1
-function isUnixExecutable(stats) {
- return ((stats.mode & 1) > 0 ||
- ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
- ((stats.mode & 64) > 0 && stats.uid === process.getuid()));
-}
-//# sourceMappingURL=io-util.js.map
-
-/***/ }),
-
-/***/ 722:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const restore_1 = __importDefault(__webpack_require__(206));
-const save_1 = __importDefault(__webpack_require__(781));
-const crypto = __importStar(__webpack_require__(417));
-const fs = __importStar(__webpack_require__(747));
-function checksumFile(hashName, path) {
- return new Promise((resolve, reject) => {
- const hash = crypto.createHash(hashName);
- const stream = fs.createReadStream(path);
- stream.on("error", err => reject(err));
- stream.on("data", chunk => hash.update(chunk));
- stream.on("end", () => resolve(hash.digest("hex")));
- });
-}
-const pathExists = (path) => __awaiter(void 0, void 0, void 0, function* () { return !!(yield fs.promises.stat(path).catch(() => false)); });
-const getLintCacheDir = () => `${process.env.HOME}/.cache/golangci-lint`;
-const getCacheDirs = () => {
- // Non-existent dirs are fine here: caching still works.
- return [getLintCacheDir(), `${process.env.HOME}/.cache/go-build`, `${process.env.HOME}/go/pkg`];
-};
-const getIntervalKey = (invalidationIntervalDays) => {
- const now = new Date();
- const secondsSinceEpoch = now.getTime() / 1000;
- const intervalNumber = Math.floor(secondsSinceEpoch / (invalidationIntervalDays * 86400));
- return intervalNumber.toString();
-};
-function buildCacheKeys() {
- return __awaiter(this, void 0, void 0, function* () {
- const keys = [];
- let cacheKey = `golangci-lint.cache-`;
- keys.push(cacheKey);
- // Periodically invalidate the cache because new code is being added over time.
- // TODO: configure it via inputs.
- cacheKey += `${getIntervalKey(7)}-`;
- keys.push(cacheKey);
- if (yield pathExists(`go.mod`)) {
- // Add checksum to key to invalidate a cache when dependencies change.
- cacheKey += yield checksumFile(`sha1`, `go.mod`);
- }
- else {
- cacheKey += `nogomod`;
- }
- keys.push(cacheKey);
- return keys;
- });
-}
-function restoreCache() {
- return __awaiter(this, void 0, void 0, function* () {
- const startedAt = Date.now();
- const keys = yield buildCacheKeys();
- const primaryKey = keys.pop();
- const restoreKeys = keys.reverse();
- core.info(`Primary analysis cache key is '${primaryKey}', restore keys are '${restoreKeys.join(` | `)}'`);
- process.env[`INPUT_RESTORE-KEYS`] = restoreKeys.join(`\n`);
- process.env.INPUT_KEY = primaryKey;
- process.env.INPUT_PATH = getCacheDirs().join(`\n`);
- // Tell golangci-lint to use our cache directory.
- process.env.GOLANGCI_LINT_CACHE = getLintCacheDir();
- yield restore_1.default();
- core.info(`Restored cache for golangci-lint from key '${primaryKey}' in ${Date.now() - startedAt}ms`);
- });
-}
-exports.restoreCache = restoreCache;
-function saveCache() {
- return __awaiter(this, void 0, void 0, function* () {
- const startedAt = Date.now();
- const cacheDirs = getCacheDirs();
- process.env.INPUT_PATH = cacheDirs.join(`\n`);
- yield save_1.default();
- core.info(`Saved cache for golangci-lint from paths '${cacheDirs.join(`, `)}' in ${Date.now() - startedAt}ms`);
- });
-}
-exports.saveCache = saveCache;
-
-
-/***/ }),
-
-/***/ 728:
-/***/ (function(__unusedmodule, exports) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-class SearchState {
- constructor(path, level) {
- this.path = path;
- this.level = level;
- }
-}
-exports.SearchState = SearchState;
-//# sourceMappingURL=internal-search-state.js.map
-
-/***/ }),
-
-/***/ 747:
-/***/ (function(module) {
-
-module.exports = require("fs");
-
-/***/ }),
-
-/***/ 781:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const path = __importStar(__webpack_require__(622));
-const cacheHttpClient = __importStar(__webpack_require__(342));
-const constants_1 = __webpack_require__(929);
-const tar_1 = __webpack_require__(570);
-const utils = __importStar(__webpack_require__(867));
-function run() {
- return __awaiter(this, void 0, void 0, function* () {
- try {
- if (!utils.isValidEvent()) {
- utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils
- .getSupportedEvents()
- .join(", ")} events are supported at this time.`);
- return;
- }
- const state = utils.getCacheState();
- // Inputs are re-evaluated before the post action, so we want the original key used for restore
- const primaryKey = core.getState(constants_1.State.CacheKey);
- if (!primaryKey) {
- utils.logWarning(`Error retrieving key from state.`);
- return;
- }
- if (utils.isExactKeyMatch(primaryKey, state)) {
- core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
- return;
- }
- core.debug("Reserving Cache");
- const cacheId = yield cacheHttpClient.reserveCache(primaryKey);
- if (cacheId == -1) {
- core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
- return;
- }
- core.debug(`Cache ID: ${cacheId}`);
- const cachePaths = yield utils.resolvePaths(core
- .getInput(constants_1.Inputs.Path, { required: true })
- .split("\n")
- .filter(x => x !== ""));
- core.debug("Cache Paths:");
- core.debug(`${JSON.stringify(cachePaths)}`);
- const archiveFolder = yield utils.createTempDirectory();
- const archivePath = path.join(archiveFolder, constants_1.CacheFilename);
- core.debug(`Archive Path: ${archivePath}`);
- yield tar_1.createTar(archiveFolder, cachePaths);
- const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
- const archiveFileSize = utils.getArchiveFileSize(archivePath);
- core.debug(`File Size: ${archiveFileSize}`);
- if (archiveFileSize > fileSizeLimit) {
- utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
- return;
- }
- core.debug(`Saving Cache (ID: ${cacheId})`);
- yield cacheHttpClient.saveCache(cacheId, archivePath);
- }
- catch (error) {
- utils.logWarning(error.message);
- }
- });
-}
-exports.default = run;
-
-
-/***/ }),
-
-/***/ 794:
-/***/ (function(module) {
-
-module.exports = require("stream");
-
-/***/ }),
-
-/***/ 826:
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-var rng = __webpack_require__(139);
-var bytesToUuid = __webpack_require__(105);
-
-function v4(options, buf, offset) {
- var i = buf && offset || 0;
-
- if (typeof(options) == 'string') {
- buf = options === 'binary' ? new Array(16) : null;
- options = null;
- }
- options = options || {};
-
- var rnds = options.random || (options.rng || rng)();
-
- // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
- rnds[6] = (rnds[6] & 0x0f) | 0x40;
- rnds[8] = (rnds[8] & 0x3f) | 0x80;
-
- // Copy bytes to buffer, if provided
- if (buf) {
- for (var ii = 0; ii < 16; ++ii) {
- buf[i + ii] = rnds[ii];
- }
- }
-
- return buf || bytesToUuid(rnds);
-}
-
-module.exports = v4;
-
-
-/***/ }),
-
-/***/ 835:
-/***/ (function(module) {
-
-module.exports = require("url");
-
/***/ }),
/***/ 867:
@@ -7668,7 +33174,7 @@ const fs = __importStar(__webpack_require__(747));
const path = __importStar(__webpack_require__(622));
const util = __importStar(__webpack_require__(669));
const uuidV4 = __importStar(__webpack_require__(826));
-const constants_1 = __webpack_require__(929);
+const constants_1 = __webpack_require__(573);
// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
function createTempDirectory() {
return __awaiter(this, void 0, void 0, function* () {
@@ -7783,6 +33289,1153 @@ function unlinkFile(path) {
exports.unlinkFile = unlinkFile;
+/***/ }),
+
+/***/ 881:
+/***/ (function(module) {
+
+"use strict";
+
+
+const isWin = process.platform === 'win32';
+
+function notFoundError(original, syscall) {
+ return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), {
+ code: 'ENOENT',
+ errno: 'ENOENT',
+ syscall: `${syscall} ${original.command}`,
+ path: original.command,
+ spawnargs: original.args,
+ });
+}
+
+function hookChildProcess(cp, parsed) {
+ if (!isWin) {
+ return;
+ }
+
+ const originalEmit = cp.emit;
+
+ cp.emit = function (name, arg1) {
+ // If emitting "exit" event and exit code is 1, we need to check if
+ // the command exists and emit an "error" instead
+ // See https://github.com/IndigoUnited/node-cross-spawn/issues/16
+ if (name === 'exit') {
+ const err = verifyENOENT(arg1, parsed, 'spawn');
+
+ if (err) {
+ return originalEmit.call(cp, 'error', err);
+ }
+ }
+
+ return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
+ };
+}
+
+function verifyENOENT(status, parsed) {
+ if (isWin && status === 1 && !parsed.file) {
+ return notFoundError(parsed.original, 'spawn');
+ }
+
+ return null;
+}
+
+function verifyENOENTSync(status, parsed) {
+ if (isWin && status === 1 && !parsed.file) {
+ return notFoundError(parsed.original, 'spawnSync');
+ }
+
+ return null;
+}
+
+module.exports = {
+ hookChildProcess,
+ verifyENOENT,
+ verifyENOENTSync,
+ notFoundError,
+};
+
+
+/***/ }),
+
+/***/ 883:
+/***/ (function(module) {
+
+/**
+ * lodash (Custom Build)
+ * Build: `lodash modularize exports="npm" -o ./`
+ * Copyright jQuery Foundation and other contributors
+ * Released under MIT license
+ * Based on Underscore.js 1.8.3
+ * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+ */
+
+/** Used as the `TypeError` message for "Functions" methods. */
+var FUNC_ERROR_TEXT = 'Expected a function';
+
+/** Used to stand-in for `undefined` hash values. */
+var HASH_UNDEFINED = '__lodash_hash_undefined__';
+
+/** Used as references for various `Number` constants. */
+var INFINITY = 1 / 0,
+ MAX_SAFE_INTEGER = 9007199254740991;
+
+/** `Object#toString` result references. */
+var funcTag = '[object Function]',
+ genTag = '[object GeneratorFunction]',
+ symbolTag = '[object Symbol]';
+
+/** Used to match property names within property paths. */
+var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,
+ reIsPlainProp = /^\w*$/,
+ reLeadingDot = /^\./,
+ rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g;
+
+/**
+ * Used to match `RegExp`
+ * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
+ */
+var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
+
+/** Used to match backslashes in property paths. */
+var reEscapeChar = /\\(\\)?/g;
+
+/** Used to detect host constructors (Safari). */
+var reIsHostCtor = /^\[object .+?Constructor\]$/;
+
+/** Used to detect unsigned integer values. */
+var reIsUint = /^(?:0|[1-9]\d*)$/;
+
+/** Detect free variable `global` from Node.js. */
+var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
+
+/** Detect free variable `self`. */
+var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
+
+/** Used as a reference to the global object. */
+var root = freeGlobal || freeSelf || Function('return this')();
+
+/**
+ * Gets the value at `key` of `object`.
+ *
+ * @private
+ * @param {Object} [object] The object to query.
+ * @param {string} key The key of the property to get.
+ * @returns {*} Returns the property value.
+ */
+function getValue(object, key) {
+ return object == null ? undefined : object[key];
+}
+
+/**
+ * Checks if `value` is a host object in IE < 9.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a host object, else `false`.
+ */
+function isHostObject(value) {
+ // Many host objects are `Object` objects that can coerce to strings
+ // despite having improperly defined `toString` methods.
+ var result = false;
+ if (value != null && typeof value.toString != 'function') {
+ try {
+ result = !!(value + '');
+ } catch (e) {}
+ }
+ return result;
+}
+
+/** Used for built-in method references. */
+var arrayProto = Array.prototype,
+ funcProto = Function.prototype,
+ objectProto = Object.prototype;
+
+/** Used to detect overreaching core-js shims. */
+var coreJsData = root['__core-js_shared__'];
+
+/** Used to detect methods masquerading as native. */
+var maskSrcKey = (function() {
+ var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
+ return uid ? ('Symbol(src)_1.' + uid) : '';
+}());
+
+/** Used to resolve the decompiled source of functions. */
+var funcToString = funcProto.toString;
+
+/** Used to check objects for own properties. */
+var hasOwnProperty = objectProto.hasOwnProperty;
+
+/**
+ * Used to resolve the
+ * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
+ * of values.
+ */
+var objectToString = objectProto.toString;
+
+/** Used to detect if a method is native. */
+var reIsNative = RegExp('^' +
+ funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
+ .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
+);
+
+/** Built-in value references. */
+var Symbol = root.Symbol,
+ splice = arrayProto.splice;
+
+/* Built-in method references that are verified to be native. */
+var Map = getNative(root, 'Map'),
+ nativeCreate = getNative(Object, 'create');
+
+/** Used to convert symbols to primitives and strings. */
+var symbolProto = Symbol ? Symbol.prototype : undefined,
+ symbolToString = symbolProto ? symbolProto.toString : undefined;
+
+/**
+ * Creates a hash object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function Hash(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the hash.
+ *
+ * @private
+ * @name clear
+ * @memberOf Hash
+ */
+function hashClear() {
+ this.__data__ = nativeCreate ? nativeCreate(null) : {};
+}
+
+/**
+ * Removes `key` and its value from the hash.
+ *
+ * @private
+ * @name delete
+ * @memberOf Hash
+ * @param {Object} hash The hash to modify.
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function hashDelete(key) {
+ return this.has(key) && delete this.__data__[key];
+}
+
+/**
+ * Gets the hash value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf Hash
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function hashGet(key) {
+ var data = this.__data__;
+ if (nativeCreate) {
+ var result = data[key];
+ return result === HASH_UNDEFINED ? undefined : result;
+ }
+ return hasOwnProperty.call(data, key) ? data[key] : undefined;
+}
+
+/**
+ * Checks if a hash value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf Hash
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function hashHas(key) {
+ var data = this.__data__;
+ return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key);
+}
+
+/**
+ * Sets the hash `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf Hash
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the hash instance.
+ */
+function hashSet(key, value) {
+ var data = this.__data__;
+ data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
+ return this;
+}
+
+// Add methods to `Hash`.
+Hash.prototype.clear = hashClear;
+Hash.prototype['delete'] = hashDelete;
+Hash.prototype.get = hashGet;
+Hash.prototype.has = hashHas;
+Hash.prototype.set = hashSet;
+
+/**
+ * Creates an list cache object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function ListCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the list cache.
+ *
+ * @private
+ * @name clear
+ * @memberOf ListCache
+ */
+function listCacheClear() {
+ this.__data__ = [];
+}
+
+/**
+ * Removes `key` and its value from the list cache.
+ *
+ * @private
+ * @name delete
+ * @memberOf ListCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function listCacheDelete(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ return false;
+ }
+ var lastIndex = data.length - 1;
+ if (index == lastIndex) {
+ data.pop();
+ } else {
+ splice.call(data, index, 1);
+ }
+ return true;
+}
+
+/**
+ * Gets the list cache value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf ListCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function listCacheGet(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ return index < 0 ? undefined : data[index][1];
+}
+
+/**
+ * Checks if a list cache value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf ListCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function listCacheHas(key) {
+ return assocIndexOf(this.__data__, key) > -1;
+}
+
+/**
+ * Sets the list cache `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf ListCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the list cache instance.
+ */
+function listCacheSet(key, value) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ data.push([key, value]);
+ } else {
+ data[index][1] = value;
+ }
+ return this;
+}
+
+// Add methods to `ListCache`.
+ListCache.prototype.clear = listCacheClear;
+ListCache.prototype['delete'] = listCacheDelete;
+ListCache.prototype.get = listCacheGet;
+ListCache.prototype.has = listCacheHas;
+ListCache.prototype.set = listCacheSet;
+
+/**
+ * Creates a map cache object to store key-value pairs.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function MapCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the map.
+ *
+ * @private
+ * @name clear
+ * @memberOf MapCache
+ */
+function mapCacheClear() {
+ this.__data__ = {
+ 'hash': new Hash,
+ 'map': new (Map || ListCache),
+ 'string': new Hash
+ };
+}
+
+/**
+ * Removes `key` and its value from the map.
+ *
+ * @private
+ * @name delete
+ * @memberOf MapCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function mapCacheDelete(key) {
+ return getMapData(this, key)['delete'](key);
+}
+
+/**
+ * Gets the map value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf MapCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function mapCacheGet(key) {
+ return getMapData(this, key).get(key);
+}
+
+/**
+ * Checks if a map value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf MapCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function mapCacheHas(key) {
+ return getMapData(this, key).has(key);
+}
+
+/**
+ * Sets the map `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf MapCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the map cache instance.
+ */
+function mapCacheSet(key, value) {
+ getMapData(this, key).set(key, value);
+ return this;
+}
+
+// Add methods to `MapCache`.
+MapCache.prototype.clear = mapCacheClear;
+MapCache.prototype['delete'] = mapCacheDelete;
+MapCache.prototype.get = mapCacheGet;
+MapCache.prototype.has = mapCacheHas;
+MapCache.prototype.set = mapCacheSet;
+
+/**
+ * Assigns `value` to `key` of `object` if the existing value is not equivalent
+ * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * for equality comparisons.
+ *
+ * @private
+ * @param {Object} object The object to modify.
+ * @param {string} key The key of the property to assign.
+ * @param {*} value The value to assign.
+ */
+function assignValue(object, key, value) {
+ var objValue = object[key];
+ if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) ||
+ (value === undefined && !(key in object))) {
+ object[key] = value;
+ }
+}
+
+/**
+ * Gets the index at which the `key` is found in `array` of key-value pairs.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {*} key The key to search for.
+ * @returns {number} Returns the index of the matched value, else `-1`.
+ */
+function assocIndexOf(array, key) {
+ var length = array.length;
+ while (length--) {
+ if (eq(array[length][0], key)) {
+ return length;
+ }
+ }
+ return -1;
+}
+
+/**
+ * The base implementation of `_.isNative` without bad shim checks.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a native function,
+ * else `false`.
+ */
+function baseIsNative(value) {
+ if (!isObject(value) || isMasked(value)) {
+ return false;
+ }
+ var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor;
+ return pattern.test(toSource(value));
+}
+
+/**
+ * The base implementation of `_.set`.
+ *
+ * @private
+ * @param {Object} object The object to modify.
+ * @param {Array|string} path The path of the property to set.
+ * @param {*} value The value to set.
+ * @param {Function} [customizer] The function to customize path creation.
+ * @returns {Object} Returns `object`.
+ */
+function baseSet(object, path, value, customizer) {
+ if (!isObject(object)) {
+ return object;
+ }
+ path = isKey(path, object) ? [path] : castPath(path);
+
+ var index = -1,
+ length = path.length,
+ lastIndex = length - 1,
+ nested = object;
+
+ while (nested != null && ++index < length) {
+ var key = toKey(path[index]),
+ newValue = value;
+
+ if (index != lastIndex) {
+ var objValue = nested[key];
+ newValue = customizer ? customizer(objValue, key, nested) : undefined;
+ if (newValue === undefined) {
+ newValue = isObject(objValue)
+ ? objValue
+ : (isIndex(path[index + 1]) ? [] : {});
+ }
+ }
+ assignValue(nested, key, newValue);
+ nested = nested[key];
+ }
+ return object;
+}
+
+/**
+ * The base implementation of `_.toString` which doesn't convert nullish
+ * values to empty strings.
+ *
+ * @private
+ * @param {*} value The value to process.
+ * @returns {string} Returns the string.
+ */
+function baseToString(value) {
+ // Exit early for strings to avoid a performance hit in some environments.
+ if (typeof value == 'string') {
+ return value;
+ }
+ if (isSymbol(value)) {
+ return symbolToString ? symbolToString.call(value) : '';
+ }
+ var result = (value + '');
+ return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
+}
+
+/**
+ * Casts `value` to a path array if it's not one.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {Array} Returns the cast property path array.
+ */
+function castPath(value) {
+ return isArray(value) ? value : stringToPath(value);
+}
+
+/**
+ * Gets the data for `map`.
+ *
+ * @private
+ * @param {Object} map The map to query.
+ * @param {string} key The reference key.
+ * @returns {*} Returns the map data.
+ */
+function getMapData(map, key) {
+ var data = map.__data__;
+ return isKeyable(key)
+ ? data[typeof key == 'string' ? 'string' : 'hash']
+ : data.map;
+}
+
+/**
+ * Gets the native function at `key` of `object`.
+ *
+ * @private
+ * @param {Object} object The object to query.
+ * @param {string} key The key of the method to get.
+ * @returns {*} Returns the function if it's native, else `undefined`.
+ */
+function getNative(object, key) {
+ var value = getValue(object, key);
+ return baseIsNative(value) ? value : undefined;
+}
+
+/**
+ * Checks if `value` is a valid array-like index.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index.
+ * @returns {boolean} Returns `true` if `value` is a valid index, else `false`.
+ */
+function isIndex(value, length) {
+ length = length == null ? MAX_SAFE_INTEGER : length;
+ return !!length &&
+ (typeof value == 'number' || reIsUint.test(value)) &&
+ (value > -1 && value % 1 == 0 && value < length);
+}
+
+/**
+ * Checks if `value` is a property name and not a property path.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @param {Object} [object] The object to query keys on.
+ * @returns {boolean} Returns `true` if `value` is a property name, else `false`.
+ */
+function isKey(value, object) {
+ if (isArray(value)) {
+ return false;
+ }
+ var type = typeof value;
+ if (type == 'number' || type == 'symbol' || type == 'boolean' ||
+ value == null || isSymbol(value)) {
+ return true;
+ }
+ return reIsPlainProp.test(value) || !reIsDeepProp.test(value) ||
+ (object != null && value in Object(object));
+}
+
+/**
+ * Checks if `value` is suitable for use as unique object key.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is suitable, else `false`.
+ */
+function isKeyable(value) {
+ var type = typeof value;
+ return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
+ ? (value !== '__proto__')
+ : (value === null);
+}
+
+/**
+ * Checks if `func` has its source masked.
+ *
+ * @private
+ * @param {Function} func The function to check.
+ * @returns {boolean} Returns `true` if `func` is masked, else `false`.
+ */
+function isMasked(func) {
+ return !!maskSrcKey && (maskSrcKey in func);
+}
+
+/**
+ * Converts `string` to a property path array.
+ *
+ * @private
+ * @param {string} string The string to convert.
+ * @returns {Array} Returns the property path array.
+ */
+var stringToPath = memoize(function(string) {
+ string = toString(string);
+
+ var result = [];
+ if (reLeadingDot.test(string)) {
+ result.push('');
+ }
+ string.replace(rePropName, function(match, number, quote, string) {
+ result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match));
+ });
+ return result;
+});
+
+/**
+ * Converts `value` to a string key if it's not a string or symbol.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {string|symbol} Returns the key.
+ */
+function toKey(value) {
+ if (typeof value == 'string' || isSymbol(value)) {
+ return value;
+ }
+ var result = (value + '');
+ return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
+}
+
+/**
+ * Converts `func` to its source code.
+ *
+ * @private
+ * @param {Function} func The function to process.
+ * @returns {string} Returns the source code.
+ */
+function toSource(func) {
+ if (func != null) {
+ try {
+ return funcToString.call(func);
+ } catch (e) {}
+ try {
+ return (func + '');
+ } catch (e) {}
+ }
+ return '';
+}
+
+/**
+ * Creates a function that memoizes the result of `func`. If `resolver` is
+ * provided, it determines the cache key for storing the result based on the
+ * arguments provided to the memoized function. By default, the first argument
+ * provided to the memoized function is used as the map cache key. The `func`
+ * is invoked with the `this` binding of the memoized function.
+ *
+ * **Note:** The cache is exposed as the `cache` property on the memoized
+ * function. Its creation may be customized by replacing the `_.memoize.Cache`
+ * constructor with one whose instances implement the
+ * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object)
+ * method interface of `delete`, `get`, `has`, and `set`.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Function
+ * @param {Function} func The function to have its output memoized.
+ * @param {Function} [resolver] The function to resolve the cache key.
+ * @returns {Function} Returns the new memoized function.
+ * @example
+ *
+ * var object = { 'a': 1, 'b': 2 };
+ * var other = { 'c': 3, 'd': 4 };
+ *
+ * var values = _.memoize(_.values);
+ * values(object);
+ * // => [1, 2]
+ *
+ * values(other);
+ * // => [3, 4]
+ *
+ * object.a = 2;
+ * values(object);
+ * // => [1, 2]
+ *
+ * // Modify the result cache.
+ * values.cache.set(object, ['a', 'b']);
+ * values(object);
+ * // => ['a', 'b']
+ *
+ * // Replace `_.memoize.Cache`.
+ * _.memoize.Cache = WeakMap;
+ */
+function memoize(func, resolver) {
+ if (typeof func != 'function' || (resolver && typeof resolver != 'function')) {
+ throw new TypeError(FUNC_ERROR_TEXT);
+ }
+ var memoized = function() {
+ var args = arguments,
+ key = resolver ? resolver.apply(this, args) : args[0],
+ cache = memoized.cache;
+
+ if (cache.has(key)) {
+ return cache.get(key);
+ }
+ var result = func.apply(this, args);
+ memoized.cache = cache.set(key, result);
+ return result;
+ };
+ memoized.cache = new (memoize.Cache || MapCache);
+ return memoized;
+}
+
+// Assign cache to `_.memoize`.
+memoize.Cache = MapCache;
+
+/**
+ * Performs a
+ * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * comparison between two values to determine if they are equivalent.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to compare.
+ * @param {*} other The other value to compare.
+ * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
+ * @example
+ *
+ * var object = { 'a': 1 };
+ * var other = { 'a': 1 };
+ *
+ * _.eq(object, object);
+ * // => true
+ *
+ * _.eq(object, other);
+ * // => false
+ *
+ * _.eq('a', 'a');
+ * // => true
+ *
+ * _.eq('a', Object('a'));
+ * // => false
+ *
+ * _.eq(NaN, NaN);
+ * // => true
+ */
+function eq(value, other) {
+ return value === other || (value !== value && other !== other);
+}
+
+/**
+ * Checks if `value` is classified as an `Array` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an array, else `false`.
+ * @example
+ *
+ * _.isArray([1, 2, 3]);
+ * // => true
+ *
+ * _.isArray(document.body.children);
+ * // => false
+ *
+ * _.isArray('abc');
+ * // => false
+ *
+ * _.isArray(_.noop);
+ * // => false
+ */
+var isArray = Array.isArray;
+
+/**
+ * Checks if `value` is classified as a `Function` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a function, else `false`.
+ * @example
+ *
+ * _.isFunction(_);
+ * // => true
+ *
+ * _.isFunction(/abc/);
+ * // => false
+ */
+function isFunction(value) {
+ // The use of `Object#toString` avoids issues with the `typeof` operator
+ // in Safari 8-9 which returns 'object' for typed array and other constructors.
+ var tag = isObject(value) ? objectToString.call(value) : '';
+ return tag == funcTag || tag == genTag;
+}
+
+/**
+ * Checks if `value` is the
+ * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
+ * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an object, else `false`.
+ * @example
+ *
+ * _.isObject({});
+ * // => true
+ *
+ * _.isObject([1, 2, 3]);
+ * // => true
+ *
+ * _.isObject(_.noop);
+ * // => true
+ *
+ * _.isObject(null);
+ * // => false
+ */
+function isObject(value) {
+ var type = typeof value;
+ return !!value && (type == 'object' || type == 'function');
+}
+
+/**
+ * Checks if `value` is object-like. A value is object-like if it's not `null`
+ * and has a `typeof` result of "object".
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is object-like, else `false`.
+ * @example
+ *
+ * _.isObjectLike({});
+ * // => true
+ *
+ * _.isObjectLike([1, 2, 3]);
+ * // => true
+ *
+ * _.isObjectLike(_.noop);
+ * // => false
+ *
+ * _.isObjectLike(null);
+ * // => false
+ */
+function isObjectLike(value) {
+ return !!value && typeof value == 'object';
+}
+
+/**
+ * Checks if `value` is classified as a `Symbol` primitive or object.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a symbol, else `false`.
+ * @example
+ *
+ * _.isSymbol(Symbol.iterator);
+ * // => true
+ *
+ * _.isSymbol('abc');
+ * // => false
+ */
+function isSymbol(value) {
+ return typeof value == 'symbol' ||
+ (isObjectLike(value) && objectToString.call(value) == symbolTag);
+}
+
+/**
+ * Converts `value` to a string. An empty string is returned for `null`
+ * and `undefined` values. The sign of `-0` is preserved.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to process.
+ * @returns {string} Returns the string.
+ * @example
+ *
+ * _.toString(null);
+ * // => ''
+ *
+ * _.toString(-0);
+ * // => '-0'
+ *
+ * _.toString([1, 2, 3]);
+ * // => '1,2,3'
+ */
+function toString(value) {
+ return value == null ? '' : baseToString(value);
+}
+
+/**
+ * Sets the value at `path` of `object`. If a portion of `path` doesn't exist,
+ * it's created. Arrays are created for missing index properties while objects
+ * are created for all other missing properties. Use `_.setWith` to customize
+ * `path` creation.
+ *
+ * **Note:** This method mutates `object`.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.7.0
+ * @category Object
+ * @param {Object} object The object to modify.
+ * @param {Array|string} path The path of the property to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns `object`.
+ * @example
+ *
+ * var object = { 'a': [{ 'b': { 'c': 3 } }] };
+ *
+ * _.set(object, 'a[0].b.c', 4);
+ * console.log(object.a[0].b.c);
+ * // => 4
+ *
+ * _.set(object, ['x', '0', 'y', 'z'], 5);
+ * console.log(object.x[0].y.z);
+ * // => 5
+ */
+function set(object, path, value) {
+ return object == null ? object : baseSet(object, path, value);
+}
+
+module.exports = set;
+
+
+/***/ }),
+
+/***/ 890:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationPlugin;
+
+const { createTokenAuth } = __webpack_require__(813);
+const { Deprecation } = __webpack_require__(692);
+const once = __webpack_require__(969);
+
+const beforeRequest = __webpack_require__(414);
+const requestError = __webpack_require__(446);
+const validate = __webpack_require__(538);
+const withAuthorizationPrefix = __webpack_require__(85);
+
+const deprecateAuthBasic = once((log, deprecation) => log.warn(deprecation));
+const deprecateAuthObject = once((log, deprecation) => log.warn(deprecation));
+
+function authenticationPlugin(octokit, options) {
+ // If `options.authStrategy` is set then use it and pass in `options.auth`
+ if (options.authStrategy) {
+ const auth = options.authStrategy(options.auth);
+ octokit.hook.wrap("request", auth.hook);
+ octokit.auth = auth;
+ return;
+ }
+
+  // If neither `options.authStrategy` nor `options.auth` is set, the `octokit` instance
+  // is unauthenticated. The `octokit.auth()` method is a no-op and no request hook is registered.
+ if (!options.auth) {
+ octokit.auth = () =>
+ Promise.resolve({
+ type: "unauthenticated"
+ });
+ return;
+ }
+
+ const isBasicAuthString =
+ typeof options.auth === "string" &&
+ /^basic/.test(withAuthorizationPrefix(options.auth));
+
+ // If only `options.auth` is set to a string, use the default token authentication strategy.
+ if (typeof options.auth === "string" && !isBasicAuthString) {
+ const auth = createTokenAuth(options.auth);
+ octokit.hook.wrap("request", auth.hook);
+ octokit.auth = auth;
+ return;
+ }
+
+ // Otherwise log a deprecation message
+  const [deprecationMethod, deprecationMessage] = isBasicAuthString
+ ? [
+ deprecateAuthBasic,
+ 'Setting the "new Octokit({ auth })" option to a Basic Auth string is deprecated. Use https://github.com/octokit/auth-basic.js instead. See (https://octokit.github.io/rest.js/#authentication)'
+ ]
+ : [
+ deprecateAuthObject,
+ 'Setting the "new Octokit({ auth })" option to an object without also setting the "authStrategy" option is deprecated and will be removed in v17. See (https://octokit.github.io/rest.js/#authentication)'
+ ];
+ deprecationMethod(
+ octokit.log,
+    new Deprecation("[@octokit/rest] " + deprecationMessage)
+ );
+
+ octokit.auth = () =>
+ Promise.resolve({
+ type: "deprecated",
+      message: deprecationMessage
+ });
+
+ validate(options.auth);
+
+ const state = {
+ octokit,
+ auth: options.auth
+ };
+
+ octokit.hook.before("request", beforeRequest.bind(null, state));
+ octokit.hook.error("request", requestError.bind(null, state));
+}
+
+
/***/ }),
/***/ 896:
@@ -7803,6 +34456,99 @@ var isArray = Array.isArray || function (xs) {
};
+/***/ }),
+
+/***/ 898:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+var request = __webpack_require__(753);
+var universalUserAgent = __webpack_require__(796);
+
+const VERSION = "4.3.1";
+
+class GraphqlError extends Error {
+ constructor(request, response) {
+ const message = response.data.errors[0].message;
+ super(message);
+ Object.assign(this, response.data);
+ this.name = "GraphqlError";
+    this.request = request;
+
+    // Maintains proper stack trace (only available on V8)
+    /* istanbul ignore next */
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+ }
+
+}
+
+const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query"];
+function graphql(request, query, options) {
+  options = typeof query === "string" ? Object.assign({
+    query
+  }, options) : query;
+ const requestOptions = Object.keys(options).reduce((result, key) => {
+ if (NON_VARIABLE_OPTIONS.includes(key)) {
+ result[key] = options[key];
+ return result;
+ }
+
+ if (!result.variables) {
+ result.variables = {};
+ }
+
+ result.variables[key] = options[key];
+ return result;
+ }, {});
+ return request(requestOptions).then(response => {
+ if (response.data.errors) {
+ throw new GraphqlError(requestOptions, {
+ data: response.data
+ });
+ }
+
+ return response.data.data;
+ });
+}
+
+function withDefaults(request$1, newDefaults) {
+ const newRequest = request$1.defaults(newDefaults);
+
+ const newApi = (query, options) => {
+ return graphql(newRequest, query, options);
+ };
+
+ return Object.assign(newApi, {
+ defaults: withDefaults.bind(null, newRequest),
+ endpoint: request.request.endpoint
+ });
+}
+
+const graphql$1 = withDefaults(request.request, {
+ headers: {
+ "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
+ },
+ method: "POST",
+ url: "/graphql"
+});
+function withCustomRequest(customRequest) {
+ return withDefaults(customRequest, {
+ method: "POST",
+ url: "/graphql"
+ });
+}
+
+exports.graphql = graphql$1;
+exports.withCustomRequest = withCustomRequest;
+//# sourceMappingURL=index.js.map
+
+
/***/ }),
/***/ 909:
@@ -7815,6 +34561,51 @@ const run_1 = __webpack_require__(180);
run_1.postRun();
+/***/ }),
+
+/***/ 916:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+const VERSION = "1.0.0";
+
+/**
+ * @param octokit Octokit instance
+ * @param options Options passed to Octokit constructor
+ */
+
+function requestLog(octokit) {
+ octokit.hook.wrap("request", (request, options) => {
+ octokit.log.debug("request", options);
+ const start = Date.now();
+ const requestOptions = octokit.request.endpoint.parse(options);
+ const path = requestOptions.url.replace(options.baseUrl, "");
+ return request(options).then(response => {
+ octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`);
+ return response;
+ }).catch(error => {
+ octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`);
+ throw error;
+ });
+ });
+}
+requestLog.VERSION = VERSION;
+
+exports.requestLog = requestLog;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 919:
+/***/ (function(module) {
+
+module.exports = {"_args":[["@octokit/rest@16.43.1","/Users/denis-isaev/golangci-lint-action"]],"_from":"@octokit/rest@16.43.1","_id":"@octokit/rest@16.43.1","_inBundle":false,"_integrity":"sha512-gfFKwRT/wFxq5qlNjnW2dh+qh74XgTQ2B179UX5K1HYCluioWj8Ndbgqw2PVqa1NnVJkGHp2ovMpVn/DImlmkw==","_location":"/@actions/github/@octokit/rest","_phantomChildren":{},"_requested":{"type":"version","registry":true,"raw":"@octokit/rest@16.43.1","name":"@octokit/rest","escapedName":"@octokit%2frest","scope":"@octokit","rawSpec":"16.43.1","saveSpec":null,"fetchSpec":"16.43.1"},"_requiredBy":["/@actions/github"],"_resolved":"https://registry.npmjs.org/@octokit/rest/-/rest-16.43.1.tgz","_spec":"16.43.1","_where":"/Users/denis-isaev/golangci-lint-action","author":{"name":"Gregor Martynus","url":"https://github.com/gr2m"},"bugs":{"url":"https://github.com/octokit/rest.js/issues"},"bundlesize":[{"path":"./dist/octokit-rest.min.js.gz","maxSize":"33 kB"}],"contributors":[{"name":"Mike de Boer","email":"info@mikedeboer.nl"},{"name":"Fabian Jakobs","email":"fabian@c9.io"},{"name":"Joe Gallo","email":"joe@brassafrax.com"},{"name":"Gregor Martynus","url":"https://github.com/gr2m"}],"dependencies":{"@octokit/auth-token":"^2.4.0","@octokit/plugin-paginate-rest":"^1.1.1","@octokit/plugin-request-log":"^1.0.0","@octokit/plugin-rest-endpoint-methods":"2.4.0","@octokit/request":"^5.2.0","@octokit/request-error":"^1.0.2","atob-lite":"^2.0.0","before-after-hook":"^2.0.0","btoa-lite":"^1.0.0","deprecation":"^2.0.0","lodash.get":"^4.4.2","lodash.set":"^4.3.2","lodash.uniq":"^4.5.0","octokit-pagination-methods":"^1.1.0","once":"^1.4.0","universal-user-agent":"^4.0.0"},"description":"GitHub REST API client for Node.js","devDependencies":{"@gimenete/type-writer":"^0.1.3","@octokit/auth":"^1.1.1","@octokit/fixtures-server":"^5.0.6","@octokit/graphql":"^4.2.0","@types/node":"^13.1.0","bundlesize":"^0.18.0","chai":"^4.1.2","compression-webpack-plugin":"^3.1.0","cypress":"^3.0.0","glob":"^7.1.2","http-proxy-agent":"^4.0.0","lodash.camelcase":"^4.3.0","lodash.merge":"^4.6.1","lodash.upperfirst":"^4.3.1","lolex":"^5.1.2","mkdirp":"^1.0.0","mocha":"^7.0.1","mustache":"^4.0.0","nock":"^11.3.3","npm-run-all":"^4.1.2","nyc":"^15.0.0","prettier":"^1.14.2","proxy":"^1.0.0","semantic-release":"^17.0.0","sinon":"^8.0.0","sinon-chai":"^3.0.0","sort-keys":"^4.0.0","string-to-arraybuffer":"^1.0.0","string-to-jsdoc-comment":"^1.0.0","typescript":"^3.3.1","webpack":"^4.0.0","webpack-bundle-analyzer":"^3.0.0","webpack-cli":"^3.0.0"},"files":["index.js","index.d.ts","lib","plugins"],"homepage":"https://github.com/octokit/rest.js#readme","keywords":["octokit","github","rest","api-client"],"license":"MIT","name":"@octokit/rest","nyc":{"ignore":["test"]},"publishConfig":{"access":"public"},"release":{"publish":["@semantic-release/npm",{"path":"@semantic-release/github","assets":["dist/*","!dist/*.map.gz"]}]},"repository":{"type":"git","url":"git+https://github.com/octokit/rest.js.git"},"scripts":{"build":"npm-run-all build:*","build:browser":"npm-run-all build:browser:*","build:browser:development":"webpack --mode development --entry . --output-library=Octokit --output=./dist/octokit-rest.js --profile --json > dist/bundle-stats.json","build:browser:production":"webpack --mode production --entry . 
--plugin=compression-webpack-plugin --output-library=Octokit --output-path=./dist --output-filename=octokit-rest.min.js --devtool source-map","build:ts":"npm run -s update-endpoints:typescript","coverage":"nyc report --reporter=html && open coverage/index.html","generate-bundle-report":"webpack-bundle-analyzer dist/bundle-stats.json --mode=static --no-open --report dist/bundle-report.html","lint":"prettier --check '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json","lint:fix":"prettier --write '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json","postvalidate:ts":"tsc --noEmit --target es6 test/typescript-validate.ts","prebuild:browser":"mkdirp dist/","pretest":"npm run -s lint","prevalidate:ts":"npm run -s build:ts","start-fixtures-server":"octokit-fixtures-server","test":"nyc mocha test/mocha-node-setup.js \"test/*/**/*-test.js\"","test:browser":"cypress run --browser chrome","update-endpoints":"npm-run-all update-endpoints:*","update-endpoints:fetch-json":"node scripts/update-endpoints/fetch-json","update-endpoints:typescript":"node scripts/update-endpoints/typescript","validate:ts":"tsc --target es6 --noImplicitAny index.d.ts"},"types":"index.d.ts","version":"16.43.1"};
+
/***/ }),
/***/ 923:
@@ -8056,38 +34847,37 @@ exports.Pattern = Pattern;
/***/ }),
/***/ 929:
-/***/ (function(__unusedmodule, exports) {
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = hasNextPage
+
+const deprecate = __webpack_require__(370)
+const getPageLinks = __webpack_require__(577)
+
+function hasNextPage (link) {
+ deprecate(`octokit.hasNextPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
+ return getPageLinks(link).next
+}
+
+
+/***/ }),
+
+/***/ 948:
+/***/ (function(module) {
"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-var Inputs;
-(function (Inputs) {
- Inputs["Key"] = "key";
- Inputs["Path"] = "path";
- Inputs["RestoreKeys"] = "restore-keys";
-})(Inputs = exports.Inputs || (exports.Inputs = {}));
-var Outputs;
-(function (Outputs) {
- Outputs["CacheHit"] = "cache-hit";
-})(Outputs = exports.Outputs || (exports.Outputs = {}));
-var State;
-(function (State) {
- State["CacheKey"] = "CACHE_KEY";
- State["CacheResult"] = "CACHE_RESULT";
-})(State = exports.State || (exports.State = {}));
-var Events;
-(function (Events) {
- Events["Key"] = "GITHUB_EVENT_NAME";
- Events["Push"] = "push";
- Events["PullRequest"] = "pull_request";
-})(Events = exports.Events || (exports.Events = {}));
-exports.CacheFilename = "cache.tgz";
-// Socket timeout in milliseconds during download. If no traffic is received
-// over the socket during this period, the socket is destroyed and the download
-// is aborted.
-exports.SocketTimeout = 5000;
+/**
+ * Tries to execute a function and discards any error that occurs.
+ * @param {Function} fn - Function that might or might not throw an error.
+ * @returns {?*} Return-value of the function when no error occurred.
+ */
+module.exports = function(fn) {
+
+ try { return fn() } catch (e) {}
+
+}
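+
+// Illustrative use only (this comment is not part of the upstream source):
+// assuming the export is bound to a hypothetical local name `attempt`, e.g.
+//   const attempt = __webpack_require__(948);
+//   attempt(() => JSON.parse(raw)); // parsed value, or undefined if parsing throws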
/***/ }),
@@ -8155,6 +34945,521 @@ function checkBypass(reqUrl) {
exports.checkBypass = checkBypass;
+/***/ }),
+
+/***/ 953:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationPlugin;
+
+const { Deprecation } = __webpack_require__(692);
+const once = __webpack_require__(969);
+
+const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation));
+
+const authenticate = __webpack_require__(790);
+const beforeRequest = __webpack_require__(140);
+const requestError = __webpack_require__(240);
+
+function authenticationPlugin(octokit, options) {
+ if (options.auth) {
+ octokit.authenticate = () => {
+ deprecateAuthenticate(
+ octokit.log,
+ new Deprecation(
+ '[@octokit/rest] octokit.authenticate() is deprecated and has no effect when "auth" option is set on Octokit constructor'
+ )
+ );
+ };
+ return;
+ }
+ const state = {
+ octokit,
+ auth: false
+ };
+ octokit.authenticate = authenticate.bind(null, state);
+ octokit.hook.before("request", beforeRequest.bind(null, state));
+ octokit.hook.error("request", requestError.bind(null, state));
+}
+
+
+/***/ }),
+
+/***/ 955:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const path = __webpack_require__(622);
+const childProcess = __webpack_require__(129);
+const crossSpawn = __webpack_require__(20);
+const stripEof = __webpack_require__(768);
+const npmRunPath = __webpack_require__(621);
+const isStream = __webpack_require__(323);
+const _getStream = __webpack_require__(145);
+const pFinally = __webpack_require__(697);
+const onExit = __webpack_require__(260);
+const errname = __webpack_require__(427);
+const stdio = __webpack_require__(168);
+
+const TEN_MEGABYTES = 1000 * 1000 * 10;
+
+function handleArgs(cmd, args, opts) {
+ let parsed;
+
+ opts = Object.assign({
+ extendEnv: true,
+ env: {}
+ }, opts);
+
+ if (opts.extendEnv) {
+ opts.env = Object.assign({}, process.env, opts.env);
+ }
+
+ if (opts.__winShell === true) {
+ delete opts.__winShell;
+ parsed = {
+ command: cmd,
+ args,
+ options: opts,
+ file: cmd,
+ original: {
+ cmd,
+ args
+ }
+ };
+ } else {
+ parsed = crossSpawn._parse(cmd, args, opts);
+ }
+
+ opts = Object.assign({
+ maxBuffer: TEN_MEGABYTES,
+ buffer: true,
+ stripEof: true,
+ preferLocal: true,
+ localDir: parsed.options.cwd || process.cwd(),
+ encoding: 'utf8',
+ reject: true,
+ cleanup: true
+ }, parsed.options);
+
+ opts.stdio = stdio(opts);
+
+ if (opts.preferLocal) {
+ opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir}));
+ }
+
+ if (opts.detached) {
+ // #115
+ opts.cleanup = false;
+ }
+
+ if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') {
+ // #116
+ parsed.args.unshift('/q');
+ }
+
+ return {
+ cmd: parsed.command,
+ args: parsed.args,
+ opts,
+ parsed
+ };
+}
+
+function handleInput(spawned, input) {
+ if (input === null || input === undefined) {
+ return;
+ }
+
+ if (isStream(input)) {
+ input.pipe(spawned.stdin);
+ } else {
+ spawned.stdin.end(input);
+ }
+}
+
+function handleOutput(opts, val) {
+ if (val && opts.stripEof) {
+ val = stripEof(val);
+ }
+
+ return val;
+}
+
+function handleShell(fn, cmd, opts) {
+ let file = '/bin/sh';
+ let args = ['-c', cmd];
+
+ opts = Object.assign({}, opts);
+
+ if (process.platform === 'win32') {
+ opts.__winShell = true;
+ file = process.env.comspec || 'cmd.exe';
+ args = ['/s', '/c', `"${cmd}"`];
+ opts.windowsVerbatimArguments = true;
+ }
+
+ if (opts.shell) {
+ file = opts.shell;
+ delete opts.shell;
+ }
+
+ return fn(file, args, opts);
+}
+
+function getStream(process, stream, {encoding, buffer, maxBuffer}) {
+ if (!process[stream]) {
+ return null;
+ }
+
+ let ret;
+
+ if (!buffer) {
+ // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10
+ ret = new Promise((resolve, reject) => {
+ process[stream]
+ .once('end', resolve)
+ .once('error', reject);
+ });
+ } else if (encoding) {
+ ret = _getStream(process[stream], {
+ encoding,
+ maxBuffer
+ });
+ } else {
+ ret = _getStream.buffer(process[stream], {maxBuffer});
+ }
+
+ return ret.catch(err => {
+ err.stream = stream;
+ err.message = `${stream} ${err.message}`;
+ throw err;
+ });
+}
+
+function makeError(result, options) {
+ const {stdout, stderr} = result;
+
+ let err = result.error;
+ const {code, signal} = result;
+
+ const {parsed, joinedCmd} = options;
+ const timedOut = options.timedOut || false;
+
+ if (!err) {
+ let output = '';
+
+ if (Array.isArray(parsed.opts.stdio)) {
+ if (parsed.opts.stdio[2] !== 'inherit') {
+ output += output.length > 0 ? stderr : `\n${stderr}`;
+ }
+
+ if (parsed.opts.stdio[1] !== 'inherit') {
+ output += `\n${stdout}`;
+ }
+ } else if (parsed.opts.stdio !== 'inherit') {
+ output = `\n${stderr}${stdout}`;
+ }
+
+ err = new Error(`Command failed: ${joinedCmd}${output}`);
+ err.code = code < 0 ? errname(code) : code;
+ }
+
+ err.stdout = stdout;
+ err.stderr = stderr;
+ err.failed = true;
+ err.signal = signal || null;
+ err.cmd = joinedCmd;
+ err.timedOut = timedOut;
+
+ return err;
+}
+
+function joinCmd(cmd, args) {
+ let joinedCmd = cmd;
+
+ if (Array.isArray(args) && args.length > 0) {
+ joinedCmd += ' ' + args.join(' ');
+ }
+
+ return joinedCmd;
+}
+
+module.exports = (cmd, args, opts) => {
+ const parsed = handleArgs(cmd, args, opts);
+ const {encoding, buffer, maxBuffer} = parsed.opts;
+ const joinedCmd = joinCmd(cmd, args);
+
+ let spawned;
+ try {
+ spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts);
+ } catch (err) {
+ return Promise.reject(err);
+ }
+
+ let removeExitHandler;
+ if (parsed.opts.cleanup) {
+ removeExitHandler = onExit(() => {
+ spawned.kill();
+ });
+ }
+
+ let timeoutId = null;
+ let timedOut = false;
+
+ const cleanup = () => {
+ if (timeoutId) {
+ clearTimeout(timeoutId);
+ timeoutId = null;
+ }
+
+ if (removeExitHandler) {
+ removeExitHandler();
+ }
+ };
+
+ if (parsed.opts.timeout > 0) {
+ timeoutId = setTimeout(() => {
+ timeoutId = null;
+ timedOut = true;
+ spawned.kill(parsed.opts.killSignal);
+ }, parsed.opts.timeout);
+ }
+
+ const processDone = new Promise(resolve => {
+ spawned.on('exit', (code, signal) => {
+ cleanup();
+ resolve({code, signal});
+ });
+
+ spawned.on('error', err => {
+ cleanup();
+ resolve({error: err});
+ });
+
+ if (spawned.stdin) {
+ spawned.stdin.on('error', err => {
+ cleanup();
+ resolve({error: err});
+ });
+ }
+ });
+
+ function destroy() {
+ if (spawned.stdout) {
+ spawned.stdout.destroy();
+ }
+
+ if (spawned.stderr) {
+ spawned.stderr.destroy();
+ }
+ }
+
+ const handlePromise = () => pFinally(Promise.all([
+ processDone,
+ getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}),
+ getStream(spawned, 'stderr', {encoding, buffer, maxBuffer})
+ ]).then(arr => {
+ const result = arr[0];
+ result.stdout = arr[1];
+ result.stderr = arr[2];
+
+ if (result.error || result.code !== 0 || result.signal !== null) {
+ const err = makeError(result, {
+ joinedCmd,
+ parsed,
+ timedOut
+ });
+
+ // TODO: missing some timeout logic for killed
+ // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203
+ // err.killed = spawned.killed || killed;
+ err.killed = err.killed || spawned.killed;
+
+ if (!parsed.opts.reject) {
+ return err;
+ }
+
+ throw err;
+ }
+
+ return {
+ stdout: handleOutput(parsed.opts, result.stdout),
+ stderr: handleOutput(parsed.opts, result.stderr),
+ code: 0,
+ failed: false,
+ killed: false,
+ signal: null,
+ cmd: joinedCmd,
+ timedOut: false
+ };
+ }), destroy);
+
+ crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed);
+
+ handleInput(spawned, parsed.opts.input);
+
+ spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected);
+ spawned.catch = onrejected => handlePromise().catch(onrejected);
+
+ return spawned;
+};
+
+// TODO: set `stderr: 'ignore'` when that option is implemented
+module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout);
+
+// TODO: set `stdout: 'ignore'` when that option is implemented
+module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr);
+
+module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts);
+
+module.exports.sync = (cmd, args, opts) => {
+ const parsed = handleArgs(cmd, args, opts);
+ const joinedCmd = joinCmd(cmd, args);
+
+ if (isStream(parsed.opts.input)) {
+ throw new TypeError('The `input` option cannot be a stream in sync mode');
+ }
+
+ const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts);
+ result.code = result.status;
+
+ if (result.error || result.status !== 0 || result.signal !== null) {
+ const err = makeError(result, {
+ joinedCmd,
+ parsed
+ });
+
+ if (!parsed.opts.reject) {
+ return err;
+ }
+
+ throw err;
+ }
+
+ return {
+ stdout: handleOutput(parsed.opts, result.stdout),
+ stderr: handleOutput(parsed.opts, result.stderr),
+ code: 0,
+ failed: false,
+ signal: null,
+ cmd: joinedCmd,
+ timedOut: false
+ };
+};
+
+module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts);
+
+
+/***/ }),
+
+/***/ 966:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const {PassThrough} = __webpack_require__(413);
+
+module.exports = options => {
+ options = Object.assign({}, options);
+
+ const {array} = options;
+ let {encoding} = options;
+ const buffer = encoding === 'buffer';
+ let objectMode = false;
+
+ if (array) {
+ objectMode = !(encoding || buffer);
+ } else {
+ encoding = encoding || 'utf8';
+ }
+
+ if (buffer) {
+ encoding = null;
+ }
+
+ let len = 0;
+ const ret = [];
+ const stream = new PassThrough({objectMode});
+
+ if (encoding) {
+ stream.setEncoding(encoding);
+ }
+
+ stream.on('data', chunk => {
+ ret.push(chunk);
+
+ if (objectMode) {
+ len = ret.length;
+ } else {
+ len += chunk.length;
+ }
+ });
+
+ stream.getBufferedValue = () => {
+ if (array) {
+ return ret;
+ }
+
+ return buffer ? Buffer.concat(ret, len) : ret.join('');
+ };
+
+ stream.getBufferedLength = () => len;
+
+ return stream;
+};
+
+
+/***/ }),
+
+/***/ 969:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var wrappy = __webpack_require__(11)
+module.exports = wrappy(once)
+module.exports.strict = wrappy(onceStrict)
+
+once.proto = once(function () {
+ Object.defineProperty(Function.prototype, 'once', {
+ value: function () {
+ return once(this)
+ },
+ configurable: true
+ })
+
+ Object.defineProperty(Function.prototype, 'onceStrict', {
+ value: function () {
+ return onceStrict(this)
+ },
+ configurable: true
+ })
+})
+
+function once (fn) {
+ var f = function () {
+ if (f.called) return f.value
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ f.called = false
+ return f
+}
+
+function onceStrict (fn) {
+ var f = function () {
+ if (f.called)
+ throw new Error(f.onceError)
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ var name = fn.name || 'Function wrapped with `once`'
+ f.onceError = name + " shouldn't be called more than once"
+ f.called = false
+ return f
+}
+
+
/***/ }),
/***/ 972:
@@ -8438,7 +35743,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
-const tr = __importStar(__webpack_require__(9));
+const tr = __importStar(__webpack_require__(686));
/**
* Exec a command.
* Output will be streamed to the live console.
diff --git a/dist/run/index.js b/dist/run/index.js
index 1dcb0c4..174d6d0 100644
--- a/dist/run/index.js
+++ b/dist/run/index.js
@@ -340,610 +340,565 @@ function copyFile(srcFile, destFile, force) {
/***/ }),
-/***/ 9:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/***/ 2:
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
+const os = __webpack_require__(87);
+const macosRelease = __webpack_require__(118);
+const winRelease = __webpack_require__(49);
+
+const osName = (platform, release) => {
+ if (!platform && release) {
+ throw new Error('You can\'t specify a `release` without specifying `platform`');
+ }
+
+ platform = platform || os.platform();
+
+ let id;
+
+ if (platform === 'darwin') {
+ if (!release && os.platform() === 'darwin') {
+ release = os.release();
+ }
+
+ const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS';
+ id = release ? macosRelease(release).name : '';
+ return prefix + (id ? ' ' + id : '');
+ }
+
+ if (platform === 'linux') {
+ if (!release && os.platform() === 'linux') {
+ release = os.release();
+ }
+
+ id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : '';
+ return 'Linux' + (id ? ' ' + id : '');
+ }
+
+ if (platform === 'win32') {
+ if (!release && os.platform() === 'win32') {
+ release = os.release();
+ }
+
+ id = release ? winRelease(release) : '';
+ return 'Windows' + (id ? ' ' + id : '');
+ }
+
+ return platform;
};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
+
+module.exports = osName;
+
+
+/***/ }),
+
+/***/ 9:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var once = __webpack_require__(969);
+
+var noop = function() {};
+
+var isRequest = function(stream) {
+ return stream.setHeader && typeof stream.abort === 'function';
};
-Object.defineProperty(exports, "__esModule", { value: true });
-const os = __importStar(__webpack_require__(87));
-const events = __importStar(__webpack_require__(614));
-const child = __importStar(__webpack_require__(129));
-const path = __importStar(__webpack_require__(622));
-const io = __importStar(__webpack_require__(1));
-const ioUtil = __importStar(__webpack_require__(672));
-/* eslint-disable @typescript-eslint/unbound-method */
-const IS_WINDOWS = process.platform === 'win32';
-/*
- * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
- */
-class ToolRunner extends events.EventEmitter {
- constructor(toolPath, args, options) {
- super();
- if (!toolPath) {
- throw new Error("Parameter 'toolPath' cannot be null or empty.");
- }
- this.toolPath = toolPath;
- this.args = args || [];
- this.options = options || {};
+
+var isChildProcess = function(stream) {
+ return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
+};
+
+var eos = function(stream, opts, callback) {
+ if (typeof opts === 'function') return eos(stream, null, opts);
+ if (!opts) opts = {};
+
+ callback = once(callback || noop);
+
+ var ws = stream._writableState;
+ var rs = stream._readableState;
+ var readable = opts.readable || (opts.readable !== false && stream.readable);
+ var writable = opts.writable || (opts.writable !== false && stream.writable);
+ var cancelled = false;
+
+ var onlegacyfinish = function() {
+ if (!stream.writable) onfinish();
+ };
+
+ var onfinish = function() {
+ writable = false;
+ if (!readable) callback.call(stream);
+ };
+
+ var onend = function() {
+ readable = false;
+ if (!writable) callback.call(stream);
+ };
+
+ var onexit = function(exitCode) {
+ callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
+ };
+
+ var onerror = function(err) {
+ callback.call(stream, err);
+ };
+
+ var onclose = function() {
+ process.nextTick(onclosenexttick);
+ };
+
+ var onclosenexttick = function() {
+ if (cancelled) return;
+ if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
+ if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
+ };
+
+ var onrequest = function() {
+ stream.req.on('finish', onfinish);
+ };
+
+ if (isRequest(stream)) {
+ stream.on('complete', onfinish);
+ stream.on('abort', onclose);
+ if (stream.req) onrequest();
+ else stream.on('request', onrequest);
+ } else if (writable && !ws) { // legacy streams
+ stream.on('end', onlegacyfinish);
+ stream.on('close', onlegacyfinish);
+ }
+
+ if (isChildProcess(stream)) stream.on('exit', onexit);
+
+ stream.on('end', onend);
+ stream.on('finish', onfinish);
+ if (opts.error !== false) stream.on('error', onerror);
+ stream.on('close', onclose);
+
+ return function() {
+ cancelled = true;
+ stream.removeListener('complete', onfinish);
+ stream.removeListener('abort', onclose);
+ stream.removeListener('request', onrequest);
+ if (stream.req) stream.req.removeListener('finish', onfinish);
+ stream.removeListener('end', onlegacyfinish);
+ stream.removeListener('close', onlegacyfinish);
+ stream.removeListener('finish', onfinish);
+ stream.removeListener('exit', onexit);
+ stream.removeListener('end', onend);
+ stream.removeListener('error', onerror);
+ stream.removeListener('close', onclose);
+ };
+};
+
+module.exports = eos;
+
+
+/***/ }),
+
+/***/ 11:
+/***/ (function(module) {
+
+// Returns a wrapper function that returns a wrapped callback
+// The wrapper function should do some stuff, and return a
+// presumably different callback function.
+// This makes sure that own properties are retained, so that
+// decorations and such are not lost along the way.
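+//
+// For illustration only (this comment is not part of the upstream source): the
+// bundled `once` module applies this as `module.exports = wrappy(once)`, so the
+// function returned by `once(cb)` keeps any own properties that were set on `cb`.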
+module.exports = wrappy
+function wrappy (fn, cb) {
+ if (fn && cb) return wrappy(fn)(cb)
+
+ if (typeof fn !== 'function')
+ throw new TypeError('need wrapper function')
+
+ Object.keys(fn).forEach(function (k) {
+ wrapper[k] = fn[k]
+ })
+
+ return wrapper
+
+ function wrapper() {
+ var args = new Array(arguments.length)
+ for (var i = 0; i < args.length; i++) {
+ args[i] = arguments[i]
}
- _debug(message) {
- if (this.options.listeners && this.options.listeners.debug) {
- this.options.listeners.debug(message);
- }
- }
- _getCommandString(options, noPrefix) {
- const toolPath = this._getSpawnFileName();
- const args = this._getSpawnArgs(options);
- let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
- if (IS_WINDOWS) {
- // Windows + cmd file
- if (this._isCmdFile()) {
- cmd += toolPath;
- for (const a of args) {
- cmd += ` ${a}`;
- }
- }
- // Windows + verbatim
- else if (options.windowsVerbatimArguments) {
- cmd += `"${toolPath}"`;
- for (const a of args) {
- cmd += ` ${a}`;
- }
- }
- // Windows (regular)
- else {
- cmd += this._windowsQuoteCmdArg(toolPath);
- for (const a of args) {
- cmd += ` ${this._windowsQuoteCmdArg(a)}`;
- }
- }
- }
- else {
- // OSX/Linux - this can likely be improved with some form of quoting.
- // creating processes on Unix is fundamentally different than Windows.
- // on Unix, execvp() takes an arg array.
- cmd += toolPath;
- for (const a of args) {
- cmd += ` ${a}`;
- }
- }
- return cmd;
- }
- _processLineBuffer(data, strBuffer, onLine) {
- try {
- let s = strBuffer + data.toString();
- let n = s.indexOf(os.EOL);
- while (n > -1) {
- const line = s.substring(0, n);
- onLine(line);
- // the rest of the string ...
- s = s.substring(n + os.EOL.length);
- n = s.indexOf(os.EOL);
- }
- strBuffer = s;
- }
- catch (err) {
- // streaming lines to console is best effort. Don't fail a build.
- this._debug(`error processing line. Failed with error ${err}`);
- }
- }
- _getSpawnFileName() {
- if (IS_WINDOWS) {
- if (this._isCmdFile()) {
- return process.env['COMSPEC'] || 'cmd.exe';
- }
- }
- return this.toolPath;
- }
- _getSpawnArgs(options) {
- if (IS_WINDOWS) {
- if (this._isCmdFile()) {
- let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
- for (const a of this.args) {
- argline += ' ';
- argline += options.windowsVerbatimArguments
- ? a
- : this._windowsQuoteCmdArg(a);
- }
- argline += '"';
- return [argline];
- }
- }
- return this.args;
- }
- _endsWith(str, end) {
- return str.endsWith(end);
- }
- _isCmdFile() {
- const upperToolPath = this.toolPath.toUpperCase();
- return (this._endsWith(upperToolPath, '.CMD') ||
- this._endsWith(upperToolPath, '.BAT'));
- }
- _windowsQuoteCmdArg(arg) {
- // for .exe, apply the normal quoting rules that libuv applies
- if (!this._isCmdFile()) {
- return this._uvQuoteCmdArg(arg);
- }
- // otherwise apply quoting rules specific to the cmd.exe command line parser.
- // the libuv rules are generic and are not designed specifically for cmd.exe
- // command line parser.
- //
- // for a detailed description of the cmd.exe command line parser, refer to
- // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
- // need quotes for empty arg
- if (!arg) {
- return '""';
- }
- // determine whether the arg needs to be quoted
- const cmdSpecialChars = [
- ' ',
- '\t',
- '&',
- '(',
- ')',
- '[',
- ']',
- '{',
- '}',
- '^',
- '=',
- ';',
- '!',
- "'",
- '+',
- ',',
- '`',
- '~',
- '|',
- '<',
- '>',
- '"'
- ];
- let needsQuotes = false;
- for (const char of arg) {
- if (cmdSpecialChars.some(x => x === char)) {
- needsQuotes = true;
- break;
- }
- }
- // short-circuit if quotes not needed
- if (!needsQuotes) {
- return arg;
- }
- // the following quoting rules are very similar to the rules that by libuv applies.
- //
- // 1) wrap the string in quotes
- //
- // 2) double-up quotes - i.e. " => ""
- //
- // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
- // doesn't work well with a cmd.exe command line.
- //
- // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
- // for example, the command line:
- // foo.exe "myarg:""my val"""
- // is parsed by a .NET console app into an arg array:
- // [ "myarg:\"my val\"" ]
- // which is the same end result when applying libuv quoting rules. although the actual
- // command line from libuv quoting rules would look like:
- // foo.exe "myarg:\"my val\""
- //
- // 3) double-up slashes that precede a quote,
- // e.g. hello \world => "hello \world"
- // hello\"world => "hello\\""world"
- // hello\\"world => "hello\\\\""world"
- // hello world\ => "hello world\\"
- //
- // technically this is not required for a cmd.exe command line, or the batch argument parser.
- // the reasons for including this as a .cmd quoting rule are:
- //
- // a) this is optimized for the scenario where the argument is passed from the .cmd file to an
- // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
- //
- // b) it's what we've been doing previously (by deferring to node default behavior) and we
- // haven't heard any complaints about that aspect.
- //
- // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
- // escaped when used on the command line directly - even though within a .cmd file % can be escaped
- // by using %%.
- //
- // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
- // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
- //
- // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
- // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
- // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
- // to an external program.
- //
- // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
- // % can be escaped within a .cmd file.
- let reverse = '"';
- let quoteHit = true;
- for (let i = arg.length; i > 0; i--) {
- // walk the string in reverse
- reverse += arg[i - 1];
- if (quoteHit && arg[i - 1] === '\\') {
- reverse += '\\'; // double the slash
- }
- else if (arg[i - 1] === '"') {
- quoteHit = true;
- reverse += '"'; // double the quote
- }
- else {
- quoteHit = false;
- }
- }
- reverse += '"';
- return reverse
- .split('')
- .reverse()
- .join('');
- }
- _uvQuoteCmdArg(arg) {
- // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
- // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
- // is used.
- //
- // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
- // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
- // pasting copyright notice from Node within this function:
- //
- // Copyright Joyent, Inc. and other Node contributors. All rights reserved.
- //
- // Permission is hereby granted, free of charge, to any person obtaining a copy
- // of this software and associated documentation files (the "Software"), to
- // deal in the Software without restriction, including without limitation the
- // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- // sell copies of the Software, and to permit persons to whom the Software is
- // furnished to do so, subject to the following conditions:
- //
- // The above copyright notice and this permission notice shall be included in
- // all copies or substantial portions of the Software.
- //
- // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
- // IN THE SOFTWARE.
- if (!arg) {
- // Need double quotation for empty argument
- return '""';
- }
- if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
- // No quotation needed
- return arg;
- }
- if (!arg.includes('"') && !arg.includes('\\')) {
- // No embedded double quotes or backslashes, so I can just wrap
- // quote marks around the whole thing.
- return `"${arg}"`;
- }
- // Expected input/output:
- // input : hello"world
- // output: "hello\"world"
- // input : hello""world
- // output: "hello\"\"world"
- // input : hello\world
- // output: hello\world
- // input : hello\\world
- // output: hello\\world
- // input : hello\"world
- // output: "hello\\\"world"
- // input : hello\\"world
- // output: "hello\\\\\"world"
- // input : hello world\
- // output: "hello world\\" - note the comment in libuv actually reads "hello world\"
- // but it appears the comment is wrong, it should be "hello world\\"
- let reverse = '"';
- let quoteHit = true;
- for (let i = arg.length; i > 0; i--) {
- // walk the string in reverse
- reverse += arg[i - 1];
- if (quoteHit && arg[i - 1] === '\\') {
- reverse += '\\';
- }
- else if (arg[i - 1] === '"') {
- quoteHit = true;
- reverse += '\\';
- }
- else {
- quoteHit = false;
- }
- }
- reverse += '"';
- return reverse
- .split('')
- .reverse()
- .join('');
- }
- _cloneExecOptions(options) {
- options = options || {};
- const result = {
- cwd: options.cwd || process.cwd(),
- env: options.env || process.env,
- silent: options.silent || false,
- windowsVerbatimArguments: options.windowsVerbatimArguments || false,
- failOnStdErr: options.failOnStdErr || false,
- ignoreReturnCode: options.ignoreReturnCode || false,
- delay: options.delay || 10000
- };
- result.outStream = options.outStream || process.stdout;
- result.errStream = options.errStream || process.stderr;
- return result;
- }
- _getSpawnOptions(options, toolPath) {
- options = options || {};
- const result = {};
- result.cwd = options.cwd;
- result.env = options.env;
- result['windowsVerbatimArguments'] =
- options.windowsVerbatimArguments || this._isCmdFile();
- if (options.windowsVerbatimArguments) {
- result.argv0 = `"${toolPath}"`;
- }
- return result;
- }
- /**
- * Exec a tool.
- * Output will be streamed to the live console.
- * Returns promise with return code
- *
- * @param tool path to tool to exec
- * @param options optional exec options. See ExecOptions
- * @returns number
- */
- exec() {
- return __awaiter(this, void 0, void 0, function* () {
- // root the tool path if it is unrooted and contains relative pathing
- if (!ioUtil.isRooted(this.toolPath) &&
- (this.toolPath.includes('/') ||
- (IS_WINDOWS && this.toolPath.includes('\\')))) {
- // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
- this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
- }
- // if the tool is only a file name, then resolve it from the PATH
- // otherwise verify it exists (add extension on Windows if necessary)
- this.toolPath = yield io.which(this.toolPath, true);
- return new Promise((resolve, reject) => {
- this._debug(`exec tool: ${this.toolPath}`);
- this._debug('arguments:');
- for (const arg of this.args) {
- this._debug(` ${arg}`);
- }
- const optionsNonNull = this._cloneExecOptions(this.options);
- if (!optionsNonNull.silent && optionsNonNull.outStream) {
- optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
- }
- const state = new ExecState(optionsNonNull, this.toolPath);
- state.on('debug', (message) => {
- this._debug(message);
- });
- const fileName = this._getSpawnFileName();
- const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
- const stdbuffer = '';
- if (cp.stdout) {
- cp.stdout.on('data', (data) => {
- if (this.options.listeners && this.options.listeners.stdout) {
- this.options.listeners.stdout(data);
- }
- if (!optionsNonNull.silent && optionsNonNull.outStream) {
- optionsNonNull.outStream.write(data);
- }
- this._processLineBuffer(data, stdbuffer, (line) => {
- if (this.options.listeners && this.options.listeners.stdline) {
- this.options.listeners.stdline(line);
- }
- });
- });
- }
- const errbuffer = '';
- if (cp.stderr) {
- cp.stderr.on('data', (data) => {
- state.processStderr = true;
- if (this.options.listeners && this.options.listeners.stderr) {
- this.options.listeners.stderr(data);
- }
- if (!optionsNonNull.silent &&
- optionsNonNull.errStream &&
- optionsNonNull.outStream) {
- const s = optionsNonNull.failOnStdErr
- ? optionsNonNull.errStream
- : optionsNonNull.outStream;
- s.write(data);
- }
- this._processLineBuffer(data, errbuffer, (line) => {
- if (this.options.listeners && this.options.listeners.errline) {
- this.options.listeners.errline(line);
- }
- });
- });
- }
- cp.on('error', (err) => {
- state.processError = err.message;
- state.processExited = true;
- state.processClosed = true;
- state.CheckComplete();
- });
- cp.on('exit', (code) => {
- state.processExitCode = code;
- state.processExited = true;
- this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
- state.CheckComplete();
- });
- cp.on('close', (code) => {
- state.processExitCode = code;
- state.processExited = true;
- state.processClosed = true;
- this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
- state.CheckComplete();
- });
- state.on('done', (error, exitCode) => {
- if (stdbuffer.length > 0) {
- this.emit('stdline', stdbuffer);
- }
- if (errbuffer.length > 0) {
- this.emit('errline', errbuffer);
- }
- cp.removeAllListeners();
- if (error) {
- reject(error);
- }
- else {
- resolve(exitCode);
- }
- });
- if (this.options.input) {
- if (!cp.stdin) {
- throw new Error('child process missing stdin');
- }
- cp.stdin.end(this.options.input);
- }
- });
- });
+ var ret = fn.apply(this, args)
+ var cb = args[args.length-1]
+ if (typeof ret === 'function' && ret !== cb) {
+ Object.keys(cb).forEach(function (k) {
+ ret[k] = cb[k]
+ })
}
+ return ret
+ }
}
-exports.ToolRunner = ToolRunner;
-/**
- * Convert an arg string to an array of args. Handles escaping
- *
- * @param argString string of arguments
- * @returns string[] array of arguments
- */
-function argStringToArray(argString) {
- const args = [];
- let inQuotes = false;
- let escaped = false;
- let arg = '';
- function append(c) {
- // we only escape double quotes.
- if (escaped && c !== '"') {
- arg += '\\';
- }
- arg += c;
- escaped = false;
- }
- for (let i = 0; i < argString.length; i++) {
- const c = argString.charAt(i);
- if (c === '"') {
- if (!escaped) {
- inQuotes = !inQuotes;
- }
- else {
- append(c);
- }
- continue;
- }
- if (c === '\\' && escaped) {
- append(c);
- continue;
- }
- if (c === '\\' && inQuotes) {
- escaped = true;
- continue;
- }
- if (c === ' ' && !inQuotes) {
- if (arg.length > 0) {
- args.push(arg);
- arg = '';
- }
- continue;
- }
- append(c);
- }
- if (arg.length > 0) {
- args.push(arg.trim());
- }
- return args;
+
+
+/***/ }),
+
+/***/ 13:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const assert = __webpack_require__(357)
+const path = __webpack_require__(622)
+const fs = __webpack_require__(747)
+let glob = undefined
+try {
+ glob = __webpack_require__(402)
+} catch (_err) {
+ // treat glob as optional.
}
-exports.argStringToArray = argStringToArray;
-class ExecState extends events.EventEmitter {
- constructor(options, toolPath) {
- super();
- this.processClosed = false; // tracks whether the process has exited and stdio is closed
- this.processError = '';
- this.processExitCode = 0;
- this.processExited = false; // tracks whether the process has exited
- this.processStderr = false; // tracks whether stderr was written to
- this.delay = 10000; // 10 seconds
- this.done = false;
- this.timeout = null;
- if (!toolPath) {
- throw new Error('toolPath must not be empty');
- }
- this.options = options;
- this.toolPath = toolPath;
- if (options.delay) {
- this.delay = options.delay;
- }
- }
- CheckComplete() {
- if (this.done) {
- return;
- }
- if (this.processClosed) {
- this._setResult();
- }
- else if (this.processExited) {
- this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
- }
- }
- _debug(message) {
- this.emit('debug', message);
- }
- _setResult() {
- // determine whether there is an error
- let error;
- if (this.processExited) {
- if (this.processError) {
- error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
- }
- else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
- error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
- }
- else if (this.processStderr && this.options.failOnStdErr) {
- error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
- }
- }
- // clear the timeout
- if (this.timeout) {
- clearTimeout(this.timeout);
- this.timeout = null;
- }
- this.done = true;
- this.emit('done', error, this.processExitCode);
- }
- static HandleTimeout(state) {
- if (state.done) {
- return;
- }
- if (!state.processClosed && state.processExited) {
- const message = `The STDIO streams did not close within ${state.delay /
- 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
- state._debug(message);
- }
- state._setResult();
- }
+
+const defaultGlobOpts = {
+ nosort: true,
+ silent: true
}
-//# sourceMappingURL=toolrunner.js.map
+
+// for EMFILE handling
+let timeout = 0
+
+const isWindows = (process.platform === "win32")
+
+const defaults = options => {
+ const methods = [
+ 'unlink',
+ 'chmod',
+ 'stat',
+ 'lstat',
+ 'rmdir',
+ 'readdir'
+ ]
+ methods.forEach(m => {
+ options[m] = options[m] || fs[m]
+ m = m + 'Sync'
+ options[m] = options[m] || fs[m]
+ })
+
+ options.maxBusyTries = options.maxBusyTries || 3
+ options.emfileWait = options.emfileWait || 1000
+ if (options.glob === false) {
+ options.disableGlob = true
+ }
+ if (options.disableGlob !== true && glob === undefined) {
+ throw Error('glob dependency not found, set `options.disableGlob = true` if intentional')
+ }
+ options.disableGlob = options.disableGlob || false
+ options.glob = options.glob || defaultGlobOpts
+}
+
+const rimraf = (p, options, cb) => {
+ if (typeof options === 'function') {
+ cb = options
+ options = {}
+ }
+
+ assert(p, 'rimraf: missing path')
+ assert.equal(typeof p, 'string', 'rimraf: path should be a string')
+ assert.equal(typeof cb, 'function', 'rimraf: callback function required')
+ assert(options, 'rimraf: invalid options argument provided')
+ assert.equal(typeof options, 'object', 'rimraf: options should be object')
+
+ defaults(options)
+
+ let busyTries = 0
+ let errState = null
+ let n = 0
+
+ const next = (er) => {
+ errState = errState || er
+ if (--n === 0)
+ cb(errState)
+ }
+
+ const afterGlob = (er, results) => {
+ if (er)
+ return cb(er)
+
+ n = results.length
+ if (n === 0)
+ return cb()
+
+ results.forEach(p => {
+ const CB = (er) => {
+ if (er) {
+ if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
+ busyTries < options.maxBusyTries) {
+ busyTries ++
+ // try again, with the same exact callback as this one.
+ return setTimeout(() => rimraf_(p, options, CB), busyTries * 100)
+ }
+
+ // this one won't happen if graceful-fs is used.
+ if (er.code === "EMFILE" && timeout < options.emfileWait) {
+ return setTimeout(() => rimraf_(p, options, CB), timeout ++)
+ }
+
+ // already gone
+ if (er.code === "ENOENT") er = null
+ }
+
+ timeout = 0
+ next(er)
+ }
+ rimraf_(p, options, CB)
+ })
+ }
+
+ if (options.disableGlob || !glob.hasMagic(p))
+ return afterGlob(null, [p])
+
+ options.lstat(p, (er, stat) => {
+ if (!er)
+ return afterGlob(null, [p])
+
+ glob(p, options.glob, afterGlob)
+ })
+
+}
+
+// Two possible strategies.
+// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
+// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
+//
+// Both result in an extra syscall when you guess wrong. However, there
+// are likely far more normal files in the world than directories. This
+// is based on the assumption that the average number of files per
+// directory is >= 1.
+//
+// If anyone ever complains about this, then I guess the strategy could
+// be made configurable somehow. But until then, YAGNI.
+const rimraf_ = (p, options, cb) => {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ // sunos lets the root user unlink directories, which is... weird.
+ // so we have to lstat here and make sure it's not a dir.
+ options.lstat(p, (er, st) => {
+ if (er && er.code === "ENOENT")
+ return cb(null)
+
+ // Windows can EPERM on stat. Life is suffering.
+ if (er && er.code === "EPERM" && isWindows)
+ fixWinEPERM(p, options, er, cb)
+
+ if (st && st.isDirectory())
+ return rmdir(p, options, er, cb)
+
+ options.unlink(p, er => {
+ if (er) {
+ if (er.code === "ENOENT")
+ return cb(null)
+ if (er.code === "EPERM")
+ return (isWindows)
+ ? fixWinEPERM(p, options, er, cb)
+ : rmdir(p, options, er, cb)
+ if (er.code === "EISDIR")
+ return rmdir(p, options, er, cb)
+ }
+ return cb(er)
+ })
+ })
+}
+
+const fixWinEPERM = (p, options, er, cb) => {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ options.chmod(p, 0o666, er2 => {
+ if (er2)
+ cb(er2.code === "ENOENT" ? null : er)
+ else
+ options.stat(p, (er3, stats) => {
+ if (er3)
+ cb(er3.code === "ENOENT" ? null : er)
+ else if (stats.isDirectory())
+ rmdir(p, options, er, cb)
+ else
+ options.unlink(p, cb)
+ })
+ })
+}
+
+const fixWinEPERMSync = (p, options, er) => {
+ assert(p)
+ assert(options)
+
+ try {
+ options.chmodSync(p, 0o666)
+ } catch (er2) {
+ if (er2.code === "ENOENT")
+ return
+ else
+ throw er
+ }
+
+ let stats
+ try {
+ stats = options.statSync(p)
+ } catch (er3) {
+ if (er3.code === "ENOENT")
+ return
+ else
+ throw er
+ }
+
+ if (stats.isDirectory())
+ rmdirSync(p, options, er)
+ else
+ options.unlinkSync(p)
+}
+
+const rmdir = (p, options, originalEr, cb) => {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
+ // if we guessed wrong, and it's not a directory, then
+ // raise the original error.
+ options.rmdir(p, er => {
+ if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM"))
+ rmkids(p, options, cb)
+ else if (er && er.code === "ENOTDIR")
+ cb(originalEr)
+ else
+ cb(er)
+ })
+}
+
+const rmkids = (p, options, cb) => {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ options.readdir(p, (er, files) => {
+ if (er)
+ return cb(er)
+ let n = files.length
+ if (n === 0)
+ return options.rmdir(p, cb)
+ let errState
+ files.forEach(f => {
+ rimraf(path.join(p, f), options, er => {
+ if (errState)
+ return
+ if (er)
+ return cb(errState = er)
+ if (--n === 0)
+ options.rmdir(p, cb)
+ })
+ })
+ })
+}
+
+// this looks simpler, and is strictly *faster*, but will
+// tie up the JavaScript thread and fail on excessively
+// deep directory trees.
+const rimrafSync = (p, options) => {
+ options = options || {}
+ defaults(options)
+
+ assert(p, 'rimraf: missing path')
+ assert.equal(typeof p, 'string', 'rimraf: path should be a string')
+ assert(options, 'rimraf: missing options')
+ assert.equal(typeof options, 'object', 'rimraf: options should be object')
+
+ let results
+
+ if (options.disableGlob || !glob.hasMagic(p)) {
+ results = [p]
+ } else {
+ try {
+ options.lstatSync(p)
+ results = [p]
+ } catch (er) {
+ results = glob.sync(p, options.glob)
+ }
+ }
+
+ if (!results.length)
+ return
+
+ for (let i = 0; i < results.length; i++) {
+ const p = results[i]
+
+ let st
+ try {
+ st = options.lstatSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+
+ // Windows can EPERM on stat. Life is suffering.
+ if (er.code === "EPERM" && isWindows)
+ fixWinEPERMSync(p, options, er)
+ }
+
+ try {
+ // sunos lets the root user unlink directories, which is... weird.
+ if (st && st.isDirectory())
+ rmdirSync(p, options, null)
+ else
+ options.unlinkSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+ if (er.code === "EPERM")
+ return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
+ if (er.code !== "EISDIR")
+ throw er
+
+ rmdirSync(p, options, er)
+ }
+ }
+}
+
+const rmdirSync = (p, options, originalEr) => {
+ assert(p)
+ assert(options)
+
+ try {
+ options.rmdirSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+ if (er.code === "ENOTDIR")
+ throw originalEr
+ if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")
+ rmkidsSync(p, options)
+ }
+}
+
+const rmkidsSync = (p, options) => {
+ assert(p)
+ assert(options)
+ options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))
+
+ // We only end up here once we got ENOTEMPTY at least once, and
+ // at this point, we are guaranteed to have removed all the kids.
+ // So, we know that it won't be ENOENT or ENOTDIR or anything else.
+ // try really hard to delete stuff on windows, because it has a
+ // PROFOUNDLY annoying habit of not closing handles promptly when
+ // files are deleted, resulting in spurious ENOTEMPTY errors.
+ const retries = isWindows ? 100 : 1
+ let i = 0
+ do {
+ let threw = true
+ try {
+ const ret = options.rmdirSync(p, options)
+ threw = false
+ return ret
+ } finally {
+ if (++i < retries && threw)
+ continue
+ }
+ } while (true)
+}
+
+module.exports = rimraf
+rimraf.sync = rimrafSync
+
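For reference, webpack module 13 above is the vendored `rimraf` package. A minimal usage sketch follows; it is illustrative only, assumes the standard npm `rimraf` entry point rather than the webpack module id, and the paths are hypothetical.

const rimraf = require('rimraf');

// Asynchronous removal; EBUSY/ENOTEMPTY/EPERM errors are retried up to options.maxBusyTries times (default 3).
rimraf('build/output', { disableGlob: true }, (err) => {
  if (err) throw err;
  console.log('removed build/output');
});

// Synchronous variant; faster, but blocks the JavaScript thread on deep directory trees.
rimraf.sync('tmp/cache');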
/***/ }),
@@ -952,6 +907,1745 @@ class ExecState extends events.EventEmitter {
module.exports = require("tls");
+/***/ }),
+
+/***/ 18:
+/***/ (function() {
+
+eval("require")("encoding");
+
+
+/***/ }),
+
+/***/ 20:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const cp = __webpack_require__(129);
+const parse = __webpack_require__(568);
+const enoent = __webpack_require__(881);
+
+function spawn(command, args, options) {
+ // Parse the arguments
+ const parsed = parse(command, args, options);
+
+ // Spawn the child process
+ const spawned = cp.spawn(parsed.command, parsed.args, parsed.options);
+
+ // Hook into child process "exit" event to emit an error if the command
+// does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
+ enoent.hookChildProcess(spawned, parsed);
+
+ return spawned;
+}
+
+function spawnSync(command, args, options) {
+ // Parse the arguments
+ const parsed = parse(command, args, options);
+
+ // Spawn the child process
+ const result = cp.spawnSync(parsed.command, parsed.args, parsed.options);
+
+ // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
+ result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);
+
+ return result;
+}
+
+module.exports = spawn;
+module.exports.spawn = spawn;
+module.exports.sync = spawnSync;
+
+module.exports._parse = parse;
+module.exports._enoent = enoent;
+
+
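For reference, webpack module 20 above is the vendored `cross-spawn` package, which wraps child_process so that missing commands surface as ENOENT errors on every platform. A minimal usage sketch, assuming the standard npm package name; the commands shown are only examples.

const spawn = require('cross-spawn');

// Asynchronous: hookChildProcess emits an ENOENT error on the child if the command is missing.
const child = spawn('npm', ['--version'], { stdio: 'inherit' });
child.on('close', (code) => console.log('exited with code', code));

// Synchronous: result.error is filled in via verifyENOENTSync when the command does not exist.
const result = spawn.sync('npm', ['--version'], { encoding: 'utf8' });
console.log(result.stdout);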
+/***/ }),
+
+/***/ 34:
+/***/ (function(module) {
+
+module.exports = require("https");
+
+/***/ }),
+
+/***/ 36:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = parseOptions;
+
+const { Deprecation } = __webpack_require__(692);
+const { getUserAgent } = __webpack_require__(796);
+const once = __webpack_require__(969);
+
+const pkg = __webpack_require__(919);
+
+const deprecateOptionsTimeout = once((log, deprecation) =>
+ log.warn(deprecation)
+);
+const deprecateOptionsAgent = once((log, deprecation) => log.warn(deprecation));
+const deprecateOptionsHeaders = once((log, deprecation) =>
+ log.warn(deprecation)
+);
+
+function parseOptions(options, log, hook) {
+ if (options.headers) {
+ options.headers = Object.keys(options.headers).reduce((newObj, key) => {
+ newObj[key.toLowerCase()] = options.headers[key];
+ return newObj;
+ }, {});
+ }
+
+ const clientDefaults = {
+ headers: options.headers || {},
+ request: options.request || {},
+ mediaType: {
+ previews: [],
+ format: ""
+ }
+ };
+
+ if (options.baseUrl) {
+ clientDefaults.baseUrl = options.baseUrl;
+ }
+
+ if (options.userAgent) {
+ clientDefaults.headers["user-agent"] = options.userAgent;
+ }
+
+ if (options.previews) {
+ clientDefaults.mediaType.previews = options.previews;
+ }
+
+ if (options.timeZone) {
+ clientDefaults.headers["time-zone"] = options.timeZone;
+ }
+
+ if (options.timeout) {
+ deprecateOptionsTimeout(
+ log,
+ new Deprecation(
+ "[@octokit/rest] new Octokit({timeout}) is deprecated. Use {request: {timeout}} instead. See https://github.com/octokit/request.js#request"
+ )
+ );
+ clientDefaults.request.timeout = options.timeout;
+ }
+
+ if (options.agent) {
+ deprecateOptionsAgent(
+ log,
+ new Deprecation(
+ "[@octokit/rest] new Octokit({agent}) is deprecated. Use {request: {agent}} instead. See https://github.com/octokit/request.js#request"
+ )
+ );
+ clientDefaults.request.agent = options.agent;
+ }
+
+ if (options.headers) {
+ deprecateOptionsHeaders(
+ log,
+ new Deprecation(
+ "[@octokit/rest] new Octokit({headers}) is deprecated. Use {userAgent, previews} instead. See https://github.com/octokit/request.js#request"
+ )
+ );
+ }
+
+ const userAgentOption = clientDefaults.headers["user-agent"];
+ const defaultUserAgent = `octokit.js/${pkg.version} ${getUserAgent()}`;
+
+ clientDefaults.headers["user-agent"] = [userAgentOption, defaultUserAgent]
+ .filter(Boolean)
+ .join(" ");
+
+ clientDefaults.request.hook = hook.bind(null, "request");
+
+ return clientDefaults;
+}
+
+
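For reference, `parseOptions` above normalizes the options passed to the Octokit constructor and logs deprecation warnings for the old top-level `timeout`, `agent`, and `headers` options. A hedged sketch of the migration those warnings describe, assuming the v16-style `@octokit/rest` constructor bundled here; the values are illustrative only.

const Octokit = require('@octokit/rest');

// Deprecated: new Octokit({ timeout: 5000 }) triggers deprecateOptionsTimeout above.
// Preferred: pass request-level options under `request`.
const octokit = new Octokit({
  userAgent: 'my-action/1.0.0', // hypothetical; merged with the default octokit.js user agent
  request: { timeout: 5000 }
});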
+/***/ }),
+
+/***/ 37:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const factory = __webpack_require__(513);
+
+module.exports = factory();
+
+
+/***/ }),
+
+/***/ 39:
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = opts => {
+ opts = opts || {};
+
+ const env = opts.env || process.env;
+ const platform = opts.platform || process.platform;
+
+ if (platform !== 'win32') {
+ return 'PATH';
+ }
+
+ return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path';
+};
+
+
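For reference, webpack module 39 above is the vendored `path-key` helper: it returns the name of the environment variable that holds the executable search path, because Windows environments may spell it `Path` rather than `PATH`. A minimal sketch, assuming the standard npm `path-key` package; the env value is hypothetical.

const pathKey = require('path-key');

console.log(pathKey()); // 'PATH' on POSIX platforms
console.log(pathKey({ platform: 'win32', env: { Path: 'C:\\Windows\\system32' } })); // 'Path'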
+/***/ }),
+
+/***/ 48:
+/***/ (function(module, exports) {
+
+exports = module.exports = SemVer
+
+var debug
+/* istanbul ignore next */
+if (typeof process === 'object' &&
+ process.env &&
+ process.env.NODE_DEBUG &&
+ /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
+ debug = function () {
+ var args = Array.prototype.slice.call(arguments, 0)
+ args.unshift('SEMVER')
+ console.log.apply(console, args)
+ }
+} else {
+ debug = function () {}
+}
+
+// Note: this is the semver.org version of the spec that it implements
+// Not necessarily the package version of this code.
+exports.SEMVER_SPEC_VERSION = '2.0.0'
+
+var MAX_LENGTH = 256
+var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
+ /* istanbul ignore next */ 9007199254740991
+
+// Max safe segment length for coercion.
+var MAX_SAFE_COMPONENT_LENGTH = 16
+
+// The actual regexps go on exports.re
+var re = exports.re = []
+var src = exports.src = []
+var R = 0
+
+// The following Regular Expressions can be used for tokenizing,
+// validating, and parsing SemVer version strings.
+
+// ## Numeric Identifier
+// A single `0`, or a non-zero digit followed by zero or more digits.
+
+var NUMERICIDENTIFIER = R++
+src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
+var NUMERICIDENTIFIERLOOSE = R++
+src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'
+
+// ## Non-numeric Identifier
+// Zero or more digits, followed by a letter or hyphen, and then zero or
+// more letters, digits, or hyphens.
+
+var NONNUMERICIDENTIFIER = R++
+src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
+
+// ## Main Version
+// Three dot-separated numeric identifiers.
+
+var MAINVERSION = R++
+src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')'
+
+var MAINVERSIONLOOSE = R++
+src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')'
+
+// ## Pre-release Version Identifier
+// A numeric identifier, or a non-numeric identifier.
+
+var PRERELEASEIDENTIFIER = R++
+src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')'
+
+var PRERELEASEIDENTIFIERLOOSE = R++
+src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')'
+
+// ## Pre-release Version
+// Hyphen, followed by one or more dot-separated pre-release version
+// identifiers.
+
+var PRERELEASE = R++
+src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'
+
+var PRERELEASELOOSE = R++
+src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'
+
+// ## Build Metadata Identifier
+// Any combination of digits, letters, or hyphens.
+
+var BUILDIDENTIFIER = R++
+src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
+
+// ## Build Metadata
+// Plus sign, followed by one or more period-separated build metadata
+// identifiers.
+
+var BUILD = R++
+src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
+ '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'
+
+// ## Full Version String
+// A main version, followed optionally by a pre-release version and
+// build metadata.
+
+// Note that the only major, minor, patch, and pre-release sections of
+// the version string are capturing groups. The build metadata is not a
+// capturing group, because it should not ever be used in version
+// comparison.
+
+var FULL = R++
+var FULLPLAIN = 'v?' + src[MAINVERSION] +
+ src[PRERELEASE] + '?' +
+ src[BUILD] + '?'
+
+src[FULL] = '^' + FULLPLAIN + '$'
+
+// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
+// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
+// common in the npm registry.
+var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
+ src[PRERELEASELOOSE] + '?' +
+ src[BUILD] + '?'
+
+var LOOSE = R++
+src[LOOSE] = '^' + LOOSEPLAIN + '$'
+
+var GTLT = R++
+src[GTLT] = '((?:<|>)?=?)'
+
+// Something like "2.*" or "1.2.x".
+// Note that "x.x" is a valid xRange identifier, meaning "any version"
+// Only the first item is strictly required.
+var XRANGEIDENTIFIERLOOSE = R++
+src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
+var XRANGEIDENTIFIER = R++
+src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'
+
+var XRANGEPLAIN = R++
+src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:' + src[PRERELEASE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?'
+
+var XRANGEPLAINLOOSE = R++
+src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:' + src[PRERELEASELOOSE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?'
+
+var XRANGE = R++
+src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
+var XRANGELOOSE = R++
+src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'
+
+// Coercion.
+// Extract anything that could conceivably be a part of a valid semver
+var COERCE = R++
+src[COERCE] = '(?:^|[^\\d])' +
+ '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
+ '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
+ '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
+ '(?:$|[^\\d])'
+
+// Tilde ranges.
+// Meaning is "reasonably at or greater than"
+var LONETILDE = R++
+src[LONETILDE] = '(?:~>?)'
+
+var TILDETRIM = R++
+src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
+re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
+var tildeTrimReplace = '$1~'
+
+var TILDE = R++
+src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
+var TILDELOOSE = R++
+src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'
+
+// Caret ranges.
+// Meaning is "at least and backwards compatible with"
+var LONECARET = R++
+src[LONECARET] = '(?:\\^)'
+
+var CARETTRIM = R++
+src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
+re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
+var caretTrimReplace = '$1^'
+
+var CARET = R++
+src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
+var CARETLOOSE = R++
+src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'
+
+// A simple gt/lt/eq thing, or just "" to indicate "any version"
+var COMPARATORLOOSE = R++
+src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
+var COMPARATOR = R++
+src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'
+
+// An expression to strip any whitespace between the gtlt and the thing
+// it modifies, so that `> 1.2.3` ==> `>1.2.3`
+var COMPARATORTRIM = R++
+src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
+ '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'
+
+// this one has to use the /g flag
+re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
+var comparatorTrimReplace = '$1$2$3'
+
+// Something like `1.2.3 - 1.2.4`
+// Note that these all use the loose form, because they'll be
+// checked against either the strict or loose comparator form
+// later.
+var HYPHENRANGE = R++
+src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAIN] + ')' +
+ '\\s*$'
+
+var HYPHENRANGELOOSE = R++
+src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s*$'
+
+// Star ranges basically just allow anything at all.
+var STAR = R++
+src[STAR] = '(<|>)?=?\\s*\\*'
+
+// Compile to actual regexp objects.
+// All are flag-free, unless they were created above with a flag.
+for (var i = 0; i < R; i++) {
+ debug(i, src[i])
+ if (!re[i]) {
+ re[i] = new RegExp(src[i])
+ }
+}
+
+exports.parse = parse
+function parse (version, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
+
+ if (version instanceof SemVer) {
+ return version
+ }
+
+ if (typeof version !== 'string') {
+ return null
+ }
+
+ if (version.length > MAX_LENGTH) {
+ return null
+ }
+
+ var r = options.loose ? re[LOOSE] : re[FULL]
+ if (!r.test(version)) {
+ return null
+ }
+
+ try {
+ return new SemVer(version, options)
+ } catch (er) {
+ return null
+ }
+}
+
+exports.valid = valid
+function valid (version, options) {
+ var v = parse(version, options)
+ return v ? v.version : null
+}
+
+exports.clean = clean
+function clean (version, options) {
+ var s = parse(version.trim().replace(/^[=v]+/, ''), options)
+ return s ? s.version : null
+}
+
+exports.SemVer = SemVer
+
+function SemVer (version, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
+ if (version instanceof SemVer) {
+ if (version.loose === options.loose) {
+ return version
+ } else {
+ version = version.version
+ }
+ } else if (typeof version !== 'string') {
+ throw new TypeError('Invalid Version: ' + version)
+ }
+
+ if (version.length > MAX_LENGTH) {
+ throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
+ }
+
+ if (!(this instanceof SemVer)) {
+ return new SemVer(version, options)
+ }
+
+ debug('SemVer', version, options)
+ this.options = options
+ this.loose = !!options.loose
+
+ var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])
+
+ if (!m) {
+ throw new TypeError('Invalid Version: ' + version)
+ }
+
+ this.raw = version
+
+ // these are actually numbers
+ this.major = +m[1]
+ this.minor = +m[2]
+ this.patch = +m[3]
+
+ if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
+ throw new TypeError('Invalid major version')
+ }
+
+ if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
+ throw new TypeError('Invalid minor version')
+ }
+
+ if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
+ throw new TypeError('Invalid patch version')
+ }
+
+ // numberify any prerelease numeric ids
+ if (!m[4]) {
+ this.prerelease = []
+ } else {
+ this.prerelease = m[4].split('.').map(function (id) {
+ if (/^[0-9]+$/.test(id)) {
+ var num = +id
+ if (num >= 0 && num < MAX_SAFE_INTEGER) {
+ return num
+ }
+ }
+ return id
+ })
+ }
+
+ this.build = m[5] ? m[5].split('.') : []
+ this.format()
+}
+
+SemVer.prototype.format = function () {
+ this.version = this.major + '.' + this.minor + '.' + this.patch
+ if (this.prerelease.length) {
+ this.version += '-' + this.prerelease.join('.')
+ }
+ return this.version
+}
+
+SemVer.prototype.toString = function () {
+ return this.version
+}
+
+SemVer.prototype.compare = function (other) {
+ debug('SemVer.compare', this.version, this.options, other)
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
+ }
+
+ return this.compareMain(other) || this.comparePre(other)
+}
+
+SemVer.prototype.compareMain = function (other) {
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
+ }
+
+ return compareIdentifiers(this.major, other.major) ||
+ compareIdentifiers(this.minor, other.minor) ||
+ compareIdentifiers(this.patch, other.patch)
+}
+
+SemVer.prototype.comparePre = function (other) {
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
+ }
+
+ // NOT having a prerelease is > having one
+ if (this.prerelease.length && !other.prerelease.length) {
+ return -1
+ } else if (!this.prerelease.length && other.prerelease.length) {
+ return 1
+ } else if (!this.prerelease.length && !other.prerelease.length) {
+ return 0
+ }
+
+ var i = 0
+ do {
+ var a = this.prerelease[i]
+ var b = other.prerelease[i]
+ debug('prerelease compare', i, a, b)
+ if (a === undefined && b === undefined) {
+ return 0
+ } else if (b === undefined) {
+ return 1
+ } else if (a === undefined) {
+ return -1
+ } else if (a === b) {
+ continue
+ } else {
+ return compareIdentifiers(a, b)
+ }
+ } while (++i)
+}
+
+// preminor will bump the version up to the next minor release, and immediately
+// down to pre-release. premajor and prepatch work the same way.
+SemVer.prototype.inc = function (release, identifier) {
+ switch (release) {
+ case 'premajor':
+ this.prerelease.length = 0
+ this.patch = 0
+ this.minor = 0
+ this.major++
+ this.inc('pre', identifier)
+ break
+ case 'preminor':
+ this.prerelease.length = 0
+ this.patch = 0
+ this.minor++
+ this.inc('pre', identifier)
+ break
+ case 'prepatch':
+ // If this is already a prerelease, it will bump to the next version
+ // drop any prereleases that might already exist, since they are not
+ // relevant at this point.
+ this.prerelease.length = 0
+ this.inc('patch', identifier)
+ this.inc('pre', identifier)
+ break
+ // If the input is a non-prerelease version, this acts the same as
+ // prepatch.
+ case 'prerelease':
+ if (this.prerelease.length === 0) {
+ this.inc('patch', identifier)
+ }
+ this.inc('pre', identifier)
+ break
+
+ case 'major':
+ // If this is a pre-major version, bump up to the same major version.
+ // Otherwise increment major.
+ // 1.0.0-5 bumps to 1.0.0
+ // 1.1.0 bumps to 2.0.0
+ if (this.minor !== 0 ||
+ this.patch !== 0 ||
+ this.prerelease.length === 0) {
+ this.major++
+ }
+ this.minor = 0
+ this.patch = 0
+ this.prerelease = []
+ break
+ case 'minor':
+ // If this is a pre-minor version, bump up to the same minor version.
+ // Otherwise increment minor.
+ // 1.2.0-5 bumps to 1.2.0
+ // 1.2.1 bumps to 1.3.0
+ if (this.patch !== 0 || this.prerelease.length === 0) {
+ this.minor++
+ }
+ this.patch = 0
+ this.prerelease = []
+ break
+ case 'patch':
+ // If this is not a pre-release version, it will increment the patch.
+ // If it is a pre-release it will bump up to the same patch version.
+ // 1.2.0-5 patches to 1.2.0
+ // 1.2.0 patches to 1.2.1
+ if (this.prerelease.length === 0) {
+ this.patch++
+ }
+ this.prerelease = []
+ break
+ // This probably shouldn't be used publicly.
+ // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
+ case 'pre':
+ if (this.prerelease.length === 0) {
+ this.prerelease = [0]
+ } else {
+ var i = this.prerelease.length
+ while (--i >= 0) {
+ if (typeof this.prerelease[i] === 'number') {
+ this.prerelease[i]++
+ i = -2
+ }
+ }
+ if (i === -1) {
+ // didn't increment anything
+ this.prerelease.push(0)
+ }
+ }
+ if (identifier) {
+ // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
+ // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
+ if (this.prerelease[0] === identifier) {
+ if (isNaN(this.prerelease[1])) {
+ this.prerelease = [identifier, 0]
+ }
+ } else {
+ this.prerelease = [identifier, 0]
+ }
+ }
+ break
+
+ default:
+ throw new Error('invalid increment argument: ' + release)
+ }
+ this.format()
+ this.raw = this.version
+ return this
+}
+
+exports.inc = inc
+function inc (version, release, loose, identifier) {
+ if (typeof (loose) === 'string') {
+ identifier = loose
+ loose = undefined
+ }
+
+ try {
+ return new SemVer(version, loose).inc(release, identifier).version
+ } catch (er) {
+ return null
+ }
+}
+
+exports.diff = diff
+function diff (version1, version2) {
+ if (eq(version1, version2)) {
+ return null
+ } else {
+ var v1 = parse(version1)
+ var v2 = parse(version2)
+ var prefix = ''
+ if (v1.prerelease.length || v2.prerelease.length) {
+ prefix = 'pre'
+ var defaultResult = 'prerelease'
+ }
+ for (var key in v1) {
+ if (key === 'major' || key === 'minor' || key === 'patch') {
+ if (v1[key] !== v2[key]) {
+ return prefix + key
+ }
+ }
+ }
+ return defaultResult // may be undefined
+ }
+}
+
+exports.compareIdentifiers = compareIdentifiers
+
+var numeric = /^[0-9]+$/
+function compareIdentifiers (a, b) {
+ var anum = numeric.test(a)
+ var bnum = numeric.test(b)
+
+ if (anum && bnum) {
+ a = +a
+ b = +b
+ }
+
+ return a === b ? 0
+ : (anum && !bnum) ? -1
+ : (bnum && !anum) ? 1
+ : a < b ? -1
+ : 1
+}
+
+exports.rcompareIdentifiers = rcompareIdentifiers
+function rcompareIdentifiers (a, b) {
+ return compareIdentifiers(b, a)
+}
+
+exports.major = major
+function major (a, loose) {
+ return new SemVer(a, loose).major
+}
+
+exports.minor = minor
+function minor (a, loose) {
+ return new SemVer(a, loose).minor
+}
+
+exports.patch = patch
+function patch (a, loose) {
+ return new SemVer(a, loose).patch
+}
+
+exports.compare = compare
+function compare (a, b, loose) {
+ return new SemVer(a, loose).compare(new SemVer(b, loose))
+}
+
+exports.compareLoose = compareLoose
+function compareLoose (a, b) {
+ return compare(a, b, true)
+}
+
+exports.rcompare = rcompare
+function rcompare (a, b, loose) {
+ return compare(b, a, loose)
+}
+
+exports.sort = sort
+function sort (list, loose) {
+ return list.sort(function (a, b) {
+ return exports.compare(a, b, loose)
+ })
+}
+
+exports.rsort = rsort
+function rsort (list, loose) {
+ return list.sort(function (a, b) {
+ return exports.rcompare(a, b, loose)
+ })
+}
+
+exports.gt = gt
+function gt (a, b, loose) {
+ return compare(a, b, loose) > 0
+}
+
+exports.lt = lt
+function lt (a, b, loose) {
+ return compare(a, b, loose) < 0
+}
+
+exports.eq = eq
+function eq (a, b, loose) {
+ return compare(a, b, loose) === 0
+}
+
+exports.neq = neq
+function neq (a, b, loose) {
+ return compare(a, b, loose) !== 0
+}
+
+exports.gte = gte
+function gte (a, b, loose) {
+ return compare(a, b, loose) >= 0
+}
+
+exports.lte = lte
+function lte (a, b, loose) {
+ return compare(a, b, loose) <= 0
+}
+
+exports.cmp = cmp
+function cmp (a, op, b, loose) {
+ switch (op) {
+ case '===':
+ if (typeof a === 'object')
+ a = a.version
+ if (typeof b === 'object')
+ b = b.version
+ return a === b
+
+ case '!==':
+ if (typeof a === 'object')
+ a = a.version
+ if (typeof b === 'object')
+ b = b.version
+ return a !== b
+
+ case '':
+ case '=':
+ case '==':
+ return eq(a, b, loose)
+
+ case '!=':
+ return neq(a, b, loose)
+
+ case '>':
+ return gt(a, b, loose)
+
+ case '>=':
+ return gte(a, b, loose)
+
+ case '<':
+ return lt(a, b, loose)
+
+ case '<=':
+ return lte(a, b, loose)
+
+ default:
+ throw new TypeError('Invalid operator: ' + op)
+ }
+}
+
+exports.Comparator = Comparator
+function Comparator (comp, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
+
+ if (comp instanceof Comparator) {
+ if (comp.loose === !!options.loose) {
+ return comp
+ } else {
+ comp = comp.value
+ }
+ }
+
+ if (!(this instanceof Comparator)) {
+ return new Comparator(comp, options)
+ }
+
+ debug('comparator', comp, options)
+ this.options = options
+ this.loose = !!options.loose
+ this.parse(comp)
+
+ if (this.semver === ANY) {
+ this.value = ''
+ } else {
+ this.value = this.operator + this.semver.version
+ }
+
+ debug('comp', this)
+}
+
+var ANY = {}
+Comparator.prototype.parse = function (comp) {
+ var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
+ var m = comp.match(r)
+
+ if (!m) {
+ throw new TypeError('Invalid comparator: ' + comp)
+ }
+
+ this.operator = m[1]
+ if (this.operator === '=') {
+ this.operator = ''
+ }
+
+ // if it literally is just '>' or '' then allow anything.
+ if (!m[2]) {
+ this.semver = ANY
+ } else {
+ this.semver = new SemVer(m[2], this.options.loose)
+ }
+}
+
+Comparator.prototype.toString = function () {
+ return this.value
+}
+
+Comparator.prototype.test = function (version) {
+ debug('Comparator.test', version, this.options.loose)
+
+ if (this.semver === ANY) {
+ return true
+ }
+
+ if (typeof version === 'string') {
+ version = new SemVer(version, this.options)
+ }
+
+ return cmp(version, this.operator, this.semver, this.options)
+}
+
+Comparator.prototype.intersects = function (comp, options) {
+ if (!(comp instanceof Comparator)) {
+ throw new TypeError('a Comparator is required')
+ }
+
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
+
+ var rangeTmp
+
+ if (this.operator === '') {
+ rangeTmp = new Range(comp.value, options)
+ return satisfies(this.value, rangeTmp, options)
+ } else if (comp.operator === '') {
+ rangeTmp = new Range(this.value, options)
+ return satisfies(comp.semver, rangeTmp, options)
+ }
+
+ var sameDirectionIncreasing =
+ (this.operator === '>=' || this.operator === '>') &&
+ (comp.operator === '>=' || comp.operator === '>')
+ var sameDirectionDecreasing =
+ (this.operator === '<=' || this.operator === '<') &&
+ (comp.operator === '<=' || comp.operator === '<')
+ var sameSemVer = this.semver.version === comp.semver.version
+ var differentDirectionsInclusive =
+ (this.operator === '>=' || this.operator === '<=') &&
+ (comp.operator === '>=' || comp.operator === '<=')
+ var oppositeDirectionsLessThan =
+ cmp(this.semver, '<', comp.semver, options) &&
+ ((this.operator === '>=' || this.operator === '>') &&
+ (comp.operator === '<=' || comp.operator === '<'))
+ var oppositeDirectionsGreaterThan =
+ cmp(this.semver, '>', comp.semver, options) &&
+ ((this.operator === '<=' || this.operator === '<') &&
+ (comp.operator === '>=' || comp.operator === '>'))
+
+ return sameDirectionIncreasing || sameDirectionDecreasing ||
+ (sameSemVer && differentDirectionsInclusive) ||
+ oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
+}
+
+exports.Range = Range
+function Range (range, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
+
+ if (range instanceof Range) {
+ if (range.loose === !!options.loose &&
+ range.includePrerelease === !!options.includePrerelease) {
+ return range
+ } else {
+ return new Range(range.raw, options)
+ }
+ }
+
+ if (range instanceof Comparator) {
+ return new Range(range.value, options)
+ }
+
+ if (!(this instanceof Range)) {
+ return new Range(range, options)
+ }
+
+ this.options = options
+ this.loose = !!options.loose
+ this.includePrerelease = !!options.includePrerelease
+
+ // First, split based on boolean or ||
+ this.raw = range
+ this.set = range.split(/\s*\|\|\s*/).map(function (range) {
+ return this.parseRange(range.trim())
+ }, this).filter(function (c) {
+ // throw out any that are not relevant for whatever reason
+ return c.length
+ })
+
+ if (!this.set.length) {
+ throw new TypeError('Invalid SemVer Range: ' + range)
+ }
+
+ this.format()
+}
+
+Range.prototype.format = function () {
+ this.range = this.set.map(function (comps) {
+ return comps.join(' ').trim()
+ }).join('||').trim()
+ return this.range
+}
+
+Range.prototype.toString = function () {
+ return this.range
+}
+
+Range.prototype.parseRange = function (range) {
+ var loose = this.options.loose
+ range = range.trim()
+ // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
+ var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
+ range = range.replace(hr, hyphenReplace)
+ debug('hyphen replace', range)
+ // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
+ range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
+ debug('comparator trim', range, re[COMPARATORTRIM])
+
+ // `~ 1.2.3` => `~1.2.3`
+ range = range.replace(re[TILDETRIM], tildeTrimReplace)
+
+ // `^ 1.2.3` => `^1.2.3`
+ range = range.replace(re[CARETTRIM], caretTrimReplace)
+
+ // normalize spaces
+ range = range.split(/\s+/).join(' ')
+
+ // At this point, the range is completely trimmed and
+ // ready to be split into comparators.
+
+ var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
+ var set = range.split(' ').map(function (comp) {
+ return parseComparator(comp, this.options)
+ }, this).join(' ').split(/\s+/)
+ if (this.options.loose) {
+ // in loose mode, throw out any that are not valid comparators
+ set = set.filter(function (comp) {
+ return !!comp.match(compRe)
+ })
+ }
+ set = set.map(function (comp) {
+ return new Comparator(comp, this.options)
+ }, this)
+
+ return set
+}
+
+Range.prototype.intersects = function (range, options) {
+ if (!(range instanceof Range)) {
+ throw new TypeError('a Range is required')
+ }
+
+ return this.set.some(function (thisComparators) {
+ return thisComparators.every(function (thisComparator) {
+ return range.set.some(function (rangeComparators) {
+ return rangeComparators.every(function (rangeComparator) {
+ return thisComparator.intersects(rangeComparator, options)
+ })
+ })
+ })
+ })
+}
+
+// Mostly just for testing and legacy API reasons
+exports.toComparators = toComparators
+function toComparators (range, options) {
+ return new Range(range, options).set.map(function (comp) {
+ return comp.map(function (c) {
+ return c.value
+ }).join(' ').trim().split(' ')
+ })
+}
+
+// comprised of xranges, tildes, stars, and gtlt's at this point.
+// already replaced the hyphen ranges
+// turn into a set of JUST comparators.
+function parseComparator (comp, options) {
+ debug('comp', comp, options)
+ comp = replaceCarets(comp, options)
+ debug('caret', comp)
+ comp = replaceTildes(comp, options)
+ debug('tildes', comp)
+ comp = replaceXRanges(comp, options)
+ debug('xrange', comp)
+ comp = replaceStars(comp, options)
+ debug('stars', comp)
+ return comp
+}
+
+function isX (id) {
+ return !id || id.toLowerCase() === 'x' || id === '*'
+}
+
+// ~, ~> --> * (any, kinda silly)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
+function replaceTildes (comp, options) {
+ return comp.trim().split(/\s+/).map(function (comp) {
+ return replaceTilde(comp, options)
+ }).join(' ')
+}
+
+function replaceTilde (comp, options) {
+ var r = options.loose ? re[TILDELOOSE] : re[TILDE]
+ return comp.replace(r, function (_, M, m, p, pr) {
+ debug('tilde', comp, _, M, m, p, pr)
+ var ret
+
+ if (isX(M)) {
+ ret = ''
+ } else if (isX(m)) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+ } else if (isX(p)) {
+ // ~1.2 == >=1.2.0 <1.3.0
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+ } else if (pr) {
+ debug('replaceTilde pr', pr)
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ } else {
+ // ~1.2.3 == >=1.2.3 <1.3.0
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ }
+
+ debug('tilde return', ret)
+ return ret
+ })
+}
+
+// ^ --> * (any, kinda silly)
+// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
+// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
+// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
+// ^1.2.3 --> >=1.2.3 <2.0.0
+// ^1.2.0 --> >=1.2.0 <2.0.0
+function replaceCarets (comp, options) {
+ return comp.trim().split(/\s+/).map(function (comp) {
+ return replaceCaret(comp, options)
+ }).join(' ')
+}
+
+function replaceCaret (comp, options) {
+ debug('caret', comp, options)
+ var r = options.loose ? re[CARETLOOSE] : re[CARET]
+ return comp.replace(r, function (_, M, m, p, pr) {
+ debug('caret', comp, _, M, m, p, pr)
+ var ret
+
+ if (isX(M)) {
+ ret = ''
+ } else if (isX(m)) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+ } else if (isX(p)) {
+ if (M === '0') {
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+ } else {
+ ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
+ }
+ } else if (pr) {
+ debug('replaceCaret pr', pr)
+ if (M === '0') {
+ if (m === '0') {
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + M + '.' + m + '.' + (+p + 1)
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ }
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + (+M + 1) + '.0.0'
+ }
+ } else {
+ debug('no pr')
+ if (M === '0') {
+ if (m === '0') {
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + m + '.' + (+p + 1)
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ }
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + (+M + 1) + '.0.0'
+ }
+ }
+
+ debug('caret return', ret)
+ return ret
+ })
+}
+
+function replaceXRanges (comp, options) {
+ debug('replaceXRanges', comp, options)
+ return comp.split(/\s+/).map(function (comp) {
+ return replaceXRange(comp, options)
+ }).join(' ')
+}
+
+function replaceXRange (comp, options) {
+ comp = comp.trim()
+ var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
+ return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
+ debug('xRange', comp, ret, gtlt, M, m, p, pr)
+ var xM = isX(M)
+ var xm = xM || isX(m)
+ var xp = xm || isX(p)
+ var anyX = xp
+
+ if (gtlt === '=' && anyX) {
+ gtlt = ''
+ }
+
+ if (xM) {
+ if (gtlt === '>' || gtlt === '<') {
+ // nothing is allowed
+ ret = '<0.0.0'
+ } else {
+ // nothing is forbidden
+ ret = '*'
+ }
+ } else if (gtlt && anyX) {
+ // we know patch is an x, because we have any x at all.
+ // replace X with 0
+ if (xm) {
+ m = 0
+ }
+ p = 0
+
+ if (gtlt === '>') {
+ // >1 => >=2.0.0
+ // >1.2 => >=1.3.0
+ // >1.2.3 => >= 1.2.4
+ gtlt = '>='
+ if (xm) {
+ M = +M + 1
+ m = 0
+ p = 0
+ } else {
+ m = +m + 1
+ p = 0
+ }
+ } else if (gtlt === '<=') {
+ // <=0.7.x is actually <0.8.0, since any 0.7.x should
+ // pass. Similarly, <=7.x is actually <8.0.0, etc.
+ gtlt = '<'
+ if (xm) {
+ M = +M + 1
+ } else {
+ m = +m + 1
+ }
+ }
+
+ ret = gtlt + M + '.' + m + '.' + p
+ } else if (xm) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+ } else if (xp) {
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+ }
+
+ debug('xRange return', ret)
+
+ return ret
+ })
+}
+
+// Because * is AND-ed with everything else in the comparator,
+// and '' means "any version", just remove the *s entirely.
+function replaceStars (comp, options) {
+ debug('replaceStars', comp, options)
+ // Looseness is ignored here. star is always as loose as it gets!
+ return comp.trim().replace(re[STAR], '')
+}
+
+// This function is passed to string.replace(re[HYPHENRANGE])
+// M, m, patch, prerelease, build
+// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
+// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
+// 1.2 - 3.4 => >=1.2.0 <3.5.0
+function hyphenReplace ($0,
+ from, fM, fm, fp, fpr, fb,
+ to, tM, tm, tp, tpr, tb) {
+ if (isX(fM)) {
+ from = ''
+ } else if (isX(fm)) {
+ from = '>=' + fM + '.0.0'
+ } else if (isX(fp)) {
+ from = '>=' + fM + '.' + fm + '.0'
+ } else {
+ from = '>=' + from
+ }
+
+ if (isX(tM)) {
+ to = ''
+ } else if (isX(tm)) {
+ to = '<' + (+tM + 1) + '.0.0'
+ } else if (isX(tp)) {
+ to = '<' + tM + '.' + (+tm + 1) + '.0'
+ } else if (tpr) {
+ to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
+ } else {
+ to = '<=' + to
+ }
+
+ return (from + ' ' + to).trim()
+}
+
+// if ANY of the sets match ALL of its comparators, then pass
+Range.prototype.test = function (version) {
+ if (!version) {
+ return false
+ }
+
+ if (typeof version === 'string') {
+ version = new SemVer(version, this.options)
+ }
+
+ for (var i = 0; i < this.set.length; i++) {
+ if (testSet(this.set[i], version, this.options)) {
+ return true
+ }
+ }
+ return false
+}
+
+function testSet (set, version, options) {
+ for (var i = 0; i < set.length; i++) {
+ if (!set[i].test(version)) {
+ return false
+ }
+ }
+
+ if (version.prerelease.length && !options.includePrerelease) {
+ // Find the set of versions that are allowed to have prereleases
+ // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
+ // That should allow `1.2.3-pr.2` to pass.
+ // However, `1.2.4-alpha.notready` should NOT be allowed,
+ // even though it's within the range set by the comparators.
+ for (i = 0; i < set.length; i++) {
+ debug(set[i].semver)
+ if (set[i].semver === ANY) {
+ continue
+ }
+
+ if (set[i].semver.prerelease.length > 0) {
+ var allowed = set[i].semver
+ if (allowed.major === version.major &&
+ allowed.minor === version.minor &&
+ allowed.patch === version.patch) {
+ return true
+ }
+ }
+ }
+
+ // Version has a -pre, but it's not one of the ones we like.
+ return false
+ }
+
+ return true
+}
+
+exports.satisfies = satisfies
+function satisfies (version, range, options) {
+ try {
+ range = new Range(range, options)
+ } catch (er) {
+ return false
+ }
+ return range.test(version)
+}
+
+exports.maxSatisfying = maxSatisfying
+function maxSatisfying (versions, range, options) {
+ var max = null
+ var maxSV = null
+ try {
+ var rangeObj = new Range(range, options)
+ } catch (er) {
+ return null
+ }
+ versions.forEach(function (v) {
+ if (rangeObj.test(v)) {
+ // satisfies(v, range, options)
+ if (!max || maxSV.compare(v) === -1) {
+ // compare(max, v, true)
+ max = v
+ maxSV = new SemVer(max, options)
+ }
+ }
+ })
+ return max
+}
+
+exports.minSatisfying = minSatisfying
+function minSatisfying (versions, range, options) {
+ var min = null
+ var minSV = null
+ try {
+ var rangeObj = new Range(range, options)
+ } catch (er) {
+ return null
+ }
+ versions.forEach(function (v) {
+ if (rangeObj.test(v)) {
+ // satisfies(v, range, options)
+ if (!min || minSV.compare(v) === 1) {
+ // compare(min, v, true)
+ min = v
+ minSV = new SemVer(min, options)
+ }
+ }
+ })
+ return min
+}
+
+exports.minVersion = minVersion
+function minVersion (range, loose) {
+ range = new Range(range, loose)
+
+ var minver = new SemVer('0.0.0')
+ if (range.test(minver)) {
+ return minver
+ }
+
+ minver = new SemVer('0.0.0-0')
+ if (range.test(minver)) {
+ return minver
+ }
+
+ minver = null
+ for (var i = 0; i < range.set.length; ++i) {
+ var comparators = range.set[i]
+
+ comparators.forEach(function (comparator) {
+ // Clone to avoid manipulating the comparator's semver object.
+ var compver = new SemVer(comparator.semver.version)
+ switch (comparator.operator) {
+ case '>':
+ if (compver.prerelease.length === 0) {
+ compver.patch++
+ } else {
+ compver.prerelease.push(0)
+ }
+ compver.raw = compver.format()
+ /* fallthrough */
+ case '':
+ case '>=':
+ if (!minver || gt(minver, compver)) {
+ minver = compver
+ }
+ break
+ case '<':
+ case '<=':
+ /* Ignore maximum versions */
+ break
+ /* istanbul ignore next */
+ default:
+ throw new Error('Unexpected operation: ' + comparator.operator)
+ }
+ })
+ }
+
+ if (minver && range.test(minver)) {
+ return minver
+ }
+
+ return null
+}
+
+exports.validRange = validRange
+function validRange (range, options) {
+ try {
+ // Return '*' instead of '' so that truthiness works.
+ // This will throw if it's invalid anyway
+ return new Range(range, options).range || '*'
+ } catch (er) {
+ return null
+ }
+}
+
+// Determine if version is less than all the versions possible in the range
+exports.ltr = ltr
+function ltr (version, range, options) {
+ return outside(version, range, '<', options)
+}
+
+// Determine if version is greater than all the versions possible in the range.
+exports.gtr = gtr
+function gtr (version, range, options) {
+ return outside(version, range, '>', options)
+}
+
+exports.outside = outside
+function outside (version, range, hilo, options) {
+ version = new SemVer(version, options)
+ range = new Range(range, options)
+
+ var gtfn, ltefn, ltfn, comp, ecomp
+ switch (hilo) {
+ case '>':
+ gtfn = gt
+ ltefn = lte
+ ltfn = lt
+ comp = '>'
+ ecomp = '>='
+ break
+ case '<':
+ gtfn = lt
+ ltefn = gte
+ ltfn = gt
+ comp = '<'
+ ecomp = '<='
+ break
+ default:
+ throw new TypeError('Must provide a hilo val of "<" or ">"')
+ }
+
+ // If it satisfies the range it is not outside
+ if (satisfies(version, range, options)) {
+ return false
+ }
+
+ // From now on, variable terms are as if we're in "gtr" mode.
+ // but note that everything is flipped for the "ltr" function.
+
+ for (var i = 0; i < range.set.length; ++i) {
+ var comparators = range.set[i]
+
+ var high = null
+ var low = null
+
+ comparators.forEach(function (comparator) {
+ if (comparator.semver === ANY) {
+ comparator = new Comparator('>=0.0.0')
+ }
+ high = high || comparator
+ low = low || comparator
+ if (gtfn(comparator.semver, high.semver, options)) {
+ high = comparator
+ } else if (ltfn(comparator.semver, low.semver, options)) {
+ low = comparator
+ }
+ })
+
+ // If the edge version comparator has an operator then our version
+ // isn't outside it
+ if (high.operator === comp || high.operator === ecomp) {
+ return false
+ }
+
+ // If the lowest version comparator has an operator and our version
+ // is less than it then it isn't higher than the range
+ if ((!low.operator || low.operator === comp) &&
+ ltefn(version, low.semver)) {
+ return false
+ } else if (low.operator === ecomp && ltfn(version, low.semver)) {
+ return false
+ }
+ }
+ return true
+}
+
+exports.prerelease = prerelease
+function prerelease (version, options) {
+ var parsed = parse(version, options)
+ return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
+}
+
+exports.intersects = intersects
+function intersects (r1, r2, options) {
+ r1 = new Range(r1, options)
+ r2 = new Range(r2, options)
+ return r1.intersects(r2)
+}
+
+exports.coerce = coerce
+function coerce (version) {
+ if (version instanceof SemVer) {
+ return version
+ }
+
+ if (typeof version !== 'string') {
+ return null
+ }
+
+ var match = version.match(re[COERCE])
+
+ if (match == null) {
+ return null
+ }
+
+ return parse(match[1] +
+ '.' + (match[2] || '0') +
+ '.' + (match[3] || '0'))
+}
+
+
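For reference, webpack module 48 above is the vendored `semver` package. A short sketch of the tilde/caret range semantics documented in the comments above, assuming the standard npm `semver` package; the version numbers are arbitrary examples.

const semver = require('semver');

semver.satisfies('1.2.4', '~1.2.3'); // true  -- ~1.2.3 desugars to >=1.2.3 <1.3.0
semver.satisfies('1.3.0', '~1.2.3'); // false
semver.satisfies('1.9.0', '^1.2.3'); // true  -- ^1.2.3 desugars to >=1.2.3 <2.0.0
semver.maxSatisfying(['1.2.3', '1.2.4', '1.3.0'], '~1.2.3'); // '1.2.4'
semver.coerce('v1.28.3-custom').version; // '1.28.3'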
+/***/ }),
+
+/***/ 49:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const os = __webpack_require__(87);
+const execa = __webpack_require__(955);
+
+// Reference: https://www.gaijin.at/en/lstwinver.php
+const names = new Map([
+ ['10.0', '10'],
+ ['6.3', '8.1'],
+ ['6.2', '8'],
+ ['6.1', '7'],
+ ['6.0', 'Vista'],
+ ['5.2', 'Server 2003'],
+ ['5.1', 'XP'],
+ ['5.0', '2000'],
+ ['4.9', 'ME'],
+ ['4.1', '98'],
+ ['4.0', '95']
+]);
+
+const windowsRelease = release => {
+ const version = /\d+\.\d/.exec(release || os.release());
+
+ if (release && !version) {
+ throw new Error('`release` argument doesn\'t match `n.n`');
+ }
+
+ const ver = (version || [])[0];
+
+ // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime.
+ // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version
+	// then query the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx
+	// PowerShell is tried first; `wmic` (obsolete on later versions of Windows 10) is used as the fallback.
+ // If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name.
+ if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) {
+ let stdout;
+ try {
+ stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || '';
+ } catch (_) {
+ stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || '';
+ }
+
+ const year = (stdout.match(/2008|2012|2016|2019/) || [])[0];
+
+ if (year) {
+ return `Server ${year}`;
+ }
+ }
+
+ return names.get(ver);
+};
+
+module.exports = windowsRelease;
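+
+// Illustrative results, derived from the `names` map above:
+//   windowsRelease('6.1')  // => '7'   (explicit release that differs from the running kernel)
+//   windowsRelease('6.3')  // => '8.1'
+// On a live Windows host an ambiguous kernel (6.1-10.0) may instead resolve to a
+// server name such as 'Server 2016' when the queried OS caption reports a server edition.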
+
+
/***/ }),
/***/ 52:
@@ -1062,6 +2756,195 @@ function findLintVersion() {
exports.findLintVersion = findLintVersion;
+/***/ }),
+
+/***/ 63:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+module.exports = validate;
+
+const { RequestError } = __webpack_require__(102);
+const get = __webpack_require__(854);
+const set = __webpack_require__(883);
+
+function validate(octokit, options) {
+ if (!options.request.validate) {
+ return;
+ }
+ const { validate: params } = options.request;
+
+ Object.keys(params).forEach(parameterName => {
+ const parameter = get(params, parameterName);
+
+ const expectedType = parameter.type;
+ let parentParameterName;
+ let parentValue;
+ let parentParamIsPresent = true;
+ let parentParameterIsArray = false;
+
+ if (/\./.test(parameterName)) {
+ parentParameterName = parameterName.replace(/\.[^.]+$/, "");
+ parentParameterIsArray = parentParameterName.slice(-2) === "[]";
+ if (parentParameterIsArray) {
+ parentParameterName = parentParameterName.slice(0, -2);
+ }
+ parentValue = get(options, parentParameterName);
+ parentParamIsPresent =
+ parentParameterName === "headers" ||
+ (typeof parentValue === "object" && parentValue !== null);
+ }
+
+ const values = parentParameterIsArray
+ ? (get(options, parentParameterName) || []).map(
+ value => value[parameterName.split(/\./).pop()]
+ )
+ : [get(options, parameterName)];
+
+ values.forEach((value, i) => {
+ const valueIsPresent = typeof value !== "undefined";
+ const valueIsNull = value === null;
+ const currentParameterName = parentParameterIsArray
+ ? parameterName.replace(/\[\]/, `[${i}]`)
+ : parameterName;
+
+ if (!parameter.required && !valueIsPresent) {
+ return;
+ }
+
+ // if the parent parameter is of type object but allows null
+ // then the child parameters can be ignored
+ if (!parentParamIsPresent) {
+ return;
+ }
+
+ if (parameter.allowNull && valueIsNull) {
+ return;
+ }
+
+ if (!parameter.allowNull && valueIsNull) {
+ throw new RequestError(
+ `'${currentParameterName}' cannot be null`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+
+ if (parameter.required && !valueIsPresent) {
+ throw new RequestError(
+ `Empty value for parameter '${currentParameterName}': ${JSON.stringify(
+ value
+ )}`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+
+ // parse to integer before checking for enum
+ // so that string "1" will match enum with number 1
+ if (expectedType === "integer") {
+ const unparsedValue = value;
+ value = parseInt(value, 10);
+ if (isNaN(value)) {
+ throw new RequestError(
+ `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
+ unparsedValue
+ )} is NaN`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+ }
+
+ if (parameter.enum && parameter.enum.indexOf(String(value)) === -1) {
+ throw new RequestError(
+ `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
+ value
+ )}`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+
+ if (parameter.validation) {
+ const regex = new RegExp(parameter.validation);
+ if (!regex.test(value)) {
+ throw new RequestError(
+ `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
+ value
+ )}`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+ }
+
+ if (expectedType === "object" && typeof value === "string") {
+ try {
+ value = JSON.parse(value);
+ } catch (exception) {
+ throw new RequestError(
+ `JSON parse error of value for parameter '${currentParameterName}': ${JSON.stringify(
+ value
+ )}`,
+ 400,
+ {
+ request: options
+ }
+ );
+ }
+ }
+
+ set(options, parameter.mapTo || currentParameterName, value);
+ });
+ });
+
+ return options;
+}
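+
+// Hypothetical shape of the schema consumed above (the real schemas come from the
+// GitHub REST route definitions; the parameter names here are only an example):
+//   options.request.validate = {
+//     owner: { type: 'string', required: true },
+//     state: { type: 'string', enum: ['open', 'closed', 'all'] }
+//   };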
+
+
+/***/ }),
+
+/***/ 85:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = withAuthorizationPrefix;
+
+const atob = __webpack_require__(368);
+
+const REGEX_IS_BASIC_AUTH = /^[\w-]+:/;
+
+function withAuthorizationPrefix(authorization) {
+ if (/^(basic|bearer|token) /i.test(authorization)) {
+ return authorization;
+ }
+
+ try {
+ if (REGEX_IS_BASIC_AUTH.test(atob(authorization))) {
+ return `basic ${authorization}`;
+ }
+ } catch (error) {}
+
+ if (authorization.split(/\./).length === 3) {
+ return `bearer ${authorization}`;
+ }
+
+ return `token ${authorization}`;
+}
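+
+// Examples of the prefixing rules above (illustrative):
+//   withAuthorizationPrefix('token abc123')    // already prefixed -> returned unchanged
+//   withAuthorizationPrefix('xxx.yyy.zzz')     // three dot-separated parts -> 'bearer xxx.yyy.zzz' (JWT)
+//   withAuthorizationPrefix('dXNlcjpwYXNz')    // decodes to 'user:pass' -> 'basic dXNlcjpwYXNz'
+//   withAuthorizationPrefix('d0e1f2a3b4')      // anything else -> 'token d0e1f2a3b4'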
+
+
/***/ }),
/***/ 87:
@@ -1999,6 +3882,69 @@ function regExpEscape (s) {
}
+/***/ }),
+
+/***/ 102:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var deprecation = __webpack_require__(692);
+var once = _interopDefault(__webpack_require__(969));
+
+const logOnce = once(deprecation => console.warn(deprecation));
+/**
+ * Error with extra properties to help with debugging
+ */
+
+class RequestError extends Error {
+ constructor(message, statusCode, options) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = "HttpError";
+ this.status = statusCode;
+ Object.defineProperty(this, "code", {
+ get() {
+ logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
+ return statusCode;
+ }
+
+ });
+ this.headers = options.headers || {}; // redact request credentials without mutating original request options
+
+ const requestCopy = Object.assign({}, options.request);
+
+ if (options.request.headers.authorization) {
+ requestCopy.headers = Object.assign({}, options.request.headers, {
+ authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
+ });
+ }
+
+ requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
+ // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
+ .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
+ // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
+ .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
+ this.request = requestCopy;
+ }
+
+}
+
+exports.RequestError = RequestError;
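+
+// Illustrative construction (note that `options.request` must carry a `headers` object):
+//   const error = new RequestError('Not Found', 404, {
+//     headers: {},
+//     request: { method: 'GET', url: 'https://api.github.com/unknown', headers: {} }
+//   });
+//   error.name   // => 'HttpError'
+//   error.status // => 404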
+//# sourceMappingURL=index.js.map
+
+
/***/ }),
/***/ 105:
@@ -2032,6 +3978,1259 @@ function bytesToUuid(buf, offset) {
module.exports = bytesToUuid;
+/***/ }),
+
+/***/ 117:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+var pathModule = __webpack_require__(622);
+var isWindows = process.platform === 'win32';
+var fs = __webpack_require__(747);
+
+// JavaScript implementation of realpath, ported from node pre-v6
+
+var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
+
+function rethrow() {
+ // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
+ // is fairly slow to generate.
+ var callback;
+ if (DEBUG) {
+ var backtrace = new Error;
+ callback = debugCallback;
+ } else
+ callback = missingCallback;
+
+ return callback;
+
+ function debugCallback(err) {
+ if (err) {
+ backtrace.message = err.message;
+ err = backtrace;
+ missingCallback(err);
+ }
+ }
+
+ function missingCallback(err) {
+ if (err) {
+ if (process.throwDeprecation)
+ throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
+ else if (!process.noDeprecation) {
+ var msg = 'fs: missing callback ' + (err.stack || err.message);
+ if (process.traceDeprecation)
+ console.trace(msg);
+ else
+ console.error(msg);
+ }
+ }
+ }
+}
+
+function maybeCallback(cb) {
+ return typeof cb === 'function' ? cb : rethrow();
+}
+
+var normalize = pathModule.normalize;
+
+// Regexp that finds the next portion of a (partial) path
+// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
+if (isWindows) {
+ var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
+} else {
+ var nextPartRe = /(.*?)(?:[\/]+|$)/g;
+}
+
+// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
+if (isWindows) {
+ var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
+} else {
+ var splitRootRe = /^[\/]*/;
+}
+
+exports.realpathSync = function realpathSync(p, cache) {
+  // make p absolute
+ p = pathModule.resolve(p);
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+ return cache[p];
+ }
+
+ var original = p,
+ seenLinks = {},
+ knownHard = {};
+
+ // current character position in p
+ var pos;
+ // the partial path so far, including a trailing slash if any
+ var current;
+ // the partial path without a trailing slash (except when pointing at a root)
+ var base;
+ // the partial path scanned in the previous round, with slash
+ var previous;
+
+ start();
+
+ function start() {
+ // Skip over roots
+ var m = splitRootRe.exec(p);
+ pos = m[0].length;
+ current = m[0];
+ base = m[0];
+ previous = '';
+
+ // On windows, check that the root exists. On unix there is no need.
+ if (isWindows && !knownHard[base]) {
+ fs.lstatSync(base);
+ knownHard[base] = true;
+ }
+ }
+
+ // walk down the path, swapping out linked pathparts for their real
+ // values
+ // NB: p.length changes.
+ while (pos < p.length) {
+ // find the next part
+ nextPartRe.lastIndex = pos;
+ var result = nextPartRe.exec(p);
+ previous = current;
+ current += result[0];
+ base = previous + result[1];
+ pos = nextPartRe.lastIndex;
+
+ // continue if not a symlink
+ if (knownHard[base] || (cache && cache[base] === base)) {
+ continue;
+ }
+
+ var resolvedLink;
+ if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+ // some known symbolic link. no need to stat again.
+ resolvedLink = cache[base];
+ } else {
+ var stat = fs.lstatSync(base);
+ if (!stat.isSymbolicLink()) {
+ knownHard[base] = true;
+ if (cache) cache[base] = base;
+ continue;
+ }
+
+ // read the link if it wasn't read before
+ // dev/ino always return 0 on windows, so skip the check.
+ var linkTarget = null;
+ if (!isWindows) {
+ var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+ if (seenLinks.hasOwnProperty(id)) {
+ linkTarget = seenLinks[id];
+ }
+ }
+ if (linkTarget === null) {
+ fs.statSync(base);
+ linkTarget = fs.readlinkSync(base);
+ }
+ resolvedLink = pathModule.resolve(previous, linkTarget);
+ // track this, if given a cache.
+ if (cache) cache[base] = resolvedLink;
+ if (!isWindows) seenLinks[id] = linkTarget;
+ }
+
+ // resolve the link, then start over
+ p = pathModule.resolve(resolvedLink, p.slice(pos));
+ start();
+ }
+
+ if (cache) cache[original] = p;
+
+ return p;
+};
+
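+// Both realpathSync above and realpath below accept an optional `cache` object that
+// maps already-resolved paths (and known symlink targets) to their results, so repeated
+// lookups can skip the lstat/readlink round trips.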
+
+exports.realpath = function realpath(p, cache, cb) {
+ if (typeof cb !== 'function') {
+ cb = maybeCallback(cache);
+ cache = null;
+ }
+
+  // make p absolute
+ p = pathModule.resolve(p);
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+ return process.nextTick(cb.bind(null, null, cache[p]));
+ }
+
+ var original = p,
+ seenLinks = {},
+ knownHard = {};
+
+ // current character position in p
+ var pos;
+ // the partial path so far, including a trailing slash if any
+ var current;
+ // the partial path without a trailing slash (except when pointing at a root)
+ var base;
+ // the partial path scanned in the previous round, with slash
+ var previous;
+
+ start();
+
+ function start() {
+ // Skip over roots
+ var m = splitRootRe.exec(p);
+ pos = m[0].length;
+ current = m[0];
+ base = m[0];
+ previous = '';
+
+ // On windows, check that the root exists. On unix there is no need.
+ if (isWindows && !knownHard[base]) {
+ fs.lstat(base, function(err) {
+ if (err) return cb(err);
+ knownHard[base] = true;
+ LOOP();
+ });
+ } else {
+ process.nextTick(LOOP);
+ }
+ }
+
+ // walk down the path, swapping out linked pathparts for their real
+ // values
+ function LOOP() {
+ // stop if scanned past end of path
+ if (pos >= p.length) {
+ if (cache) cache[original] = p;
+ return cb(null, p);
+ }
+
+ // find the next part
+ nextPartRe.lastIndex = pos;
+ var result = nextPartRe.exec(p);
+ previous = current;
+ current += result[0];
+ base = previous + result[1];
+ pos = nextPartRe.lastIndex;
+
+ // continue if not a symlink
+ if (knownHard[base] || (cache && cache[base] === base)) {
+ return process.nextTick(LOOP);
+ }
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+ // known symbolic link. no need to stat again.
+ return gotResolvedLink(cache[base]);
+ }
+
+ return fs.lstat(base, gotStat);
+ }
+
+ function gotStat(err, stat) {
+ if (err) return cb(err);
+
+ // if not a symlink, skip to the next path part
+ if (!stat.isSymbolicLink()) {
+ knownHard[base] = true;
+ if (cache) cache[base] = base;
+ return process.nextTick(LOOP);
+ }
+
+ // stat & read the link if not read before
+ // call gotTarget as soon as the link target is known
+ // dev/ino always return 0 on windows, so skip the check.
+ if (!isWindows) {
+ var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+ if (seenLinks.hasOwnProperty(id)) {
+ return gotTarget(null, seenLinks[id], base);
+ }
+ }
+ fs.stat(base, function(err) {
+ if (err) return cb(err);
+
+ fs.readlink(base, function(err, target) {
+ if (!isWindows) seenLinks[id] = target;
+ gotTarget(err, target);
+ });
+ });
+ }
+
+ function gotTarget(err, target, base) {
+ if (err) return cb(err);
+
+ var resolvedLink = pathModule.resolve(previous, target);
+ if (cache) cache[base] = resolvedLink;
+ gotResolvedLink(resolvedLink);
+ }
+
+ function gotResolvedLink(resolvedLink) {
+ // resolve the link, then start over
+ p = pathModule.resolve(resolvedLink, p.slice(pos));
+ start();
+ }
+};
+
+
+/***/ }),
+
+/***/ 118:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const os = __webpack_require__(87);
+
+const nameMap = new Map([
+ [19, 'Catalina'],
+ [18, 'Mojave'],
+ [17, 'High Sierra'],
+ [16, 'Sierra'],
+ [15, 'El Capitan'],
+ [14, 'Yosemite'],
+ [13, 'Mavericks'],
+ [12, 'Mountain Lion'],
+ [11, 'Lion'],
+ [10, 'Snow Leopard'],
+ [9, 'Leopard'],
+ [8, 'Tiger'],
+ [7, 'Panther'],
+ [6, 'Jaguar'],
+ [5, 'Puma']
+]);
+
+const macosRelease = release => {
+ release = Number((release || os.release()).split('.')[0]);
+ return {
+ name: nameMap.get(release),
+ version: '10.' + (release - 4)
+ };
+};
+
+module.exports = macosRelease;
+// TODO: remove this in the next major version
+module.exports.default = macosRelease;
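+
+// Illustrative mapping derived from `nameMap` above: a Darwin kernel release such as
+// '18.2.0' yields { name: 'Mojave', version: '10.14' }, i.e. version is 10.(major - 4).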
+
+
+/***/ }),
+
+/***/ 126:
+/***/ (function(module) {
+
+/**
+ * lodash (Custom Build)
+ * Build: `lodash modularize exports="npm" -o ./`
+ * Copyright jQuery Foundation and other contributors
+ * Released under MIT license
+ * Based on Underscore.js 1.8.3
+ * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+ */
+
+/** Used as the size to enable large array optimizations. */
+var LARGE_ARRAY_SIZE = 200;
+
+/** Used to stand-in for `undefined` hash values. */
+var HASH_UNDEFINED = '__lodash_hash_undefined__';
+
+/** Used as references for various `Number` constants. */
+var INFINITY = 1 / 0;
+
+/** `Object#toString` result references. */
+var funcTag = '[object Function]',
+ genTag = '[object GeneratorFunction]';
+
+/**
+ * Used to match `RegExp`
+ * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
+ */
+var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
+
+/** Used to detect host constructors (Safari). */
+var reIsHostCtor = /^\[object .+?Constructor\]$/;
+
+/** Detect free variable `global` from Node.js. */
+var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
+
+/** Detect free variable `self`. */
+var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
+
+/** Used as a reference to the global object. */
+var root = freeGlobal || freeSelf || Function('return this')();
+
+/**
+ * A specialized version of `_.includes` for arrays without support for
+ * specifying an index to search from.
+ *
+ * @private
+ * @param {Array} [array] The array to inspect.
+ * @param {*} target The value to search for.
+ * @returns {boolean} Returns `true` if `target` is found, else `false`.
+ */
+function arrayIncludes(array, value) {
+ var length = array ? array.length : 0;
+ return !!length && baseIndexOf(array, value, 0) > -1;
+}
+
+/**
+ * This function is like `arrayIncludes` except that it accepts a comparator.
+ *
+ * @private
+ * @param {Array} [array] The array to inspect.
+ * @param {*} target The value to search for.
+ * @param {Function} comparator The comparator invoked per element.
+ * @returns {boolean} Returns `true` if `target` is found, else `false`.
+ */
+function arrayIncludesWith(array, value, comparator) {
+ var index = -1,
+ length = array ? array.length : 0;
+
+ while (++index < length) {
+ if (comparator(value, array[index])) {
+ return true;
+ }
+ }
+ return false;
+}
+
+/**
+ * The base implementation of `_.findIndex` and `_.findLastIndex` without
+ * support for iteratee shorthands.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {Function} predicate The function invoked per iteration.
+ * @param {number} fromIndex The index to search from.
+ * @param {boolean} [fromRight] Specify iterating from right to left.
+ * @returns {number} Returns the index of the matched value, else `-1`.
+ */
+function baseFindIndex(array, predicate, fromIndex, fromRight) {
+ var length = array.length,
+ index = fromIndex + (fromRight ? 1 : -1);
+
+ while ((fromRight ? index-- : ++index < length)) {
+ if (predicate(array[index], index, array)) {
+ return index;
+ }
+ }
+ return -1;
+}
+
+/**
+ * The base implementation of `_.indexOf` without `fromIndex` bounds checks.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {*} value The value to search for.
+ * @param {number} fromIndex The index to search from.
+ * @returns {number} Returns the index of the matched value, else `-1`.
+ */
+function baseIndexOf(array, value, fromIndex) {
+ if (value !== value) {
+ return baseFindIndex(array, baseIsNaN, fromIndex);
+ }
+ var index = fromIndex - 1,
+ length = array.length;
+
+ while (++index < length) {
+ if (array[index] === value) {
+ return index;
+ }
+ }
+ return -1;
+}
+
+/**
+ * The base implementation of `_.isNaN` without support for number objects.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`.
+ */
+function baseIsNaN(value) {
+ return value !== value;
+}
+
+/**
+ * Checks if a cache value for `key` exists.
+ *
+ * @private
+ * @param {Object} cache The cache to query.
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function cacheHas(cache, key) {
+ return cache.has(key);
+}
+
+/**
+ * Gets the value at `key` of `object`.
+ *
+ * @private
+ * @param {Object} [object] The object to query.
+ * @param {string} key The key of the property to get.
+ * @returns {*} Returns the property value.
+ */
+function getValue(object, key) {
+ return object == null ? undefined : object[key];
+}
+
+/**
+ * Checks if `value` is a host object in IE < 9.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a host object, else `false`.
+ */
+function isHostObject(value) {
+ // Many host objects are `Object` objects that can coerce to strings
+ // despite having improperly defined `toString` methods.
+ var result = false;
+ if (value != null && typeof value.toString != 'function') {
+ try {
+ result = !!(value + '');
+ } catch (e) {}
+ }
+ return result;
+}
+
+/**
+ * Converts `set` to an array of its values.
+ *
+ * @private
+ * @param {Object} set The set to convert.
+ * @returns {Array} Returns the values.
+ */
+function setToArray(set) {
+ var index = -1,
+ result = Array(set.size);
+
+ set.forEach(function(value) {
+ result[++index] = value;
+ });
+ return result;
+}
+
+/** Used for built-in method references. */
+var arrayProto = Array.prototype,
+ funcProto = Function.prototype,
+ objectProto = Object.prototype;
+
+/** Used to detect overreaching core-js shims. */
+var coreJsData = root['__core-js_shared__'];
+
+/** Used to detect methods masquerading as native. */
+var maskSrcKey = (function() {
+ var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
+ return uid ? ('Symbol(src)_1.' + uid) : '';
+}());
+
+/** Used to resolve the decompiled source of functions. */
+var funcToString = funcProto.toString;
+
+/** Used to check objects for own properties. */
+var hasOwnProperty = objectProto.hasOwnProperty;
+
+/**
+ * Used to resolve the
+ * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
+ * of values.
+ */
+var objectToString = objectProto.toString;
+
+/** Used to detect if a method is native. */
+var reIsNative = RegExp('^' +
+ funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
+ .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
+);
+
+/** Built-in value references. */
+var splice = arrayProto.splice;
+
+/* Built-in method references that are verified to be native. */
+var Map = getNative(root, 'Map'),
+ Set = getNative(root, 'Set'),
+ nativeCreate = getNative(Object, 'create');
+
+/**
+ * Creates a hash object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function Hash(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the hash.
+ *
+ * @private
+ * @name clear
+ * @memberOf Hash
+ */
+function hashClear() {
+ this.__data__ = nativeCreate ? nativeCreate(null) : {};
+}
+
+/**
+ * Removes `key` and its value from the hash.
+ *
+ * @private
+ * @name delete
+ * @memberOf Hash
+ * @param {Object} hash The hash to modify.
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function hashDelete(key) {
+ return this.has(key) && delete this.__data__[key];
+}
+
+/**
+ * Gets the hash value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf Hash
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function hashGet(key) {
+ var data = this.__data__;
+ if (nativeCreate) {
+ var result = data[key];
+ return result === HASH_UNDEFINED ? undefined : result;
+ }
+ return hasOwnProperty.call(data, key) ? data[key] : undefined;
+}
+
+/**
+ * Checks if a hash value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf Hash
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function hashHas(key) {
+ var data = this.__data__;
+ return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key);
+}
+
+/**
+ * Sets the hash `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf Hash
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the hash instance.
+ */
+function hashSet(key, value) {
+ var data = this.__data__;
+ data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
+ return this;
+}
+
+// Add methods to `Hash`.
+Hash.prototype.clear = hashClear;
+Hash.prototype['delete'] = hashDelete;
+Hash.prototype.get = hashGet;
+Hash.prototype.has = hashHas;
+Hash.prototype.set = hashSet;
+
+/**
+ * Creates a list cache object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function ListCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the list cache.
+ *
+ * @private
+ * @name clear
+ * @memberOf ListCache
+ */
+function listCacheClear() {
+ this.__data__ = [];
+}
+
+/**
+ * Removes `key` and its value from the list cache.
+ *
+ * @private
+ * @name delete
+ * @memberOf ListCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function listCacheDelete(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ return false;
+ }
+ var lastIndex = data.length - 1;
+ if (index == lastIndex) {
+ data.pop();
+ } else {
+ splice.call(data, index, 1);
+ }
+ return true;
+}
+
+/**
+ * Gets the list cache value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf ListCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function listCacheGet(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ return index < 0 ? undefined : data[index][1];
+}
+
+/**
+ * Checks if a list cache value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf ListCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function listCacheHas(key) {
+ return assocIndexOf(this.__data__, key) > -1;
+}
+
+/**
+ * Sets the list cache `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf ListCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the list cache instance.
+ */
+function listCacheSet(key, value) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ data.push([key, value]);
+ } else {
+ data[index][1] = value;
+ }
+ return this;
+}
+
+// Add methods to `ListCache`.
+ListCache.prototype.clear = listCacheClear;
+ListCache.prototype['delete'] = listCacheDelete;
+ListCache.prototype.get = listCacheGet;
+ListCache.prototype.has = listCacheHas;
+ListCache.prototype.set = listCacheSet;
+
+/**
+ * Creates a map cache object to store key-value pairs.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function MapCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the map.
+ *
+ * @private
+ * @name clear
+ * @memberOf MapCache
+ */
+function mapCacheClear() {
+ this.__data__ = {
+ 'hash': new Hash,
+ 'map': new (Map || ListCache),
+ 'string': new Hash
+ };
+}
+
+/**
+ * Removes `key` and its value from the map.
+ *
+ * @private
+ * @name delete
+ * @memberOf MapCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function mapCacheDelete(key) {
+ return getMapData(this, key)['delete'](key);
+}
+
+/**
+ * Gets the map value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf MapCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function mapCacheGet(key) {
+ return getMapData(this, key).get(key);
+}
+
+/**
+ * Checks if a map value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf MapCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function mapCacheHas(key) {
+ return getMapData(this, key).has(key);
+}
+
+/**
+ * Sets the map `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf MapCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the map cache instance.
+ */
+function mapCacheSet(key, value) {
+ getMapData(this, key).set(key, value);
+ return this;
+}
+
+// Add methods to `MapCache`.
+MapCache.prototype.clear = mapCacheClear;
+MapCache.prototype['delete'] = mapCacheDelete;
+MapCache.prototype.get = mapCacheGet;
+MapCache.prototype.has = mapCacheHas;
+MapCache.prototype.set = mapCacheSet;
+
+/**
+ *
+ * Creates an array cache object to store unique values.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [values] The values to cache.
+ */
+function SetCache(values) {
+ var index = -1,
+ length = values ? values.length : 0;
+
+ this.__data__ = new MapCache;
+ while (++index < length) {
+ this.add(values[index]);
+ }
+}
+
+/**
+ * Adds `value` to the array cache.
+ *
+ * @private
+ * @name add
+ * @memberOf SetCache
+ * @alias push
+ * @param {*} value The value to cache.
+ * @returns {Object} Returns the cache instance.
+ */
+function setCacheAdd(value) {
+ this.__data__.set(value, HASH_UNDEFINED);
+ return this;
+}
+
+/**
+ * Checks if `value` is in the array cache.
+ *
+ * @private
+ * @name has
+ * @memberOf SetCache
+ * @param {*} value The value to search for.
+ * @returns {boolean} Returns `true` if `value` is found, else `false`.
+ */
+function setCacheHas(value) {
+ return this.__data__.has(value);
+}
+
+// Add methods to `SetCache`.
+SetCache.prototype.add = SetCache.prototype.push = setCacheAdd;
+SetCache.prototype.has = setCacheHas;
+
+/**
+ * Gets the index at which the `key` is found in `array` of key-value pairs.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {*} key The key to search for.
+ * @returns {number} Returns the index of the matched value, else `-1`.
+ */
+function assocIndexOf(array, key) {
+ var length = array.length;
+ while (length--) {
+ if (eq(array[length][0], key)) {
+ return length;
+ }
+ }
+ return -1;
+}
+
+/**
+ * The base implementation of `_.isNative` without bad shim checks.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a native function,
+ * else `false`.
+ */
+function baseIsNative(value) {
+ if (!isObject(value) || isMasked(value)) {
+ return false;
+ }
+ var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor;
+ return pattern.test(toSource(value));
+}
+
+/**
+ * The base implementation of `_.uniqBy` without support for iteratee shorthands.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {Function} [iteratee] The iteratee invoked per element.
+ * @param {Function} [comparator] The comparator invoked per element.
+ * @returns {Array} Returns the new duplicate free array.
+ */
+function baseUniq(array, iteratee, comparator) {
+ var index = -1,
+ includes = arrayIncludes,
+ length = array.length,
+ isCommon = true,
+ result = [],
+ seen = result;
+
+ if (comparator) {
+ isCommon = false;
+ includes = arrayIncludesWith;
+ }
+ else if (length >= LARGE_ARRAY_SIZE) {
+ var set = iteratee ? null : createSet(array);
+ if (set) {
+ return setToArray(set);
+ }
+ isCommon = false;
+ includes = cacheHas;
+ seen = new SetCache;
+ }
+ else {
+ seen = iteratee ? [] : result;
+ }
+ outer:
+ while (++index < length) {
+ var value = array[index],
+ computed = iteratee ? iteratee(value) : value;
+
+ value = (comparator || value !== 0) ? value : 0;
+ if (isCommon && computed === computed) {
+ var seenIndex = seen.length;
+ while (seenIndex--) {
+ if (seen[seenIndex] === computed) {
+ continue outer;
+ }
+ }
+ if (iteratee) {
+ seen.push(computed);
+ }
+ result.push(value);
+ }
+ else if (!includes(seen, computed, comparator)) {
+ if (seen !== result) {
+ seen.push(computed);
+ }
+ result.push(value);
+ }
+ }
+ return result;
+}
+
+/**
+ * Creates a set object of `values`.
+ *
+ * @private
+ * @param {Array} values The values to add to the set.
+ * @returns {Object} Returns the new set.
+ */
+var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) {
+ return new Set(values);
+};
+
+/**
+ * Gets the data for `map`.
+ *
+ * @private
+ * @param {Object} map The map to query.
+ * @param {string} key The reference key.
+ * @returns {*} Returns the map data.
+ */
+function getMapData(map, key) {
+ var data = map.__data__;
+ return isKeyable(key)
+ ? data[typeof key == 'string' ? 'string' : 'hash']
+ : data.map;
+}
+
+/**
+ * Gets the native function at `key` of `object`.
+ *
+ * @private
+ * @param {Object} object The object to query.
+ * @param {string} key The key of the method to get.
+ * @returns {*} Returns the function if it's native, else `undefined`.
+ */
+function getNative(object, key) {
+ var value = getValue(object, key);
+ return baseIsNative(value) ? value : undefined;
+}
+
+/**
+ * Checks if `value` is suitable for use as a unique object key.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is suitable, else `false`.
+ */
+function isKeyable(value) {
+ var type = typeof value;
+ return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
+ ? (value !== '__proto__')
+ : (value === null);
+}
+
+/**
+ * Checks if `func` has its source masked.
+ *
+ * @private
+ * @param {Function} func The function to check.
+ * @returns {boolean} Returns `true` if `func` is masked, else `false`.
+ */
+function isMasked(func) {
+ return !!maskSrcKey && (maskSrcKey in func);
+}
+
+/**
+ * Converts `func` to its source code.
+ *
+ * @private
+ * @param {Function} func The function to process.
+ * @returns {string} Returns the source code.
+ */
+function toSource(func) {
+ if (func != null) {
+ try {
+ return funcToString.call(func);
+ } catch (e) {}
+ try {
+ return (func + '');
+ } catch (e) {}
+ }
+ return '';
+}
+
+/**
+ * Creates a duplicate-free version of an array, using
+ * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * for equality comparisons, in which only the first occurrence of each
+ * element is kept.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Array
+ * @param {Array} array The array to inspect.
+ * @returns {Array} Returns the new duplicate free array.
+ * @example
+ *
+ * _.uniq([2, 1, 2]);
+ * // => [2, 1]
+ */
+function uniq(array) {
+ return (array && array.length)
+ ? baseUniq(array)
+ : [];
+}
+
+/**
+ * Performs a
+ * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * comparison between two values to determine if they are equivalent.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to compare.
+ * @param {*} other The other value to compare.
+ * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
+ * @example
+ *
+ * var object = { 'a': 1 };
+ * var other = { 'a': 1 };
+ *
+ * _.eq(object, object);
+ * // => true
+ *
+ * _.eq(object, other);
+ * // => false
+ *
+ * _.eq('a', 'a');
+ * // => true
+ *
+ * _.eq('a', Object('a'));
+ * // => false
+ *
+ * _.eq(NaN, NaN);
+ * // => true
+ */
+function eq(value, other) {
+ return value === other || (value !== value && other !== other);
+}
+
+/**
+ * Checks if `value` is classified as a `Function` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a function, else `false`.
+ * @example
+ *
+ * _.isFunction(_);
+ * // => true
+ *
+ * _.isFunction(/abc/);
+ * // => false
+ */
+function isFunction(value) {
+ // The use of `Object#toString` avoids issues with the `typeof` operator
+ // in Safari 8-9 which returns 'object' for typed array and other constructors.
+ var tag = isObject(value) ? objectToString.call(value) : '';
+ return tag == funcTag || tag == genTag;
+}
+
+/**
+ * Checks if `value` is the
+ * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
+ * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an object, else `false`.
+ * @example
+ *
+ * _.isObject({});
+ * // => true
+ *
+ * _.isObject([1, 2, 3]);
+ * // => true
+ *
+ * _.isObject(_.noop);
+ * // => true
+ *
+ * _.isObject(null);
+ * // => false
+ */
+function isObject(value) {
+ var type = typeof value;
+ return !!value && (type == 'object' || type == 'function');
+}
+
+/**
+ * This method returns `undefined`.
+ *
+ * @static
+ * @memberOf _
+ * @since 2.3.0
+ * @category Util
+ * @example
+ *
+ * _.times(2, _.noop);
+ * // => [undefined, undefined]
+ */
+function noop() {
+ // No operation performed.
+}
+
+module.exports = uniq;
+
+
/***/ }),
/***/ 129:
@@ -2051,6 +5250,42 @@ const run_1 = __webpack_require__(180);
run_1.run();
+/***/ }),
+
+/***/ 135:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = Octokit;
+
+const { request } = __webpack_require__(753);
+const Hook = __webpack_require__(523);
+
+const parseClientOptions = __webpack_require__(36);
+
+function Octokit(plugins, options) {
+ options = options || {};
+ const hook = new Hook.Collection();
+ const log = Object.assign(
+ {
+ debug: () => {},
+ info: () => {},
+ warn: console.warn,
+ error: console.error
+ },
+ options && options.log
+ );
+ const api = {
+ hook,
+ log,
+ request: request.defaults(parseClientOptions(options, log, hook))
+ };
+
+ plugins.forEach(pluginFunction => pluginFunction(api, options));
+
+ return api;
+}
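+
+// Each plugin passed to Octokit() is called with the { hook, log, request } api object
+// plus the user options, and is expected to augment that object (for example by
+// registering REST endpoint methods on it).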
+
+
/***/ }),
/***/ 139:
@@ -2066,6 +5301,56 @@ module.exports = function nodeRNG() {
};
+/***/ }),
+
+/***/ 140:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationBeforeRequest;
+
+const btoa = __webpack_require__(675);
+const uniq = __webpack_require__(126);
+
+function authenticationBeforeRequest(state, options) {
+ if (!state.auth.type) {
+ return;
+ }
+
+ if (state.auth.type === "basic") {
+ const hash = btoa(`${state.auth.username}:${state.auth.password}`);
+ options.headers.authorization = `Basic ${hash}`;
+ return;
+ }
+
+ if (state.auth.type === "token") {
+ options.headers.authorization = `token ${state.auth.token}`;
+ return;
+ }
+
+ if (state.auth.type === "app") {
+ options.headers.authorization = `Bearer ${state.auth.token}`;
+ const acceptHeaders = options.headers.accept
+ .split(",")
+ .concat("application/vnd.github.machine-man-preview+json");
+ options.headers.accept = uniq(acceptHeaders)
+ .filter(Boolean)
+ .join(",");
+ return;
+ }
+
+ options.url += options.url.indexOf("?") === -1 ? "?" : "&";
+
+ if (state.auth.token) {
+ options.url += `access_token=${encodeURIComponent(state.auth.token)}`;
+ return;
+ }
+
+ const key = encodeURIComponent(state.auth.key);
+ const secret = encodeURIComponent(state.auth.secret);
+ options.url += `client_id=${key}&client_secret=${secret}`;
+}
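+
+// Illustrative effect of the branches above:
+//   { type: 'token', token: 'abc' }        -> headers.authorization = 'token abc'
+//   { type: 'basic', username, password }  -> headers.authorization = 'Basic <base64>'
+//   { type: 'app', token: <jwt> }          -> 'Bearer <jwt>' plus the machine-man preview accept header
+// Any other authenticated state appends access_token or client_id/client_secret to the URL.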
+
+
/***/ }),
/***/ 141:
@@ -2077,7 +5362,7 @@ module.exports = function nodeRNG() {
var net = __webpack_require__(631);
var tls = __webpack_require__(16);
var http = __webpack_require__(605);
-var https = __webpack_require__(211);
+var https = __webpack_require__(34);
var events = __webpack_require__(614);
var assert = __webpack_require__(357);
var util = __webpack_require__(669);
@@ -2338,6 +5623,900 @@ if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
exports.debug = debug; // for test
+/***/ }),
+
+/***/ 145:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const pump = __webpack_require__(453);
+const bufferStream = __webpack_require__(966);
+
+class MaxBufferError extends Error {
+ constructor() {
+ super('maxBuffer exceeded');
+ this.name = 'MaxBufferError';
+ }
+}
+
+function getStream(inputStream, options) {
+ if (!inputStream) {
+ return Promise.reject(new Error('Expected a stream'));
+ }
+
+ options = Object.assign({maxBuffer: Infinity}, options);
+
+ const {maxBuffer} = options;
+
+ let stream;
+ return new Promise((resolve, reject) => {
+ const rejectPromise = error => {
+ if (error) { // A null check
+ error.bufferedData = stream.getBufferedValue();
+ }
+ reject(error);
+ };
+
+ stream = pump(inputStream, bufferStream(options), error => {
+ if (error) {
+ rejectPromise(error);
+ return;
+ }
+
+ resolve();
+ });
+
+ stream.on('data', () => {
+ if (stream.getBufferedLength() > maxBuffer) {
+ rejectPromise(new MaxBufferError());
+ }
+ });
+ }).then(() => stream.getBufferedValue());
+}
+
+module.exports = getStream;
+module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'}));
+module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true}));
+module.exports.MaxBufferError = MaxBufferError;
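+
+// Usage sketch, assuming a readable stream `rs`:
+//   getStream(rs, {maxBuffer: 1e6}) resolves with the buffered contents, or rejects
+//   with a MaxBufferError (carrying `bufferedData`) once more than maxBuffer bytes
+//   have been read.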
+
+
+/***/ }),
+
+/***/ 150:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+/*!
+ * Tmp
+ *
+ * Copyright (c) 2011-2017 KARASZI Istvan
+ *
+ * MIT Licensed
+ */
+
+/*
+ * Module dependencies.
+ */
+const fs = __webpack_require__(747);
+const os = __webpack_require__(87);
+const path = __webpack_require__(622);
+const crypto = __webpack_require__(417);
+const _c = { fs: fs.constants, os: os.constants };
+const rimraf = __webpack_require__(13);
+
+/*
+ * The working inner variables.
+ */
+const
+ // the random characters to choose from
+ RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',
+
+ TEMPLATE_PATTERN = /XXXXXX/,
+
+ DEFAULT_TRIES = 3,
+
+ CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR),
+
+ // constants are off on the windows platform and will not match the actual errno codes
+ IS_WIN32 = os.platform() === 'win32',
+ EBADF = _c.EBADF || _c.os.errno.EBADF,
+ ENOENT = _c.ENOENT || _c.os.errno.ENOENT,
+
+ DIR_MODE = 0o700 /* 448 */,
+ FILE_MODE = 0o600 /* 384 */,
+
+ EXIT = 'exit',
+
+  // this will hold the objects that need to be removed on exit
+ _removeObjects = [],
+
+ // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback
+ FN_RMDIR_SYNC = fs.rmdirSync.bind(fs),
+ FN_RIMRAF_SYNC = rimraf.sync;
+
+let
+ _gracefulCleanup = false;
+
+/**
+ * Gets a temporary file name.
+ *
+ * @param {(Options|tmpNameCallback)} options options or callback
+ * @param {?tmpNameCallback} callback the callback function
+ */
+function tmpName(options, callback) {
+ const
+ args = _parseArguments(options, callback),
+ opts = args[0],
+ cb = args[1];
+
+ try {
+ _assertAndSanitizeOptions(opts);
+ } catch (err) {
+ return cb(err);
+ }
+
+ let tries = opts.tries;
+ (function _getUniqueName() {
+ try {
+ const name = _generateTmpName(opts);
+
+ // check whether the path exists then retry if needed
+ fs.stat(name, function (err) {
+ /* istanbul ignore else */
+ if (!err) {
+ /* istanbul ignore else */
+ if (tries-- > 0) return _getUniqueName();
+
+ return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name));
+ }
+
+ cb(null, name);
+ });
+ } catch (err) {
+ cb(err);
+ }
+ }());
+}
+
+/**
+ * Synchronous version of tmpName.
+ *
+ * @param {Object} options
+ * @returns {string} the generated random name
+ * @throws {Error} if the options are invalid or could not generate a filename
+ */
+function tmpNameSync(options) {
+ const
+ args = _parseArguments(options),
+ opts = args[0];
+
+ _assertAndSanitizeOptions(opts);
+
+ let tries = opts.tries;
+ do {
+ const name = _generateTmpName(opts);
+ try {
+ fs.statSync(name);
+ } catch (e) {
+ return name;
+ }
+ } while (tries-- > 0);
+
+ throw new Error('Could not get a unique tmp filename, max tries reached');
+}
+
+/**
+ * Creates and opens a temporary file.
+ *
+ * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined
+ * @param {?fileCallback} callback
+ */
+function file(options, callback) {
+ const
+ args = _parseArguments(options, callback),
+ opts = args[0],
+ cb = args[1];
+
+ // gets a temporary filename
+ tmpName(opts, function _tmpNameCreated(err, name) {
+ /* istanbul ignore else */
+ if (err) return cb(err);
+
+ // create and open the file
+ fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) {
+      /* istanbul ignore else */
+ if (err) return cb(err);
+
+ if (opts.discardDescriptor) {
+ return fs.close(fd, function _discardCallback(possibleErr) {
+          // the chance of getting an error on close here is rather low and would occur only in the most extreme edge cases
+ return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false));
+ });
+ } else {
+ // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care
+ // about the descriptor
+ const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
+ cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false));
+ }
+ });
+ });
+}
+
+/**
+ * Synchronous version of file.
+ *
+ * @param {Options} options
+ * @returns {FileSyncObject} object consists of name, fd and removeCallback
+ * @throws {Error} if cannot create a file
+ */
+function fileSync(options) {
+ const
+ args = _parseArguments(options),
+ opts = args[0];
+
+ const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
+ const name = tmpNameSync(opts);
+ var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);
+ /* istanbul ignore else */
+ if (opts.discardDescriptor) {
+ fs.closeSync(fd);
+ fd = undefined;
+ }
+
+ return {
+ name: name,
+ fd: fd,
+ removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true)
+ };
+}
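+
+// Usage sketch: fileSync({ postfix: '.log' }) creates and opens a uniquely named file
+// in the tmp dir and returns { name, fd, removeCallback }; calling removeCallback()
+// closes the descriptor (unless it was discarded or detached) and unlinks the file.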
+
+/**
+ * Creates a temporary directory.
+ *
+ * @param {(Options|dirCallback)} options the options or the callback function
+ * @param {?dirCallback} callback
+ */
+function dir(options, callback) {
+ const
+ args = _parseArguments(options, callback),
+ opts = args[0],
+ cb = args[1];
+
+ // gets a temporary filename
+ tmpName(opts, function _tmpNameCreated(err, name) {
+ /* istanbul ignore else */
+ if (err) return cb(err);
+
+ // create the directory
+ fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) {
+ /* istanbul ignore else */
+ if (err) return cb(err);
+
+ cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false));
+ });
+ });
+}
+
+/**
+ * Synchronous version of dir.
+ *
+ * @param {Options} options
+ * @returns {DirSyncObject} object consists of name and removeCallback
+ * @throws {Error} if it cannot create a directory
+ */
+function dirSync(options) {
+ const
+ args = _parseArguments(options),
+ opts = args[0];
+
+ const name = tmpNameSync(opts);
+ fs.mkdirSync(name, opts.mode || DIR_MODE);
+
+ return {
+ name: name,
+ removeCallback: _prepareTmpDirRemoveCallback(name, opts, true)
+ };
+}
+
+/**
+ * Removes files asynchronously.
+ *
+ * @param {Object} fdPath
+ * @param {Function} next
+ * @private
+ */
+function _removeFileAsync(fdPath, next) {
+ const _handler = function (err) {
+ if (err && !_isENOENT(err)) {
+ // reraise any unanticipated error
+ return next(err);
+ }
+ next();
+ };
+
+ if (0 <= fdPath[0])
+ fs.close(fdPath[0], function () {
+ fs.unlink(fdPath[1], _handler);
+ });
+ else fs.unlink(fdPath[1], _handler);
+}
+
+/**
+ * Removes files synchronously.
+ *
+ * @param {Object} fdPath
+ * @private
+ */
+function _removeFileSync(fdPath) {
+ let rethrownException = null;
+ try {
+ if (0 <= fdPath[0]) fs.closeSync(fdPath[0]);
+ } catch (e) {
+ // reraise any unanticipated error
+ if (!_isEBADF(e) && !_isENOENT(e)) throw e;
+ } finally {
+ try {
+ fs.unlinkSync(fdPath[1]);
+ }
+ catch (e) {
+ // reraise any unanticipated error
+ if (!_isENOENT(e)) rethrownException = e;
+ }
+ }
+ if (rethrownException !== null) {
+ throw rethrownException;
+ }
+}
+
+/**
+ * Prepares the callback for removal of the temporary file.
+ *
+ * Returns either a sync callback or an async callback depending on whether
+ * fileSync or file was called, which is expressed by the sync parameter.
+ *
+ * @param {string} name the path of the file
+ * @param {number} fd file descriptor
+ * @param {Object} opts
+ * @param {boolean} sync
+ * @returns {fileCallback | fileCallbackSync}
+ * @private
+ */
+function _prepareTmpFileRemoveCallback(name, fd, opts, sync) {
+ const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync);
+ const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync);
+
+ if (!opts.keep) _removeObjects.unshift(removeCallbackSync);
+
+ return sync ? removeCallbackSync : removeCallback;
+}
+
+/**
+ * Prepares the callback for removal of the temporary directory.
+ *
+ * Returns either a sync callback or an async callback depending on whether
+ * dirSync or dir was called, which is expressed by the sync parameter.
+ *
+ * @param {string} name
+ * @param {Object} opts
+ * @param {boolean} sync
+ * @returns {Function} the callback
+ * @private
+ */
+function _prepareTmpDirRemoveCallback(name, opts, sync) {
+ const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs);
+ const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC;
+ const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync);
+ const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync);
+ if (!opts.keep) _removeObjects.unshift(removeCallbackSync);
+
+ return sync ? removeCallbackSync : removeCallback;
+}
+
+/**
+ * Creates a guarded function wrapping the removeFunction call.
+ *
+ * The cleanup callback is safe to call multiple times.
+ * Subsequent invocations will be ignored.
+ *
+ * @param {Function} removeFunction
+ * @param {string} fileOrDirName
+ * @param {boolean} sync
+ * @param {cleanupCallbackSync?} cleanupCallbackSync
+ * @returns {cleanupCallback | cleanupCallbackSync}
+ * @private
+ */
+function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) {
+ let called = false;
+
+ // if sync is true, the next parameter will be ignored
+ return function _cleanupCallback(next) {
+
+ /* istanbul ignore else */
+ if (!called) {
+ // remove cleanupCallback from cache
+ const toRemove = cleanupCallbackSync || _cleanupCallback;
+ const index = _removeObjects.indexOf(toRemove);
+ /* istanbul ignore else */
+ if (index >= 0) _removeObjects.splice(index, 1);
+
+ called = true;
+ if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) {
+ return removeFunction(fileOrDirName);
+ } else {
+ return removeFunction(fileOrDirName, next || function() {});
+ }
+ }
+ };
+}
+
+/**
+ * The garbage collector.
+ *
+ * @private
+ */
+function _garbageCollector() {
+ /* istanbul ignore else */
+ if (!_gracefulCleanup) return;
+
+ // the function being called removes itself from _removeObjects,
+ // loop until _removeObjects is empty
+ while (_removeObjects.length) {
+ try {
+ _removeObjects[0]();
+ } catch (e) {
+ // already removed?
+ }
+ }
+}
+
+/**
+ * Random name generator based on crypto.
+ * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
+ *
+ * @param {number} howMany
+ * @returns {string} the generated random name
+ * @private
+ */
+function _randomChars(howMany) {
+ let
+ value = [],
+ rnd = null;
+
+ // make sure that we do not fail because we ran out of entropy
+ try {
+ rnd = crypto.randomBytes(howMany);
+ } catch (e) {
+ rnd = crypto.pseudoRandomBytes(howMany);
+ }
+
+ for (var i = 0; i < howMany; i++) {
+ value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]);
+ }
+
+ return value.join('');
+}
+
+/**
+ * Helper which determines whether a string s is blank, that is undefined, null or empty.
+ *
+ * @private
+ * @param {string} s
+ * @returns {Boolean} true if the string s is blank, false otherwise
+ */
+function _isBlank(s) {
+ return s === null || _isUndefined(s) || !s.trim();
+}
+
+/**
+ * Checks whether the `obj` parameter is defined or not.
+ *
+ * @param {Object} obj
+ * @returns {boolean} true if the object is undefined
+ * @private
+ */
+function _isUndefined(obj) {
+ return typeof obj === 'undefined';
+}
+
+/**
+ * Parses the function arguments.
+ *
+ * This function helps to have optional arguments.
+ *
+ * @param {(Options|null|undefined|Function)} options
+ * @param {?Function} callback
+ * @returns {Array} parsed arguments
+ * @private
+ */
+function _parseArguments(options, callback) {
+ /* istanbul ignore else */
+ if (typeof options === 'function') {
+ return [{}, options];
+ }
+
+ /* istanbul ignore else */
+ if (_isUndefined(options)) {
+ return [{}, callback];
+ }
+
+ // copy options so we do not leak the changes we make internally
+ const actualOptions = {};
+ for (const key of Object.getOwnPropertyNames(options)) {
+ actualOptions[key] = options[key];
+ }
+
+ return [actualOptions, callback];
+}
+
+/**
+ * Generates a new temporary name.
+ *
+ * @param {Object} opts
+ * @returns {string} the new random name according to opts
+ * @private
+ */
+function _generateTmpName(opts) {
+
+ const tmpDir = opts.tmpdir;
+
+ /* istanbul ignore else */
+ if (!_isUndefined(opts.name))
+ return path.join(tmpDir, opts.dir, opts.name);
+
+ /* istanbul ignore else */
+ if (!_isUndefined(opts.template))
+ return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6));
+
+ // prefix and postfix
+ const name = [
+ opts.prefix ? opts.prefix : 'tmp',
+ '-',
+ process.pid,
+ '-',
+ _randomChars(12),
+ opts.postfix ? '-' + opts.postfix : ''
+ ].join('');
+
+ return path.join(tmpDir, opts.dir, name);
+}
+
+/**
+ * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing
+ * options.
+ *
+ * @param {Options} options
+ * @private
+ */
+function _assertAndSanitizeOptions(options) {
+
+ options.tmpdir = _getTmpDir(options);
+
+ const tmpDir = options.tmpdir;
+
+ /* istanbul ignore else */
+ if (!_isUndefined(options.name))
+ _assertIsRelative(options.name, 'name', tmpDir);
+ /* istanbul ignore else */
+ if (!_isUndefined(options.dir))
+ _assertIsRelative(options.dir, 'dir', tmpDir);
+ /* istanbul ignore else */
+ if (!_isUndefined(options.template)) {
+ _assertIsRelative(options.template, 'template', tmpDir);
+ if (!options.template.match(TEMPLATE_PATTERN))
+ throw new Error(`Invalid template, found "${options.template}".`);
+ }
+ /* istanbul ignore else */
+ if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0)
+ throw new Error(`Invalid tries, found "${options.tries}".`);
+
+ // if a name was specified we will try once
+ options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1;
+ options.keep = !!options.keep;
+ options.detachDescriptor = !!options.detachDescriptor;
+ options.discardDescriptor = !!options.discardDescriptor;
+ options.unsafeCleanup = !!options.unsafeCleanup;
+
+ // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to
+ options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir));
+ options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir));
+ // sanitize further if template is relative to options.dir
+ options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template);
+
+ // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to
+ options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name);
+ options.prefix = _isUndefined(options.prefix) ? '' : options.prefix;
+ options.postfix = _isUndefined(options.postfix) ? '' : options.postfix;
+}
+
+/**
+ * Resolve the specified path name with respect to tmpDir.
+ *
+ * The specified name might include relative path components, e.g. ../
+ * so we need to resolve in order to be sure that it is located inside tmpDir
+ *
+ * @param name
+ * @param tmpDir
+ * @returns {string}
+ * @private
+ */
+function _resolvePath(name, tmpDir) {
+ const sanitizedName = _sanitizeName(name);
+ if (sanitizedName.startsWith(tmpDir)) {
+ return path.resolve(sanitizedName);
+ } else {
+ return path.resolve(path.join(tmpDir, sanitizedName));
+ }
+}
+
+/**
+ * Sanitize the specified path name by removing all quote characters.
+ *
+ * @param name
+ * @returns {string}
+ * @private
+ */
+function _sanitizeName(name) {
+ if (_isBlank(name)) {
+ return name;
+ }
+ return name.replace(/["']/g, '');
+}
+
+/**
+ * Asserts whether specified name is relative to the specified tmpDir.
+ *
+ * @param {string} name
+ * @param {string} option
+ * @param {string} tmpDir
+ * @throws {Error}
+ * @private
+ */
+function _assertIsRelative(name, option, tmpDir) {
+ if (option === 'name') {
+ // assert that name is not absolute and does not contain a path
+ if (path.isAbsolute(name))
+ throw new Error(`${option} option must not contain an absolute path, found "${name}".`);
+ // must not fail on valid . or .. or similar such constructs
+ let basename = path.basename(name);
+ if (basename === '..' || basename === '.' || basename !== name)
+ throw new Error(`${option} option must not contain a path, found "${name}".`);
+ }
+ else { // if (option === 'dir' || option === 'template') {
+ // assert that dir or template are relative to tmpDir
+ if (path.isAbsolute(name) && !name.startsWith(tmpDir)) {
+ throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`);
+ }
+ let resolvedPath = _resolvePath(name, tmpDir);
+ if (!resolvedPath.startsWith(tmpDir))
+ throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`);
+ }
+}
+
+/**
+ * Helper for testing against EBADF to compensate for changes made to Node 7.x under Windows.
+ *
+ * @private
+ */
+function _isEBADF(error) {
+ return _isExpectedError(error, -EBADF, 'EBADF');
+}
+
+/**
+ * Helper for testing against ENOENT to compensate for changes made to Node 7.x under Windows.
+ *
+ * @private
+ */
+function _isENOENT(error) {
+ return _isExpectedError(error, -ENOENT, 'ENOENT');
+}
+
+/**
+ * Helper to determine whether the expected error code matches the actual code and errno,
+ * which will differ between the supported node versions.
+ *
+ * - Node >= 7.0:
+ * error.code {string}
+ * error.errno {number} any numerical value will be negated
+ *
+ * CAVEAT
+ *
+ * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT
+ * is no different here.
+ *
+ * @param {SystemError} error
+ * @param {number} errno
+ * @param {string} code
+ * @private
+ */
+function _isExpectedError(error, errno, code) {
+ return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno;
+}
+
+/**
+ * Sets the graceful cleanup.
+ *
+ * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the
+ * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary
+ * object removals.
+ */
+function setGracefulCleanup() {
+ _gracefulCleanup = true;
+}
+
+/**
+ * Returns the currently configured tmp dir from os.tmpdir().
+ *
+ * @private
+ * @param {?Options} options
+ * @returns {string} the currently configured tmp dir
+ */
+function _getTmpDir(options) {
+ return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir()));
+}
+
+// Install process exit listener
+process.addListener(EXIT, _garbageCollector);
+
+/**
+ * Configuration options.
+ *
+ * @typedef {Object} Options
+ * @property {?boolean} keep the temporary object (file or dir) will not be garbage collected
+ * @property {?number} tries the number of tries before giving up on name generation
+ * @property {?number} mode the access mode, defaults are 0o700 for directories and 0o600 for files
+ * @property {?string} template the "mkstemp" like filename template
+ * @property {?string} name fixed name relative to tmpdir or the specified dir option
+ * @property {?string} dir tmp directory relative to the root tmp directory in use
+ * @property {?string} prefix prefix for the generated name
+ * @property {?string} postfix postfix for the generated name
+ * @property {?string} tmpdir the root tmp directory which overrides the os tmpdir
+ * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty
+ * @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection
+ * @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection
+ */
+
+/**
+ * @typedef {Object} FileSyncObject
+ * @property {string} name the name of the file
+ * @property {string} fd the file descriptor or -1 if the fd has been discarded
+ * @property {fileCallback} removeCallback the callback function to remove the file
+ */
+
+/**
+ * @typedef {Object} DirSyncObject
+ * @property {string} name the name of the directory
+ * @property {fileCallback} removeCallback the callback function to remove the directory
+ */
+
+/**
+ * @callback tmpNameCallback
+ * @param {?Error} err the error object if anything goes wrong
+ * @param {string} name the temporary file name
+ */
+
+/**
+ * @callback fileCallback
+ * @param {?Error} err the error object if anything goes wrong
+ * @param {string} name the temporary file name
+ * @param {number} fd the file descriptor or -1 if the fd had been discarded
+ * @param {cleanupCallback} fn the cleanup callback function
+ */
+
+/**
+ * @callback fileCallbackSync
+ * @param {?Error} err the error object if anything goes wrong
+ * @param {string} name the temporary file name
+ * @param {number} fd the file descriptor or -1 if the fd had been discarded
+ * @param {cleanupCallbackSync} fn the cleanup callback function
+ */
+
+/**
+ * @callback dirCallback
+ * @param {?Error} err the error object if anything goes wrong
+ * @param {string} name the temporary file name
+ * @param {cleanupCallback} fn the cleanup callback function
+ */
+
+/**
+ * @callback dirCallbackSync
+ * @param {?Error} err the error object if anything goes wrong
+ * @param {string} name the temporary file name
+ * @param {cleanupCallbackSync} fn the cleanup callback function
+ */
+
+/**
+ * Removes the created temporary file or directory.
+ *
+ * @callback cleanupCallback
+ * @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed
+ */
+
+/**
+ * Removes the created temporary file or directory.
+ *
+ * @callback cleanupCallbackSync
+ */
+
+/**
+ * Callback function for function composition.
+ * @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
+ *
+ * @callback simpleCallback
+ */
+
+// exporting all the needed methods
+
+// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will
+// allow users to reconfigure the temporary directory
+Object.defineProperty(module.exports, 'tmpdir', {
+ enumerable: true,
+ configurable: false,
+ get: function () {
+ return _getTmpDir();
+ }
+});
+
+module.exports.dir = dir;
+module.exports.dirSync = dirSync;
+
+module.exports.file = file;
+module.exports.fileSync = fileSync;
+
+module.exports.tmpName = tmpName;
+module.exports.tmpNameSync = tmpNameSync;
+
+module.exports.setGracefulCleanup = setGracefulCleanup;
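+
+// A small usage sketch of the exported API (callback parameter names are illustrative):
+//
+//   const tmp = require('tmp');
+//   tmp.setGracefulCleanup();
+//   tmp.dir((err, dirPath, cleanup) => {
+//     if (err) throw err;
+//     // ... use dirPath, then optionally call cleanup() ...
+//   });
+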
+
+
+/***/ }),
+
+/***/ 168:
+/***/ (function(module) {
+
+"use strict";
+
+const alias = ['stdin', 'stdout', 'stderr'];
+
+const hasAlias = opts => alias.some(x => Boolean(opts[x]));
+
+module.exports = opts => {
+ if (!opts) {
+ return null;
+ }
+
+ if (opts.stdio && hasAlias(opts)) {
+ throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`);
+ }
+
+ if (typeof opts.stdio === 'string') {
+ return opts.stdio;
+ }
+
+ const stdio = opts.stdio || [];
+
+ if (!Array.isArray(stdio)) {
+ throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
+ }
+
+ const result = [];
+ const len = Math.max(stdio.length, alias.length);
+
+ for (let i = 0; i < len; i++) {
+ let value = null;
+
+ if (stdio[i] !== undefined) {
+ value = stdio[i];
+ } else if (opts[alias[i]] !== undefined) {
+ value = opts[alias[i]];
+ }
+
+ result[i] = value;
+ }
+
+ return result;
+};
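+
+// Sketch of what the exported normalizer returns (the `normalizeStdio` name is
+// illustrative; the module exports an anonymous function):
+//
+//   normalizeStdio({ stdin: 'pipe', stdout: 'ignore' }) // -> ['pipe', 'ignore', null]
+//   normalizeStdio({ stdio: 'inherit' })                // -> 'inherit'
+//   normalizeStdio(null)                                // -> null
+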
+
+
/***/ }),
/***/ 180:
@@ -2363,20 +6542,78 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
+const github = __importStar(__webpack_require__(469));
const child_process_1 = __webpack_require__(129);
const fs = __importStar(__webpack_require__(747));
const path = __importStar(__webpack_require__(622));
+const tmp_1 = __webpack_require__(150);
const util_1 = __webpack_require__(669);
const cache_1 = __webpack_require__(722);
const install_1 = __webpack_require__(655);
const version_1 = __webpack_require__(52);
const execShellCommand = util_1.promisify(child_process_1.exec);
+const writeFile = util_1.promisify(fs.writeFile);
+const createTempDir = util_1.promisify(tmp_1.dir);
function prepareLint() {
return __awaiter(this, void 0, void 0, function* () {
const versionConfig = yield version_1.findLintVersion();
return yield install_1.installLint(versionConfig);
});
}
+function fetchPatch() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const onlyNewIssues = core.getInput(`only-new-issues`, { required: true }).trim();
+ if (onlyNewIssues !== `false` && onlyNewIssues !== `true`) {
+ throw new Error(`invalid value of "only-new-issues": "${onlyNewIssues}", expected "true" or "false"`);
+ }
+ if (onlyNewIssues === `false`) {
+ return ``;
+ }
+ const ctx = github.context;
+ if (ctx.eventName !== `pull_request`) {
+ core.info(`Not fetching patch for showing only new issues because it's not a pull request context: event name is ${ctx.eventName}`);
+ return ``;
+ }
+ const pull = ctx.payload.pull_request;
+ if (!pull) {
+ core.warning(`No pull request in context`);
+ return ``;
+ }
+ const octokit = new github.GitHub(core.getInput(`github-token`, { required: true }));
+ let patch;
+ try {
+ const patchResp = yield octokit.pulls.get({
+ owner: ctx.repo.owner,
+ repo: ctx.repo.repo,
+ [`pull_number`]: pull.number,
+ mediaType: {
+ format: `diff`,
+ },
+ });
+ if (patchResp.status !== 200) {
+ core.warning(`failed to fetch pull request patch: response status is ${patchResp.status}`);
+ return ``; // don't fail the action, but analyze without patch
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ patch = patchResp.data;
+ }
+ catch (err) {
+ console.warn(`failed to fetch pull request patch:`, err);
+ return ``; // don't fail the action, but analyze without patch
+ }
+ try {
+ const tempDir = yield createTempDir();
+ const patchPath = path.join(tempDir, "pull.patch");
+ core.info(`Writing patch to ${patchPath}`);
+ yield writeFile(patchPath, patch);
+ return patchPath;
+ }
+ catch (err) {
+ console.warn(`failed to save pull request patch:`, err);
+ return ``; // don't fail the action, but analyze without patch
+ }
+ });
+}
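+
+// Rough flow of fetchPatch() above (paths are illustrative):
+//
+//   only-new-issues == "true" and the event is a pull_request
+//     -> octokit.pulls.get(...) is called with the "diff" media type
+//     -> the patch is written to e.g. <tmp dir>/pull.patch
+//     -> that path is later passed to golangci-lint as --new-from-patch=<path>
+//   on any failure it returns "" and the lint run analyzes all issues instead
+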
function prepareEnv() {
return __awaiter(this, void 0, void 0, function* () {
const startedAt = Date.now();
@@ -2384,11 +6621,13 @@ function prepareEnv() {
const restoreCachePromise = cache_1.restoreCache();
const prepareLintPromise = prepareLint();
const installGoPromise = install_1.installGo();
+ const patchPromise = fetchPatch();
const lintPath = yield prepareLintPromise;
yield installGoPromise;
yield restoreCachePromise;
+ const patchPath = yield patchPromise;
core.info(`Prepared env in ${Date.now() - startedAt}ms`);
- return lintPath;
+ return { lintPath, patchPath };
});
}
const printOutput = (res) => {
@@ -2399,26 +6638,47 @@ const printOutput = (res) => {
core.info(res.stderr);
}
};
-function runLint(lintPath) {
+function runLint(lintPath, patchPath) {
return __awaiter(this, void 0, void 0, function* () {
const debug = core.getInput(`debug`);
if (debug.split(`,`).includes(`cache`)) {
const res = yield execShellCommand(`${lintPath} cache status`);
printOutput(res);
}
- const args = core.getInput(`args`);
- if (args.includes(`-out-format`)) {
+ const userArgs = core.getInput(`args`);
+ const addedArgs = [];
+ const userArgNames = new Set();
+ userArgs
+ .split(/\s/)
+ .map(arg => arg.split(`=`)[0])
+ .filter(arg => arg.startsWith(`-`))
+ .forEach(arg => {
+ userArgNames.add(arg.replace(`-`, ``));
+ });
+ if (userArgNames.has(`out-format`)) {
throw new Error(`please, don't change out-format for golangci-lint: it can be broken in a future`);
}
+ addedArgs.push(`--out-format=github-actions`);
+ if (patchPath && (userArgNames.has(`new`) || userArgNames.has(`new-from-rev`) || userArgNames.has(`new-from-patch`))) {
+        throw new Error(`please, don't specify --new* args manually when requesting only new issues`);
+ }
+ addedArgs.push(`--new-from-patch=${patchPath}`);
+ // Override config values.
+ addedArgs.push(`--new=false`);
+ addedArgs.push(`--new-from-rev=`);
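+
+    // Example of the argument assembly above (user input is illustrative): with
+    // args = "--issues-exit-code=0 ./..." and a non-empty patch path, the final
+    // command becomes roughly
+    //
+    //   <lintPath> run --out-format=github-actions --new-from-patch=<patch> \
+    //     --new=false --new-from-rev= --issues-exit-code=0 ./...
+    //
+    // while passing --new, --new-from-rev or --new-from-patch in `args` together
+    // with only-new-issues is rejected above.
+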
const workingDirectory = core.getInput(`working-directory`);
const cmdArgs = {};
- if (workingDirectory != ``) {
+ if (workingDirectory) {
+ if (patchPath) {
+ // TODO: make them compatible
+ throw new Error(`options working-directory and only-new-issues aren't compatible`);
+ }
if (!fs.existsSync(workingDirectory) || !fs.lstatSync(workingDirectory).isDirectory()) {
throw new Error(`working-directory (${workingDirectory}) was not a path`);
}
cmdArgs.cwd = path.resolve(workingDirectory);
}
- const cmd = `${lintPath} run --out-format=github-actions ${args}`.trimRight();
+ const cmd = `${lintPath} run ${addedArgs.join(` `)} ${userArgs}`.trimRight();
core.info(`Running [${cmd}] in [${cmdArgs.cwd}] ...`);
const startedAt = Date.now();
try {
@@ -2443,8 +6703,8 @@ function runLint(lintPath) {
function run() {
return __awaiter(this, void 0, void 0, function* () {
try {
- const lintPath = yield core.group(`prepare environment`, prepareEnv);
- yield core.group(`run golangci-lint`, () => runLint(lintPath));
+ const { lintPath, patchPath } = yield core.group(`prepare environment`, prepareEnv);
+ yield core.group(`run golangci-lint`, () => runLint(lintPath, patchPath));
}
catch (error) {
core.error(`Failed to run: ${error}, ${error.stack}`);
@@ -2467,6 +6727,54 @@ function postRun() {
exports.postRun = postRun;
+/***/ }),
+
+/***/ 197:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = isexe
+isexe.sync = sync
+
+var fs = __webpack_require__(747)
+
+function isexe (path, options, cb) {
+ fs.stat(path, function (er, stat) {
+ cb(er, er ? false : checkStat(stat, options))
+ })
+}
+
+function sync (path, options) {
+ return checkStat(fs.statSync(path), options)
+}
+
+function checkStat (stat, options) {
+ return stat.isFile() && checkMode(stat, options)
+}
+
+function checkMode (stat, options) {
+ var mod = stat.mode
+ var uid = stat.uid
+ var gid = stat.gid
+
+ var myUid = options.uid !== undefined ?
+ options.uid : process.getuid && process.getuid()
+ var myGid = options.gid !== undefined ?
+ options.gid : process.getgid && process.getgid()
+
+ var u = parseInt('100', 8)
+ var g = parseInt('010', 8)
+ var o = parseInt('001', 8)
+ var ug = u | g
+
+ var ret = (mod & o) ||
+ (mod & g) && gid === myGid ||
+ (mod & u) && uid === myUid ||
+ (mod & ug) && myUid === 0
+
+ return ret
+}
+
+
/***/ }),
/***/ 206:
@@ -2494,7 +6802,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
const path = __importStar(__webpack_require__(622));
const cacheHttpClient = __importStar(__webpack_require__(342));
-const constants_1 = __webpack_require__(929);
+const constants_1 = __webpack_require__(573);
const tar_1 = __webpack_require__(570);
const utils = __importStar(__webpack_require__(867));
function run() {
@@ -2577,9 +6885,32 @@ exports.default = run;
/***/ }),
/***/ 211:
-/***/ (function(module) {
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var osName = _interopDefault(__webpack_require__(2));
+
+function getUserAgent() {
+ try {
+ return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
+ } catch (error) {
+ if (/wmic os get Caption/.test(error.message)) {
+ return "Windows ";
+ }
+
+ return "";
+ }
+}
+
+exports.getUserAgent = getUserAgent;
+//# sourceMappingURL=index.js.map
-module.exports = require("https");
/***/ }),
@@ -2647,9 +6978,24549 @@ class PersonalAccessTokenCredentialHandler {
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
+/***/ }),
+
+/***/ 240:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationRequestError;
+
+const { RequestError } = __webpack_require__(102);
+
+function authenticationRequestError(state, error, options) {
+ /* istanbul ignore next */
+ if (!error.headers) throw error;
+
+ const otpRequired = /required/.test(error.headers["x-github-otp"] || "");
+ // handle "2FA required" error only
+ if (error.status !== 401 || !otpRequired) {
+ throw error;
+ }
+
+ if (
+ error.status === 401 &&
+ otpRequired &&
+ error.request &&
+ error.request.headers["x-github-otp"]
+ ) {
+ throw new RequestError(
+ "Invalid one-time password for two-factor authentication",
+ 401,
+ {
+ headers: error.headers,
+ request: options
+ }
+ );
+ }
+
+ if (typeof state.auth.on2fa !== "function") {
+ throw new RequestError(
+ "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication",
+ 401,
+ {
+ headers: error.headers,
+ request: options
+ }
+ );
+ }
+
+ return Promise.resolve()
+ .then(() => {
+ return state.auth.on2fa();
+ })
+ .then(oneTimePassword => {
+ const newOptions = Object.assign(options, {
+ headers: Object.assign(
+ { "x-github-otp": oneTimePassword },
+ options.headers
+ )
+ });
+ return state.octokit.request(newOptions);
+ });
+}
+
+
+/***/ }),
+
+/***/ 245:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = globSync
+globSync.GlobSync = GlobSync
+
+var fs = __webpack_require__(747)
+var rp = __webpack_require__(302)
+var minimatch = __webpack_require__(93)
+var Minimatch = minimatch.Minimatch
+var Glob = __webpack_require__(402).Glob
+var util = __webpack_require__(669)
+var path = __webpack_require__(622)
+var assert = __webpack_require__(357)
+var isAbsolute = __webpack_require__(681)
+var common = __webpack_require__(644)
+var alphasort = common.alphasort
+var alphasorti = common.alphasorti
+var setopts = common.setopts
+var ownProp = common.ownProp
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+function globSync (pattern, options) {
+ if (typeof options === 'function' || arguments.length === 3)
+ throw new TypeError('callback provided to sync glob\n'+
+ 'See: https://github.com/isaacs/node-glob/issues/167')
+
+ return new GlobSync(pattern, options).found
+}
+
+function GlobSync (pattern, options) {
+ if (!pattern)
+ throw new Error('must provide pattern')
+
+ if (typeof options === 'function' || arguments.length === 3)
+ throw new TypeError('callback provided to sync glob\n'+
+ 'See: https://github.com/isaacs/node-glob/issues/167')
+
+ if (!(this instanceof GlobSync))
+ return new GlobSync(pattern, options)
+
+ setopts(this, pattern, options)
+
+ if (this.noprocess)
+ return this
+
+ var n = this.minimatch.set.length
+ this.matches = new Array(n)
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false)
+ }
+ this._finish()
+}
+
+GlobSync.prototype._finish = function () {
+ assert(this instanceof GlobSync)
+ if (this.realpath) {
+ var self = this
+ this.matches.forEach(function (matchset, index) {
+ var set = self.matches[index] = Object.create(null)
+ for (var p in matchset) {
+ try {
+ p = self._makeAbs(p)
+ var real = rp.realpathSync(p, self.realpathCache)
+ set[real] = true
+ } catch (er) {
+ if (er.syscall === 'stat')
+ set[self._makeAbs(p)] = true
+ else
+ throw er
+ }
+ }
+ })
+ }
+ common.finish(this)
+}
+
+
+GlobSync.prototype._process = function (pattern, index, inGlobStar) {
+ assert(this instanceof GlobSync)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // See if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ //if ignored, skip processing
+ if (childrenIgnored(this, read))
+ return
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
+}
+
+
+GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
+ var entries = this._readdir(abs, inGlobStar)
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix.slice(-1) !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ var newPattern
+ if (prefix)
+ newPattern = [prefix, e]
+ else
+ newPattern = [e]
+ this._process(newPattern.concat(remain), index, inGlobStar)
+ }
+}
+
+
+GlobSync.prototype._emitMatch = function (index, e) {
+ if (isIgnored(this, e))
+ return
+
+ var abs = this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute) {
+ e = abs
+ }
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ if (this.stat)
+ this._stat(e)
+}
+
+
+GlobSync.prototype._readdirInGlobStar = function (abs) {
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false)
+
+ var entries
+ var lstat
+ var stat
+ try {
+ lstat = fs.lstatSync(abs)
+ } catch (er) {
+ if (er.code === 'ENOENT') {
+ // lstat failed, doesn't exist
+ return null
+ }
+ }
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ this.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory())
+ this.cache[abs] = 'FILE'
+ else
+ entries = this._readdir(abs, false)
+
+ return entries
+}
+
+GlobSync.prototype._readdir = function (abs, inGlobStar) {
+ var entries
+
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return null
+
+ if (Array.isArray(c))
+ return c
+ }
+
+ try {
+ return this._readdirEntries(abs, fs.readdirSync(abs))
+ } catch (er) {
+ this._readdirError(abs, er)
+ return null
+ }
+}
+
+GlobSync.prototype._readdirEntries = function (abs, entries) {
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+
+ // mark and cache dir-ness
+ return entries
+}
+
+GlobSync.prototype._readdirError = function (f, er) {
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ throw error
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict)
+ throw er
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+}
+
+GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
+
+ var entries = this._readdir(abs, inGlobStar)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false)
+
+ var len = entries.length
+ var isSym = this.symlinks[abs]
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true)
+ }
+}
+
+GlobSync.prototype._processSimple = function (prefix, index) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var exists = this._stat(prefix)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+}
+
+// Returns either 'DIR', 'FILE', or false
+GlobSync.prototype._stat = function (f) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return false
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return c
+
+ if (needDir && c === 'FILE')
+ return false
+
+ // otherwise we have to stat, because maybe c=true
+ // if we know it exists, but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (!stat) {
+ var lstat
+ try {
+ lstat = fs.lstatSync(abs)
+ } catch (er) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return false
+ }
+ }
+
+ if (lstat && lstat.isSymbolicLink()) {
+ try {
+ stat = fs.statSync(abs)
+ } catch (er) {
+ stat = lstat
+ }
+ } else {
+ stat = lstat
+ }
+ }
+
+ this.statCache[abs] = stat
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return false
+
+ return c
+}
+
+GlobSync.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+GlobSync.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
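+
+// Typical synchronous use of this module (pattern and options are illustrative):
+//
+//   const globSync = require('glob').sync;
+//   const files = globSync('**/*.js', { nodir: true, dot: false });
+//   // -> array of matching paths, resolved relative to process.cwd()
+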
+
+
+/***/ }),
+
+/***/ 260:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// Note: since nyc uses this module to output coverage, any lines
+// that are in the direct sync flow of nyc's outputCoverage are
+// ignored, since we can never get coverage for them.
+var assert = __webpack_require__(357)
+var signals = __webpack_require__(654)
+var isWin = /^win/i.test(process.platform)
+
+var EE = __webpack_require__(614)
+/* istanbul ignore if */
+if (typeof EE !== 'function') {
+ EE = EE.EventEmitter
+}
+
+var emitter
+if (process.__signal_exit_emitter__) {
+ emitter = process.__signal_exit_emitter__
+} else {
+ emitter = process.__signal_exit_emitter__ = new EE()
+ emitter.count = 0
+ emitter.emitted = {}
+}
+
+// Because this emitter is a global, we have to check to see if a
+// previous version of this library failed to enable infinite listeners.
+// I know what you're about to say. But literally everything about
+// signal-exit is a compromise with evil. Get used to it.
+if (!emitter.infinite) {
+ emitter.setMaxListeners(Infinity)
+ emitter.infinite = true
+}
+
+module.exports = function (cb, opts) {
+ assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler')
+
+ if (loaded === false) {
+ load()
+ }
+
+ var ev = 'exit'
+ if (opts && opts.alwaysLast) {
+ ev = 'afterexit'
+ }
+
+ var remove = function () {
+ emitter.removeListener(ev, cb)
+ if (emitter.listeners('exit').length === 0 &&
+ emitter.listeners('afterexit').length === 0) {
+ unload()
+ }
+ }
+ emitter.on(ev, cb)
+
+ return remove
+}
+
+module.exports.unload = unload
+function unload () {
+ if (!loaded) {
+ return
+ }
+ loaded = false
+
+ signals.forEach(function (sig) {
+ try {
+ process.removeListener(sig, sigListeners[sig])
+ } catch (er) {}
+ })
+ process.emit = originalProcessEmit
+ process.reallyExit = originalProcessReallyExit
+ emitter.count -= 1
+}
+
+function emit (event, code, signal) {
+ if (emitter.emitted[event]) {
+ return
+ }
+ emitter.emitted[event] = true
+ emitter.emit(event, code, signal)
+}
+
+// { <signal>: <listener fn>, ... }
+var sigListeners = {}
+signals.forEach(function (sig) {
+ sigListeners[sig] = function listener () {
+ // If there are no other listeners, an exit is coming!
+ // Simplest way: remove us and then re-send the signal.
+ // We know that this will kill the process, so we can
+ // safely emit now.
+ var listeners = process.listeners(sig)
+ if (listeners.length === emitter.count) {
+ unload()
+ emit('exit', null, sig)
+ /* istanbul ignore next */
+ emit('afterexit', null, sig)
+ /* istanbul ignore next */
+ if (isWin && sig === 'SIGHUP') {
+ // "SIGHUP" throws an `ENOSYS` error on Windows,
+ // so use a supported signal instead
+ sig = 'SIGINT'
+ }
+ process.kill(process.pid, sig)
+ }
+ }
+})
+
+module.exports.signals = function () {
+ return signals
+}
+
+module.exports.load = load
+
+var loaded = false
+
+function load () {
+ if (loaded) {
+ return
+ }
+ loaded = true
+
+ // This is the number of onSignalExit's that are in play.
+ // It's important so that we can count the correct number of
+ // listeners on signals, and don't wait for the other one to
+ // handle it instead of us.
+ emitter.count += 1
+
+ signals = signals.filter(function (sig) {
+ try {
+ process.on(sig, sigListeners[sig])
+ return true
+ } catch (er) {
+ return false
+ }
+ })
+
+ process.emit = processEmit
+ process.reallyExit = processReallyExit
+}
+
+var originalProcessReallyExit = process.reallyExit
+function processReallyExit (code) {
+ process.exitCode = code || 0
+ emit('exit', process.exitCode, null)
+ /* istanbul ignore next */
+ emit('afterexit', process.exitCode, null)
+ /* istanbul ignore next */
+ originalProcessReallyExit.call(process, process.exitCode)
+}
+
+var originalProcessEmit = process.emit
+function processEmit (ev, arg) {
+ if (ev === 'exit') {
+ if (arg !== undefined) {
+ process.exitCode = arg
+ }
+ var ret = originalProcessEmit.apply(this, arguments)
+ emit('exit', process.exitCode, null)
+ /* istanbul ignore next */
+ emit('afterexit', process.exitCode, null)
+ return ret
+ } else {
+ return originalProcessEmit.apply(this, arguments)
+ }
+}
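+
+// Typical use of signal-exit (handler body is illustrative):
+//
+//   const onExit = require('signal-exit');
+//   const remove = onExit((code, signal) => {
+//     // runs on normal process exit and on fatal signals
+//   });
+//   // calling remove() unregisters the handler again
+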
+
+
+/***/ }),
+
+/***/ 262:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const fs_1 = __webpack_require__(747);
+const os_1 = __webpack_require__(87);
+class Context {
+ /**
+ * Hydrate the context from the environment
+ */
+ constructor() {
+ this.payload = {};
+ if (process.env.GITHUB_EVENT_PATH) {
+ if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
+ this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
+ }
+ else {
+ const path = process.env.GITHUB_EVENT_PATH;
+ process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
+ }
+ }
+ this.eventName = process.env.GITHUB_EVENT_NAME;
+ this.sha = process.env.GITHUB_SHA;
+ this.ref = process.env.GITHUB_REF;
+ this.workflow = process.env.GITHUB_WORKFLOW;
+ this.action = process.env.GITHUB_ACTION;
+ this.actor = process.env.GITHUB_ACTOR;
+ }
+ get issue() {
+ const payload = this.payload;
+ return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
+ }
+ get repo() {
+ if (process.env.GITHUB_REPOSITORY) {
+ const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
+ return { owner, repo };
+ }
+ if (this.payload.repository) {
+ return {
+ owner: this.payload.repository.owner.login,
+ repo: this.payload.repository.name
+ };
+ }
+ throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
+ }
+}
+exports.Context = Context;
+//# sourceMappingURL=context.js.map
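+
+// Sketch of what Context exposes inside a workflow run (values are illustrative):
+//
+//   const ctx = new Context();
+//   ctx.eventName            // e.g. 'pull_request' (from GITHUB_EVENT_NAME)
+//   ctx.repo                 // e.g. { owner: 'golangci', repo: 'golangci-lint-action' }
+//   ctx.payload.pull_request // parsed from the JSON file at GITHUB_EVENT_PATH
+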
+
+/***/ }),
+
+/***/ 265:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = getPage
+
+const deprecate = __webpack_require__(370)
+const getPageLinks = __webpack_require__(577)
+const HttpError = __webpack_require__(297)
+
+function getPage (octokit, link, which, headers) {
+ deprecate(`octokit.get${which.charAt(0).toUpperCase() + which.slice(1)}Page() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
+ const url = getPageLinks(link)[which]
+
+ if (!url) {
+ const urlError = new HttpError(`No ${which} page found`, 404)
+ return Promise.reject(urlError)
+ }
+
+ const requestOptions = {
+ url,
+ headers: applyAcceptHeader(link, headers)
+ }
+
+ const promise = octokit.request(requestOptions)
+
+ return promise
+}
+
+function applyAcceptHeader (res, headers) {
+ const previous = res.headers && res.headers['x-github-media-type']
+
+ if (!previous || (headers && headers.accept)) {
+ return headers
+ }
+ headers = headers || {}
+ headers.accept = 'application/vnd.' + previous
+ .replace('; param=', '.')
+ .replace('; format=', '+')
+
+ return headers
+}
+
+
/***/ }),
/***/ 280:
+/***/ (function(module) {
+
+module.exports = register
+
+function register (state, name, method, options) {
+ if (typeof method !== 'function') {
+ throw new Error('method for before hook must be a function')
+ }
+
+ if (!options) {
+ options = {}
+ }
+
+ if (Array.isArray(name)) {
+ return name.reverse().reduce(function (callback, name) {
+ return register.bind(null, state, name, callback, options)
+ }, method)()
+ }
+
+ return Promise.resolve()
+ .then(function () {
+ if (!state.registry[name]) {
+ return method(options)
+ }
+
+ return (state.registry[name]).reduce(function (method, registered) {
+ return registered.hook.bind(null, method, options)
+ }, method)()
+ })
+}
+
+
+/***/ }),
+
+/***/ 281:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const internal_globber_1 = __webpack_require__(760);
+/**
+ * Constructs a globber
+ *
+ * @param patterns Patterns separated by newlines
+ * @param options Glob options
+ */
+function create(patterns, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return yield internal_globber_1.DefaultGlobber.create(patterns, options);
+ });
+}
+exports.create = create;
+//# sourceMappingURL=glob.js.map
+
+/***/ }),
+
+/***/ 284:
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = balanced;
+function balanced(a, b, str) {
+ if (a instanceof RegExp) a = maybeMatch(a, str);
+ if (b instanceof RegExp) b = maybeMatch(b, str);
+
+ var r = range(a, b, str);
+
+ return r && {
+ start: r[0],
+ end: r[1],
+ pre: str.slice(0, r[0]),
+ body: str.slice(r[0] + a.length, r[1]),
+ post: str.slice(r[1] + b.length)
+ };
+}
+
+function maybeMatch(reg, str) {
+ var m = str.match(reg);
+ return m ? m[0] : null;
+}
+
+balanced.range = range;
+function range(a, b, str) {
+ var begs, beg, left, right, result;
+ var ai = str.indexOf(a);
+ var bi = str.indexOf(b, ai + 1);
+ var i = ai;
+
+ if (ai >= 0 && bi > 0) {
+ begs = [];
+ left = str.length;
+
+ while (i >= 0 && !result) {
+ if (i == ai) {
+ begs.push(i);
+ ai = str.indexOf(a, i + 1);
+ } else if (begs.length == 1) {
+ result = [ begs.pop(), bi ];
+ } else {
+ beg = begs.pop();
+ if (beg < left) {
+ left = beg;
+ right = bi;
+ }
+
+ bi = str.indexOf(b, i + 1);
+ }
+
+ i = ai < bi && ai >= 0 ? ai : bi;
+ }
+
+ if (begs.length) {
+ result = [ left, right ];
+ }
+ }
+
+ return result;
+}
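+
+// Example of what balanced() returns (input is illustrative):
+//
+//   balanced('{', '}', 'pre{body}post')
+//   // -> { start: 3, end: 8, pre: 'pre', body: 'body', post: 'post' }
+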
+
+
+/***/ }),
+
+/***/ 297:
+/***/ (function(module) {
+
+module.exports = class HttpError extends Error {
+ constructor (message, code, headers) {
+ super(message)
+
+ // Maintains proper stack trace (only available on V8)
+ /* istanbul ignore next */
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor)
+ }
+
+ this.name = 'HttpError'
+ this.code = code
+ this.headers = headers
+ }
+}
+
+
+/***/ }),
+
+/***/ 302:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = realpath
+realpath.realpath = realpath
+realpath.sync = realpathSync
+realpath.realpathSync = realpathSync
+realpath.monkeypatch = monkeypatch
+realpath.unmonkeypatch = unmonkeypatch
+
+var fs = __webpack_require__(747)
+var origRealpath = fs.realpath
+var origRealpathSync = fs.realpathSync
+
+var version = process.version
+var ok = /^v[0-5]\./.test(version)
+var old = __webpack_require__(117)
+
+function newError (er) {
+ return er && er.syscall === 'realpath' && (
+ er.code === 'ELOOP' ||
+ er.code === 'ENOMEM' ||
+ er.code === 'ENAMETOOLONG'
+ )
+}
+
+function realpath (p, cache, cb) {
+ if (ok) {
+ return origRealpath(p, cache, cb)
+ }
+
+ if (typeof cache === 'function') {
+ cb = cache
+ cache = null
+ }
+ origRealpath(p, cache, function (er, result) {
+ if (newError(er)) {
+ old.realpath(p, cache, cb)
+ } else {
+ cb(er, result)
+ }
+ })
+}
+
+function realpathSync (p, cache) {
+ if (ok) {
+ return origRealpathSync(p, cache)
+ }
+
+ try {
+ return origRealpathSync(p, cache)
+ } catch (er) {
+ if (newError(er)) {
+ return old.realpathSync(p, cache)
+ } else {
+ throw er
+ }
+ }
+}
+
+function monkeypatch () {
+ fs.realpath = realpath
+ fs.realpathSync = realpathSync
+}
+
+function unmonkeypatch () {
+ fs.realpath = origRealpath
+ fs.realpathSync = origRealpathSync
+}
+
+
+/***/ }),
+
+/***/ 306:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var concatMap = __webpack_require__(896);
+var balanced = __webpack_require__(284);
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
+
+function identity(e) {
+ return e;
+}
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m || /\$$/.test(m.pre)) return [str];
+
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
+ }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
+ }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+ var width = Math.max(n[0].length, n[1].length)
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
+ }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
+ }
+ }
+ N.push(c);
+ }
+ } else {
+ N = concatMap(n, function(el) { return expand(el, false) });
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
+ }
+ }
+
+ return expansions;
+}
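+
+// Examples of the brace expansion implemented above (inputs are illustrative):
+//
+//   expandTop('a{b,c}d') // -> ['abd', 'acd']
+//   expandTop('{1..3}')  // -> ['1', '2', '3']
+//   expandTop('a{}b')    // -> ['a{}b']  (empty braces are not expanded)
+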
+
+
+
+/***/ }),
+
+/***/ 315:
+/***/ (function(module) {
+
+if (typeof Object.create === 'function') {
+ // implementation from standard node.js 'util' module
+ module.exports = function inherits(ctor, superCtor) {
+ if (superCtor) {
+ ctor.super_ = superCtor
+ ctor.prototype = Object.create(superCtor.prototype, {
+ constructor: {
+ value: ctor,
+ enumerable: false,
+ writable: true,
+ configurable: true
+ }
+ })
+ }
+ };
+} else {
+ // old school shim for old browsers
+ module.exports = function inherits(ctor, superCtor) {
+ if (superCtor) {
+ ctor.super_ = superCtor
+ var TempCtor = function () {}
+ TempCtor.prototype = superCtor.prototype
+ ctor.prototype = new TempCtor()
+ ctor.prototype.constructor = ctor
+ }
+ }
+}
+
+
+/***/ }),
+
+/***/ 322:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+const VERSION = "1.1.2";
+
+/**
+ * Some “list” responses that can be paginated have a different response structure
+ *
+ * They have a `total_count` key in the response (search also has `incomplete_results`,
+ * /installation/repositories also has `repository_selection`), as well as a key with
+ * the list of the items, whose name varies from endpoint to endpoint:
+ *
+ * - https://developer.github.com/v3/search/#example (key `items`)
+ * - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`)
+ * - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`)
+ * - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`)
+ * - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`)
+ *
+ * Octokit normalizes these responses so that paginated results are always returned following
+ * the same structure. One challenge is that if the list response has only one page, no Link
+ * header is provided, so this header alone is not sufficient to check whether a response is
+ * paginated or not. For the exceptions with the namespace, a fallback check for the route
+ * paths has to be added in order to normalize the response. We cannot check for the total_count
+ * property because it also exists in the response of Get the combined status for a specific ref.
+ */
+const REGEX = [/^\/search\//, /^\/repos\/[^/]+\/[^/]+\/commits\/[^/]+\/(check-runs|check-suites)([^/]|$)/, /^\/installation\/repositories([^/]|$)/, /^\/user\/installations([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/secrets([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/workflows(\/[^/]+\/runs)?([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/runs(\/[^/]+\/(artifacts|jobs))?([^/]|$)/];
+function normalizePaginatedListResponse(octokit, url, response) {
+ const path = url.replace(octokit.request.endpoint.DEFAULTS.baseUrl, "");
+ const responseNeedsNormalization = REGEX.find(regex => regex.test(path));
+ if (!responseNeedsNormalization) return; // keep the additional properties intact as there is currently no other way
+ // to retrieve the same information.
+
+ const incompleteResults = response.data.incomplete_results;
+ const repositorySelection = response.data.repository_selection;
+ const totalCount = response.data.total_count;
+ delete response.data.incomplete_results;
+ delete response.data.repository_selection;
+ delete response.data.total_count;
+ const namespaceKey = Object.keys(response.data)[0];
+ const data = response.data[namespaceKey];
+ response.data = data;
+
+ if (typeof incompleteResults !== "undefined") {
+ response.data.incomplete_results = incompleteResults;
+ }
+
+ if (typeof repositorySelection !== "undefined") {
+ response.data.repository_selection = repositorySelection;
+ }
+
+ response.data.total_count = totalCount;
+ Object.defineProperty(response.data, namespaceKey, {
+ get() {
+ octokit.log.warn(`[@octokit/paginate-rest] "response.data.${namespaceKey}" is deprecated for "GET ${path}". Get the results directly from "response.data"`);
+ return Array.from(data);
+ }
+
+ });
+}
+
+function iterator(octokit, route, parameters) {
+ const options = octokit.request.endpoint(route, parameters);
+ const method = options.method;
+ const headers = options.headers;
+ let url = options.url;
+ return {
+ [Symbol.asyncIterator]: () => ({
+ next() {
+ if (!url) {
+ return Promise.resolve({
+ done: true
+ });
+ }
+
+ return octokit.request({
+ method,
+ url,
+ headers
+ }).then(response => {
+ normalizePaginatedListResponse(octokit, url, response); // `response.headers.link` format:
+                        // '<url of next page>; rel="next", <url of last page>; rel="last"'
+ // sets `url` to undefined if "next" URL is not present or `link` header is not set
+
+ url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
+ return {
+ value: response
+ };
+ });
+ }
+
+ })
+ };
+}
+
+function paginate(octokit, route, parameters, mapFn) {
+ if (typeof parameters === "function") {
+ mapFn = parameters;
+ parameters = undefined;
+ }
+
+ return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
+}
+
+function gather(octokit, results, iterator, mapFn) {
+ return iterator.next().then(result => {
+ if (result.done) {
+ return results;
+ }
+
+ let earlyExit = false;
+
+ function done() {
+ earlyExit = true;
+ }
+
+ results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);
+
+ if (earlyExit) {
+ return results;
+ }
+
+ return gather(octokit, results, iterator, mapFn);
+ });
+}
+
+/**
+ * @param octokit Octokit instance
+ * @param options Options passed to Octokit constructor
+ */
+
+function paginateRest(octokit) {
+ return {
+ paginate: Object.assign(paginate.bind(null, octokit), {
+ iterator: iterator.bind(null, octokit)
+ })
+ };
+}
+paginateRest.VERSION = VERSION;
+
+exports.paginateRest = paginateRest;
+//# sourceMappingURL=index.js.map
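+
+// Typical use of the paginate plugin on an Octokit instance (route and values
+// are illustrative; call from inside an async function):
+//
+//   const issues = await octokit.paginate("GET /repos/:owner/:repo/issues", {
+//     owner: "octocat",
+//     repo: "hello-world",
+//     per_page: 100,
+//   });
+//   // -> a single flat array gathered across all pages
+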
+
+
+/***/ }),
+
+/***/ 323:
+/***/ (function(module) {
+
+"use strict";
+
+
+var isStream = module.exports = function (stream) {
+ return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function';
+};
+
+isStream.writable = function (stream) {
+ return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object';
+};
+
+isStream.readable = function (stream) {
+ return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object';
+};
+
+isStream.duplex = function (stream) {
+ return isStream.writable(stream) && isStream.readable(stream);
+};
+
+isStream.transform = function (stream) {
+ return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object';
+};
+
+
+/***/ }),
+
+/***/ 327:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+/**
+ * Indicates whether a pattern matches a path
+ */
+var MatchKind;
+(function (MatchKind) {
+ /** Not matched */
+ MatchKind[MatchKind["None"] = 0] = "None";
+ /** Matched if the path is a directory */
+ MatchKind[MatchKind["Directory"] = 1] = "Directory";
+ /** Matched if the path is a regular file */
+ MatchKind[MatchKind["File"] = 2] = "File";
+ /** Matched */
+ MatchKind[MatchKind["All"] = 3] = "All";
+})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
+//# sourceMappingURL=internal-match-kind.js.map
+
+/***/ }),
+
+/***/ 336:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = hasLastPage
+
+const deprecate = __webpack_require__(370)
+const getPageLinks = __webpack_require__(577)
+
+function hasLastPage (link) {
+ deprecate(`octokit.hasLastPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
+ return getPageLinks(link).last
+}
+
+
+/***/ }),
+
+/***/ 341:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = octokitValidate;
+
+const validate = __webpack_require__(63);
+
+function octokitValidate(octokit) {
+ octokit.hook.before("request", validate.bind(null, octokit));
+}
+
+
+/***/ }),
+
+/***/ 342:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const http_client_1 = __webpack_require__(539);
+const auth_1 = __webpack_require__(226);
+const crypto = __importStar(__webpack_require__(417));
+const fs = __importStar(__webpack_require__(747));
+const stream = __importStar(__webpack_require__(413));
+const util = __importStar(__webpack_require__(669));
+const constants_1 = __webpack_require__(573);
+const utils = __importStar(__webpack_require__(867));
+const versionSalt = "1.0";
+function isSuccessStatusCode(statusCode) {
+ if (!statusCode) {
+ return false;
+ }
+ return statusCode >= 200 && statusCode < 300;
+}
+function isRetryableStatusCode(statusCode) {
+ if (!statusCode) {
+ return false;
+ }
+ const retryableStatusCodes = [
+ http_client_1.HttpCodes.BadGateway,
+ http_client_1.HttpCodes.ServiceUnavailable,
+ http_client_1.HttpCodes.GatewayTimeout
+ ];
+ return retryableStatusCodes.includes(statusCode);
+}
+function getCacheApiUrl(resource) {
+ // Ideally we just use ACTIONS_CACHE_URL
+ const baseUrl = (process.env["ACTIONS_CACHE_URL"] ||
+ process.env["ACTIONS_RUNTIME_URL"] ||
+ "").replace("pipelines", "artifactcache");
+ if (!baseUrl) {
+ throw new Error("Cache Service Url not found, unable to restore cache.");
+ }
+ const url = `${baseUrl}_apis/artifactcache/${resource}`;
+ core.debug(`Resource Url: ${url}`);
+ return url;
+}
+function createAcceptHeader(type, apiVersion) {
+ return `${type};api-version=${apiVersion}`;
+}
+function getRequestOptions() {
+ const requestOptions = {
+ headers: {
+ Accept: createAcceptHeader("application/json", "6.0-preview.1")
+ }
+ };
+ return requestOptions;
+}
+function createHttpClient() {
+ const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
+ const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
+ return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions());
+}
+function getCacheVersion() {
+ // Add salt to cache version to support breaking changes in cache entry
+ const components = [
+ core.getInput(constants_1.Inputs.Path, { required: true }),
+ versionSalt
+ ];
+ return crypto
+ .createHash("sha256")
+ .update(components.join("|"))
+ .digest("hex");
+}
+exports.getCacheVersion = getCacheVersion;
+function getCacheEntry(keys) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpClient = createHttpClient();
+ const version = getCacheVersion();
+ const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`;
+ const response = yield httpClient.getJson(getCacheApiUrl(resource));
+ if (response.statusCode === 204) {
+ return null;
+ }
+ if (!isSuccessStatusCode(response.statusCode)) {
+ throw new Error(`Cache service responded with ${response.statusCode}`);
+ }
+ const cacheResult = response.result;
+ const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
+ if (!cacheDownloadUrl) {
+ throw new Error("Cache not found.");
+ }
+ core.setSecret(cacheDownloadUrl);
+ core.debug(`Cache Result:`);
+ core.debug(JSON.stringify(cacheResult));
+ return cacheResult;
+ });
+}
+exports.getCacheEntry = getCacheEntry;
+function pipeResponseToStream(response, output) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const pipeline = util.promisify(stream.pipeline);
+ yield pipeline(response.message, output);
+ });
+}
+function downloadCache(archiveLocation, archivePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const stream = fs.createWriteStream(archivePath);
+ const httpClient = new http_client_1.HttpClient("actions/cache");
+ const downloadResponse = yield httpClient.get(archiveLocation);
+ // Abort download if no traffic received over the socket.
+ downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
+ downloadResponse.message.destroy();
+ core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
+ });
+ yield pipeResponseToStream(downloadResponse, stream);
+ // Validate download size.
+ const contentLengthHeader = downloadResponse.message.headers["content-length"];
+ if (contentLengthHeader) {
+ const expectedLength = parseInt(contentLengthHeader);
+ const actualLength = utils.getArchiveFileSize(archivePath);
+ if (actualLength != expectedLength) {
+ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
+ }
+ }
+ else {
+ core.debug("Unable to validate download, no Content-Length header");
+ }
+ });
+}
+exports.downloadCache = downloadCache;
+// Reserve Cache
+function reserveCache(key) {
+ var _a, _b;
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpClient = createHttpClient();
+ const version = getCacheVersion();
+ const reserveCacheRequest = {
+ key,
+ version
+ };
+ const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest);
+ return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1;
+ });
+}
+exports.reserveCache = reserveCache;
+function getContentRange(start, end) {
+    // Format: `bytes start-end/filesize`
+ // start and end are inclusive
+ // filesize can be *
+ // For a 200 byte chunk starting at byte 0:
+ // Content-Range: bytes 0-199/*
+ return `bytes ${start}-${end}/*`;
+}
+function uploadChunk(httpClient, resourceUrl, data, start, end) {
+ return __awaiter(this, void 0, void 0, function* () {
+ core.debug(`Uploading chunk of size ${end -
+ start +
+ 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+ const additionalHeaders = {
+ "Content-Type": "application/octet-stream",
+ "Content-Range": getContentRange(start, end)
+ };
+ const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
+ return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders);
+ });
+ const response = yield uploadChunkRequest();
+ if (isSuccessStatusCode(response.message.statusCode)) {
+ return;
+ }
+ if (isRetryableStatusCode(response.message.statusCode)) {
+ core.debug(`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`);
+ const retryResponse = yield uploadChunkRequest();
+ if (isSuccessStatusCode(retryResponse.message.statusCode)) {
+ return;
+ }
+ }
+ throw new Error(`Cache service responded with ${response.message.statusCode} during chunk upload.`);
+ });
+}
+function parseEnvNumber(key) {
+ const value = Number(process.env[key]);
+ if (Number.isNaN(value) || value < 0) {
+ return undefined;
+ }
+ return value;
+}
+function uploadFile(httpClient, cacheId, archivePath) {
+ var _a, _b;
+ return __awaiter(this, void 0, void 0, function* () {
+ // Upload Chunks
+ const fileSize = fs.statSync(archivePath).size;
+ const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
+ const fd = fs.openSync(archivePath, "r");
+ const concurrency = (_a = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY")) !== null && _a !== void 0 ? _a : 4; // # of HTTP requests in parallel
+ const MAX_CHUNK_SIZE = (_b = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE")) !== null && _b !== void 0 ? _b : 32 * 1024 * 1024; // 32 MB Chunks
+ core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
+ const parallelUploads = [...new Array(concurrency).keys()];
+ core.debug("Awaiting all uploads");
+ let offset = 0;
+ try {
+ yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
+ while (offset < fileSize) {
+ const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+ const start = offset;
+ const end = offset + chunkSize - 1;
+ offset += MAX_CHUNK_SIZE;
+ const chunk = fs.createReadStream(archivePath, {
+ fd,
+ start,
+ end,
+ autoClose: false
+ });
+ yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
+ }
+ })));
+ }
+ finally {
+ fs.closeSync(fd);
+ }
+ return;
+ });
+}
+function commitCache(httpClient, cacheId, filesize) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const commitCacheRequest = { size: filesize };
+ return yield httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
+ });
+}
+function saveCache(cacheId, archivePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpClient = createHttpClient();
+ core.debug("Upload cache");
+ yield uploadFile(httpClient, cacheId, archivePath);
+ // Commit Cache
+ core.debug("Commiting cache");
+ const cacheSize = utils.getArchiveFileSize(archivePath);
+ const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
+ if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
+ throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
+ }
+ core.info("Cache saved successfully");
+ });
+}
+exports.saveCache = saveCache;
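+
+// Illustrative flow (not part of the upstream bundle): callers of this module are
+// expected to reserve a cache entry first, then upload and commit the archive;
+// `key` and `archivePath` are hypothetical values:
+//
+//   const cacheId = await reserveCache(key);   // -1 when the key could not be reserved
+//   if (cacheId !== -1) {
+//     await saveCache(cacheId, archivePath);   // uploads chunks, then commits the cache
+//   }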
+
+
+/***/ }),
+
+/***/ 357:
+/***/ (function(module) {
+
+module.exports = require("assert");
+
+/***/ }),
+
+/***/ 368:
+/***/ (function(module) {
+
+module.exports = function atob(str) {
+ return Buffer.from(str, 'base64').toString('binary')
+}
+
+
+/***/ }),
+
+/***/ 370:
+/***/ (function(module) {
+
+module.exports = deprecate
+
+const loggedMessages = {}
+
+function deprecate (message) {
+ if (loggedMessages[message]) {
+ return
+ }
+
+ console.warn(`DEPRECATED (@octokit/rest): ${message}`)
+ loggedMessages[message] = 1
+}
+
+
+/***/ }),
+
+/***/ 383:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const assert = __webpack_require__(357);
+const path = __webpack_require__(622);
+const pathHelper = __webpack_require__(972);
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Helper class for parsing paths into segments
+ */
+class Path {
+ /**
+ * Constructs a Path
+ * @param itemPath Path or array of segments
+ */
+ constructor(itemPath) {
+ this.segments = [];
+ // String
+ if (typeof itemPath === 'string') {
+ assert(itemPath, `Parameter 'itemPath' must not be empty`);
+ // Normalize slashes and trim unnecessary trailing slash
+ itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
+ // Not rooted
+ if (!pathHelper.hasRoot(itemPath)) {
+ this.segments = itemPath.split(path.sep);
+ }
+ // Rooted
+ else {
+ // Add all segments, while not at the root
+ let remaining = itemPath;
+ let dir = pathHelper.dirname(remaining);
+ while (dir !== remaining) {
+ // Add the segment
+ const basename = path.basename(remaining);
+ this.segments.unshift(basename);
+ // Truncate the last segment
+ remaining = dir;
+ dir = pathHelper.dirname(remaining);
+ }
+ // Remainder is the root
+ this.segments.unshift(remaining);
+ }
+ }
+ // Array
+ else {
+ // Must not be empty
+ assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
+ // Each segment
+ for (let i = 0; i < itemPath.length; i++) {
+ let segment = itemPath[i];
+ // Must not be empty
+ assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
+ // Normalize slashes
+ segment = pathHelper.normalizeSeparators(itemPath[i]);
+ // Root segment
+ if (i === 0 && pathHelper.hasRoot(segment)) {
+ segment = pathHelper.safeTrimTrailingSeparator(segment);
+ assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
+ this.segments.push(segment);
+ }
+ // All other segments
+ else {
+ // Must not contain slash
+ assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
+ this.segments.push(segment);
+ }
+ }
+ }
+ }
+ /**
+     * Converts the path to its string representation
+ */
+ toString() {
+ // First segment
+ let result = this.segments[0];
+ // All others
+ let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
+ for (let i = 1; i < this.segments.length; i++) {
+ if (skipSlash) {
+ skipSlash = false;
+ }
+ else {
+ result += path.sep;
+ }
+ result += this.segments[i];
+ }
+ return result;
+ }
+}
+exports.Path = Path;
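+
+// Illustrative behaviour (not part of the upstream bundle): a rooted POSIX path is
+// split into its root plus one segment per component, and toString() rejoins them
+// without doubling the root separator, e.g.
+//
+//   new Path('/foo/bar').segments   // => ['/', 'foo', 'bar']
+//   new Path('/foo/bar').toString() // => '/foo/bar'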
+//# sourceMappingURL=internal-path.js.map
+
+/***/ }),
+
+/***/ 385:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var isPlainObject = _interopDefault(__webpack_require__(696));
+var universalUserAgent = __webpack_require__(562);
+
+function lowercaseKeys(object) {
+ if (!object) {
+ return {};
+ }
+
+ return Object.keys(object).reduce((newObj, key) => {
+ newObj[key.toLowerCase()] = object[key];
+ return newObj;
+ }, {});
+}
+
+function mergeDeep(defaults, options) {
+ const result = Object.assign({}, defaults);
+ Object.keys(options).forEach(key => {
+ if (isPlainObject(options[key])) {
+ if (!(key in defaults)) Object.assign(result, {
+ [key]: options[key]
+ });else result[key] = mergeDeep(defaults[key], options[key]);
+ } else {
+ Object.assign(result, {
+ [key]: options[key]
+ });
+ }
+ });
+ return result;
+}
+
+function merge(defaults, route, options) {
+ if (typeof route === "string") {
+ let [method, url] = route.split(" ");
+ options = Object.assign(url ? {
+ method,
+ url
+ } : {
+ url: method
+ }, options);
+ } else {
+ options = Object.assign({}, route);
+ } // lowercase header names before merging with defaults to avoid duplicates
+
+
+ options.headers = lowercaseKeys(options.headers);
+ const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten
+
+ if (defaults && defaults.mediaType.previews.length) {
+ mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
+ }
+
+ mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
+ return mergedOptions;
+}
+
+function addQueryParameters(url, parameters) {
+ const separator = /\?/.test(url) ? "&" : "?";
+ const names = Object.keys(parameters);
+
+ if (names.length === 0) {
+ return url;
+ }
+
+ return url + separator + names.map(name => {
+ if (name === "q") {
+ return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
+ }
+
+ return `${name}=${encodeURIComponent(parameters[name])}`;
+ }).join("&");
+}
+
+const urlVariableRegex = /\{[^}]+\}/g;
+
+function removeNonChars(variableName) {
+ return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
+}
+
+function extractUrlVariableNames(url) {
+ const matches = url.match(urlVariableRegex);
+
+ if (!matches) {
+ return [];
+ }
+
+ return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
+}
+
+function omit(object, keysToOmit) {
+ return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {
+ obj[key] = object[key];
+ return obj;
+ }, {});
+}
+
+// Based on https://github.com/bramstein/url-template, licensed under BSD
+// TODO: create separate package.
+//
+// Copyright (c) 2012-2014, Bram Stein
+// All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions
+// are met:
+// 1. Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+// 3. The name of the author may not be used to endorse or promote products
+// derived from this software without specific prior written permission.
+// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
+// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+/* istanbul ignore file */
+function encodeReserved(str) {
+ return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {
+ if (!/%[0-9A-Fa-f]/.test(part)) {
+ part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
+ }
+
+ return part;
+ }).join("");
+}
+
+function encodeUnreserved(str) {
+ return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
+ return "%" + c.charCodeAt(0).toString(16).toUpperCase();
+ });
+}
+
+function encodeValue(operator, value, key) {
+ value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
+
+ if (key) {
+ return encodeUnreserved(key) + "=" + value;
+ } else {
+ return value;
+ }
+}
+
+function isDefined(value) {
+ return value !== undefined && value !== null;
+}
+
+function isKeyOperator(operator) {
+ return operator === ";" || operator === "&" || operator === "?";
+}
+
+function getValues(context, operator, key, modifier) {
+ var value = context[key],
+ result = [];
+
+ if (isDefined(value) && value !== "") {
+ if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
+ value = value.toString();
+
+ if (modifier && modifier !== "*") {
+ value = value.substring(0, parseInt(modifier, 10));
+ }
+
+ result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
+ } else {
+ if (modifier === "*") {
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function (value) {
+ result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
+ });
+ } else {
+ Object.keys(value).forEach(function (k) {
+ if (isDefined(value[k])) {
+ result.push(encodeValue(operator, value[k], k));
+ }
+ });
+ }
+ } else {
+ const tmp = [];
+
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function (value) {
+ tmp.push(encodeValue(operator, value));
+ });
+ } else {
+ Object.keys(value).forEach(function (k) {
+ if (isDefined(value[k])) {
+ tmp.push(encodeUnreserved(k));
+ tmp.push(encodeValue(operator, value[k].toString()));
+ }
+ });
+ }
+
+ if (isKeyOperator(operator)) {
+ result.push(encodeUnreserved(key) + "=" + tmp.join(","));
+ } else if (tmp.length !== 0) {
+ result.push(tmp.join(","));
+ }
+ }
+ }
+ } else {
+ if (operator === ";") {
+ if (isDefined(value)) {
+ result.push(encodeUnreserved(key));
+ }
+ } else if (value === "" && (operator === "&" || operator === "?")) {
+ result.push(encodeUnreserved(key) + "=");
+ } else if (value === "") {
+ result.push("");
+ }
+ }
+
+ return result;
+}
+
+function parseUrl(template) {
+ return {
+ expand: expand.bind(null, template)
+ };
+}
+
+function expand(template, context) {
+ var operators = ["+", "#", ".", "/", ";", "?", "&"];
+ return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
+ if (expression) {
+ let operator = "";
+ const values = [];
+
+ if (operators.indexOf(expression.charAt(0)) !== -1) {
+ operator = expression.charAt(0);
+ expression = expression.substr(1);
+ }
+
+ expression.split(/,/g).forEach(function (variable) {
+ var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
+ values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
+ });
+
+ if (operator && operator !== "+") {
+ var separator = ",";
+
+ if (operator === "?") {
+ separator = "&";
+ } else if (operator !== "#") {
+ separator = operator;
+ }
+
+ return (values.length !== 0 ? operator : "") + values.join(separator);
+ } else {
+ return values.join(",");
+ }
+ } else {
+ return encodeReserved(literal);
+ }
+ });
+}
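+
+// Illustrative example (not part of the upstream url-template code): parseUrl()
+// returns an object whose expand() performs RFC 6570 expansion; the values below
+// are hypothetical:
+//
+//   parseUrl("/repos/{owner}/{repo}/issues{?state,labels}")
+//     .expand({ owner: "octocat", repo: "hello-world", state: "open", labels: "bug" })
+//   // => "/repos/octocat/hello-world/issues?state=open&labels=bug"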
+
+function parse(options) {
+ // https://fetch.spec.whatwg.org/#methods
+ let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
+
+ let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}");
+ let headers = Object.assign({}, options.headers);
+ let body;
+ let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
+
+ const urlVariableNames = extractUrlVariableNames(url);
+ url = parseUrl(url).expand(parameters);
+
+ if (!/^http/.test(url)) {
+ url = options.baseUrl + url;
+ }
+
+ const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
+ const remainingParameters = omit(parameters, omittedParameters);
+  const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
+
+  if (!isBinaryRequest) {
+ if (options.mediaType.format) {
+ // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
+ headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
+ }
+
+ if (options.mediaType.previews.length) {
+ const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
+ headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
+ const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
+ return `application/vnd.github.${preview}-preview${format}`;
+ }).join(",");
+ }
+ } // for GET/HEAD requests, set URL query parameters from remaining parameters
+ // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
+
+
+ if (["GET", "HEAD"].includes(method)) {
+ url = addQueryParameters(url, remainingParameters);
+ } else {
+ if ("data" in remainingParameters) {
+ body = remainingParameters.data;
+ } else {
+ if (Object.keys(remainingParameters).length) {
+ body = remainingParameters;
+ } else {
+ headers["content-length"] = 0;
+ }
+ }
+ } // default content-type for JSON if body is set
+
+
+ if (!headers["content-type"] && typeof body !== "undefined") {
+ headers["content-type"] = "application/json; charset=utf-8";
+ } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
+  // fetch does not allow setting the `content-length` header, but we can set body to an empty string
+
+
+ if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
+ body = "";
+ } // Only return body/request keys if present
+
+
+ return Object.assign({
+ method,
+ url,
+ headers
+ }, typeof body !== "undefined" ? {
+ body
+ } : null, options.request ? {
+ request: options.request
+ } : null);
+}
+
+function endpointWithDefaults(defaults, route, options) {
+ return parse(merge(defaults, route, options));
+}
+
+function withDefaults(oldDefaults, newDefaults) {
+ const DEFAULTS = merge(oldDefaults, newDefaults);
+ const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
+ return Object.assign(endpoint, {
+ DEFAULTS,
+ defaults: withDefaults.bind(null, DEFAULTS),
+ merge: merge.bind(null, DEFAULTS),
+ parse
+ });
+}
+
+const VERSION = "6.0.1";
+
+const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
+// So we use RequestParameters and add method as additional required property.
+
+const DEFAULTS = {
+ method: "GET",
+ baseUrl: "https://api.github.com",
+ headers: {
+ accept: "application/vnd.github.v3+json",
+ "user-agent": userAgent
+ },
+ mediaType: {
+ format: "",
+ previews: []
+ }
+};
+
+const endpoint = withDefaults(null, DEFAULTS);
+
+exports.endpoint = endpoint;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 389:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const fs = __webpack_require__(747);
+const shebangCommand = __webpack_require__(866);
+
+function readShebang(command) {
+ // Read the first 150 bytes from the file
+ const size = 150;
+ let buffer;
+
+ if (Buffer.alloc) {
+ // Node.js v4.5+ / v5.10+
+ buffer = Buffer.alloc(size);
+ } else {
+ // Old Node.js API
+ buffer = new Buffer(size);
+ buffer.fill(0); // zero-fill
+ }
+
+ let fd;
+
+ try {
+ fd = fs.openSync(command, 'r');
+ fs.readSync(fd, buffer, 0, size, 0);
+ fs.closeSync(fd);
+ } catch (e) { /* Empty */ }
+
+ // Attempt to extract shebang (null is returned if not a shebang)
+ return shebangCommand(buffer.toString());
+}
+
+module.exports = readShebang;
+
+
+/***/ }),
+
+/***/ 402:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// Approach:
+//
+// 1. Get the minimatch set
+// 2. For each pattern in the set, PROCESS(pattern, false)
+// 3. Store matches per-set, then uniq them
+//
+// PROCESS(pattern, inGlobStar)
+// Get the first [n] items from pattern that are all strings
+// Join these together. This is PREFIX.
+// If there is no more remaining, then stat(PREFIX) and
+// add to matches if it succeeds. END.
+//
+// If inGlobStar and PREFIX is symlink and points to dir
+// set ENTRIES = []
+// else readdir(PREFIX) as ENTRIES
+// If fail, END
+//
+// with ENTRIES
+// If pattern[n] is GLOBSTAR
+// // handle the case where the globstar match is empty
+// // by pruning it out, and testing the resulting pattern
+// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
+// // handle other cases.
+// for ENTRY in ENTRIES (not dotfiles)
+// // attach globstar + tail onto the entry
+// // Mark that this entry is a globstar match
+// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
+//
+// else // not globstar
+// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
+// Test ENTRY against pattern[n]
+// If fails, continue
+// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
+//
+// Caveat:
+// Cache all stats and readdirs results to minimize syscall. Since all
+// we ever care about is existence and directory-ness, we can just keep
+// `true` for files, and [children,...] for directories, or `false` for
+// things that don't exist.
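+//
+// Illustrative usage (not part of the upstream glob code); the pattern and
+// options are hypothetical:
+//
+//   glob("**/*.js", { ignore: "node_modules/**" }, function (er, files) {
+//     if (er) throw er
+//     console.log(files) // matched paths, deduplicated as described above
+//   })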
+
+module.exports = glob
+
+var fs = __webpack_require__(747)
+var rp = __webpack_require__(302)
+var minimatch = __webpack_require__(93)
+var Minimatch = minimatch.Minimatch
+var inherits = __webpack_require__(689)
+var EE = __webpack_require__(614).EventEmitter
+var path = __webpack_require__(622)
+var assert = __webpack_require__(357)
+var isAbsolute = __webpack_require__(681)
+var globSync = __webpack_require__(245)
+var common = __webpack_require__(644)
+var alphasort = common.alphasort
+var alphasorti = common.alphasorti
+var setopts = common.setopts
+var ownProp = common.ownProp
+var inflight = __webpack_require__(674)
+var util = __webpack_require__(669)
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+var once = __webpack_require__(969)
+
+function glob (pattern, options, cb) {
+ if (typeof options === 'function') cb = options, options = {}
+ if (!options) options = {}
+
+ if (options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return globSync(pattern, options)
+ }
+
+ return new Glob(pattern, options, cb)
+}
+
+glob.sync = globSync
+var GlobSync = glob.GlobSync = globSync.GlobSync
+
+// old api surface
+glob.glob = glob
+
+function extend (origin, add) {
+ if (add === null || typeof add !== 'object') {
+ return origin
+ }
+
+ var keys = Object.keys(add)
+ var i = keys.length
+ while (i--) {
+ origin[keys[i]] = add[keys[i]]
+ }
+ return origin
+}
+
+glob.hasMagic = function (pattern, options_) {
+ var options = extend({}, options_)
+ options.noprocess = true
+
+ var g = new Glob(pattern, options)
+ var set = g.minimatch.set
+
+ if (!pattern)
+ return false
+
+ if (set.length > 1)
+ return true
+
+ for (var j = 0; j < set[0].length; j++) {
+ if (typeof set[0][j] !== 'string')
+ return true
+ }
+
+ return false
+}
+
+glob.Glob = Glob
+inherits(Glob, EE)
+function Glob (pattern, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = null
+ }
+
+ if (options && options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return new GlobSync(pattern, options)
+ }
+
+ if (!(this instanceof Glob))
+ return new Glob(pattern, options, cb)
+
+ setopts(this, pattern, options)
+ this._didRealPath = false
+
+ // process each pattern in the minimatch set
+ var n = this.minimatch.set.length
+
+  // The matches are stored as {<filename>: true,...} so that
+ // duplicates are automagically pruned.
+ // Later, we do an Object.keys() on these.
+ // Keep them as a list so we can fill in when nonull is set.
+ this.matches = new Array(n)
+
+ if (typeof cb === 'function') {
+ cb = once(cb)
+ this.on('error', cb)
+ this.on('end', function (matches) {
+ cb(null, matches)
+ })
+ }
+
+ var self = this
+ this._processing = 0
+
+ this._emitQueue = []
+ this._processQueue = []
+ this.paused = false
+
+ if (this.noprocess)
+ return this
+
+ if (n === 0)
+ return done()
+
+ var sync = true
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false, done)
+ }
+ sync = false
+
+ function done () {
+ --self._processing
+ if (self._processing <= 0) {
+ if (sync) {
+ process.nextTick(function () {
+ self._finish()
+ })
+ } else {
+ self._finish()
+ }
+ }
+ }
+}
+
+Glob.prototype._finish = function () {
+ assert(this instanceof Glob)
+ if (this.aborted)
+ return
+
+ if (this.realpath && !this._didRealpath)
+ return this._realpath()
+
+ common.finish(this)
+ this.emit('end', this.found)
+}
+
+Glob.prototype._realpath = function () {
+ if (this._didRealpath)
+ return
+
+ this._didRealpath = true
+
+ var n = this.matches.length
+ if (n === 0)
+ return this._finish()
+
+ var self = this
+ for (var i = 0; i < this.matches.length; i++)
+ this._realpathSet(i, next)
+
+ function next () {
+ if (--n === 0)
+ self._finish()
+ }
+}
+
+Glob.prototype._realpathSet = function (index, cb) {
+ var matchset = this.matches[index]
+ if (!matchset)
+ return cb()
+
+ var found = Object.keys(matchset)
+ var self = this
+ var n = found.length
+
+ if (n === 0)
+ return cb()
+
+ var set = this.matches[index] = Object.create(null)
+ found.forEach(function (p, i) {
+ // If there's a problem with the stat, then it means that
+ // one or more of the links in the realpath couldn't be
+ // resolved. just return the abs value in that case.
+ p = self._makeAbs(p)
+ rp.realpath(p, self.realpathCache, function (er, real) {
+ if (!er)
+ set[real] = true
+ else if (er.syscall === 'stat')
+ set[p] = true
+ else
+ self.emit('error', er) // srsly wtf right here
+
+ if (--n === 0) {
+ self.matches[index] = set
+ cb()
+ }
+ })
+ })
+}
+
+Glob.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+Glob.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
+
+Glob.prototype.abort = function () {
+ this.aborted = true
+ this.emit('abort')
+}
+
+Glob.prototype.pause = function () {
+ if (!this.paused) {
+ this.paused = true
+ this.emit('pause')
+ }
+}
+
+Glob.prototype.resume = function () {
+ if (this.paused) {
+ this.emit('resume')
+ this.paused = false
+ if (this._emitQueue.length) {
+ var eq = this._emitQueue.slice(0)
+ this._emitQueue.length = 0
+ for (var i = 0; i < eq.length; i ++) {
+ var e = eq[i]
+ this._emitMatch(e[0], e[1])
+ }
+ }
+ if (this._processQueue.length) {
+ var pq = this._processQueue.slice(0)
+ this._processQueue.length = 0
+ for (var i = 0; i < pq.length; i ++) {
+ var p = pq[i]
+ this._processing--
+ this._process(p[0], p[1], p[2], p[3])
+ }
+ }
+ }
+}
+
+Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
+ assert(this instanceof Glob)
+ assert(typeof cb === 'function')
+
+ if (this.aborted)
+ return
+
+ this._processing++
+ if (this.paused) {
+ this._processQueue.push([pattern, index, inGlobStar, cb])
+ return
+ }
+
+ //console.error('PROCESS %d', this._processing, pattern)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // see if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index, cb)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ //if ignored, skip _processing
+ if (childrenIgnored(this, read))
+ return cb()
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
+}
+
+Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return cb()
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return cb()
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return cb()
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ var newPattern
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+ this._process([e].concat(remain), index, inGlobStar, cb)
+ }
+ cb()
+}
+
+Glob.prototype._emitMatch = function (index, e) {
+ if (this.aborted)
+ return
+
+ if (isIgnored(this, e))
+ return
+
+ if (this.paused) {
+ this._emitQueue.push([index, e])
+ return
+ }
+
+ var abs = isAbsolute(e) ? e : this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute)
+ e = abs
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ var st = this.statCache[abs]
+ if (st)
+ this.emit('stat', e, st)
+
+ this.emit('match', e)
+}
+
+Glob.prototype._readdirInGlobStar = function (abs, cb) {
+ if (this.aborted)
+ return
+
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false, cb)
+
+ var lstatkey = 'lstat\0' + abs
+ var self = this
+ var lstatcb = inflight(lstatkey, lstatcb_)
+
+ if (lstatcb)
+ fs.lstat(abs, lstatcb)
+
+ function lstatcb_ (er, lstat) {
+ if (er && er.code === 'ENOENT')
+ return cb()
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ self.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory()) {
+ self.cache[abs] = 'FILE'
+ cb()
+ } else
+ self._readdir(abs, false, cb)
+ }
+}
+
+Glob.prototype._readdir = function (abs, inGlobStar, cb) {
+ if (this.aborted)
+ return
+
+ cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
+ if (!cb)
+ return
+
+ //console.error('RD %j %j', +inGlobStar, abs)
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs, cb)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return cb()
+
+ if (Array.isArray(c))
+ return cb(null, c)
+ }
+
+ var self = this
+ fs.readdir(abs, readdirCb(this, abs, cb))
+}
+
+function readdirCb (self, abs, cb) {
+ return function (er, entries) {
+ if (er)
+ self._readdirError(abs, er, cb)
+ else
+ self._readdirEntries(abs, entries, cb)
+ }
+}
+
+Glob.prototype._readdirEntries = function (abs, entries, cb) {
+ if (this.aborted)
+ return
+
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+ return cb(null, entries)
+}
+
+Glob.prototype._readdirError = function (f, er, cb) {
+ if (this.aborted)
+ return
+
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ this.emit('error', error)
+ this.abort()
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict) {
+ this.emit('error', er)
+ // If the error is handled, then we abort
+ // if not, we threw out of here
+ this.abort()
+ }
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+
+ return cb()
+}
+
+Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+
+Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+ //console.error('pgs2', prefix, remain[0], entries)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return cb()
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false, cb)
+
+ var isSym = this.symlinks[abs]
+ var len = entries.length
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return cb()
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true, cb)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true, cb)
+ }
+
+ cb()
+}
+
+Glob.prototype._processSimple = function (prefix, index, cb) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var self = this
+ this._stat(prefix, function (er, exists) {
+ self._processSimple2(prefix, index, er, exists, cb)
+ })
+}
+Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
+
+ //console.error('ps2', prefix, exists)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return cb()
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+ cb()
+}
+
+// Returns either 'DIR', 'FILE', or false
+Glob.prototype._stat = function (f, cb) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return cb()
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return cb(null, c)
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ // otherwise we have to stat, because maybe c=true
+ // if we know it exists, but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (stat !== undefined) {
+ if (stat === false)
+ return cb(null, stat)
+ else {
+ var type = stat.isDirectory() ? 'DIR' : 'FILE'
+ if (needDir && type === 'FILE')
+ return cb()
+ else
+ return cb(null, type, stat)
+ }
+ }
+
+ var self = this
+ var statcb = inflight('stat\0' + abs, lstatcb_)
+ if (statcb)
+ fs.lstat(abs, statcb)
+
+ function lstatcb_ (er, lstat) {
+ if (lstat && lstat.isSymbolicLink()) {
+ // If it's a symlink, then treat it as the target, unless
+ // the target does not exist, then treat it as a file.
+ return fs.stat(abs, function (er, stat) {
+ if (er)
+ self._stat2(f, abs, null, lstat, cb)
+ else
+ self._stat2(f, abs, er, stat, cb)
+ })
+ } else {
+ self._stat2(f, abs, er, lstat, cb)
+ }
+ }
+}
+
+Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return cb()
+ }
+
+ var needDir = f.slice(-1) === '/'
+ this.statCache[abs] = stat
+
+ if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
+ return cb(null, false, stat)
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ return cb(null, c, stat)
+}
+
+
+/***/ }),
+
+/***/ 413:
+/***/ (function(module) {
+
+module.exports = require("stream");
+
+/***/ }),
+
+/***/ 414:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationBeforeRequest;
+
+const btoa = __webpack_require__(675);
+
+const withAuthorizationPrefix = __webpack_require__(85);
+
+function authenticationBeforeRequest(state, options) {
+ if (typeof state.auth === "string") {
+ options.headers.authorization = withAuthorizationPrefix(state.auth);
+ return;
+ }
+
+ if (state.auth.username) {
+ const hash = btoa(`${state.auth.username}:${state.auth.password}`);
+ options.headers.authorization = `Basic ${hash}`;
+ if (state.otp) {
+ options.headers["x-github-otp"] = state.otp;
+ }
+ return;
+ }
+
+ if (state.auth.clientId) {
+    // There is a special case for OAuth applications, when `clientId` and `clientSecret` are passed as
+ // Basic Authorization instead of query parameters. The only routes where that applies share the same
+ // URL though: `/applications/:client_id/tokens/:access_token`.
+ //
+ // 1. [Check an authorization](https://developer.github.com/v3/oauth_authorizations/#check-an-authorization)
+ // 2. [Reset an authorization](https://developer.github.com/v3/oauth_authorizations/#reset-an-authorization)
+ // 3. [Revoke an authorization for an application](https://developer.github.com/v3/oauth_authorizations/#revoke-an-authorization-for-an-application)
+ //
+    // We identify by checking the URL. It must match both "/applications/:client_id/tokens/:access_token"
+ // as well as "/applications/123/tokens/token456"
+ if (/\/applications\/:?[\w_]+\/tokens\/:?[\w_]+($|\?)/.test(options.url)) {
+ const hash = btoa(`${state.auth.clientId}:${state.auth.clientSecret}`);
+ options.headers.authorization = `Basic ${hash}`;
+ return;
+ }
+
+ options.url += options.url.indexOf("?") === -1 ? "?" : "&";
+ options.url += `client_id=${state.auth.clientId}&client_secret=${state.auth.clientSecret}`;
+ return;
+ }
+
+ return Promise.resolve()
+
+ .then(() => {
+ return state.auth();
+ })
+
+ .then(authorization => {
+ options.headers.authorization = withAuthorizationPrefix(authorization);
+ });
+}
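+
+// Illustrative behaviour (not part of the upstream bundle): with clientId/clientSecret
+// auth, ordinary requests get the credentials appended as query parameters, while the
+// token-check routes described above get a Basic authorization header; values are
+// hypothetical:
+//
+//   GET /rate_limit                          -> ...?client_id=abc123&client_secret=secret456
+//   GET /applications/abc123/tokens/token789 -> Authorization: Basic base64("abc123:secret456")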
+
+
+/***/ }),
+
+/***/ 417:
+/***/ (function(module) {
+
+module.exports = require("crypto");
+
+/***/ }),
+
+/***/ 427:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+// Older versions of Node.js might not have `util.getSystemErrorName()`.
+// In that case, fall back to a deprecated internal.
+const util = __webpack_require__(669);
+
+let uv;
+
+if (typeof util.getSystemErrorName === 'function') {
+ module.exports = util.getSystemErrorName;
+} else {
+ try {
+ uv = process.binding('uv');
+
+ if (typeof uv.errname !== 'function') {
+ throw new TypeError('uv.errname is not a function');
+ }
+ } catch (err) {
+ console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err);
+ uv = null;
+ }
+
+ module.exports = code => errname(uv, code);
+}
+
+// Used for testing the fallback behavior
+module.exports.__test__ = errname;
+
+function errname(uv, code) {
+ if (uv) {
+ return uv.errname(code);
+ }
+
+ if (!(code < 0)) {
+ throw new Error('err >= 0');
+ }
+
+ return `Unknown system error ${code}`;
+}
+
+
+
+/***/ }),
+
+/***/ 431:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const os = __importStar(__webpack_require__(87));
+/**
+ * Commands
+ *
+ * Command Format:
+ * ::name key=value,key=value::message
+ *
+ * Examples:
+ * ::warning::This is the message
+ * ::set-env name=MY_VAR::some value
+ */
+function issueCommand(command, properties, message) {
+ const cmd = new Command(command, properties, message);
+ process.stdout.write(cmd.toString() + os.EOL);
+}
+exports.issueCommand = issueCommand;
+function issue(name, message = '') {
+ issueCommand(name, {}, message);
+}
+exports.issue = issue;
+const CMD_STRING = '::';
+class Command {
+ constructor(command, properties, message) {
+ if (!command) {
+ command = 'missing.command';
+ }
+ this.command = command;
+ this.properties = properties;
+ this.message = message;
+ }
+ toString() {
+ let cmdStr = CMD_STRING + this.command;
+ if (this.properties && Object.keys(this.properties).length > 0) {
+ cmdStr += ' ';
+ let first = true;
+ for (const key in this.properties) {
+ if (this.properties.hasOwnProperty(key)) {
+ const val = this.properties[key];
+ if (val) {
+ if (first) {
+ first = false;
+ }
+ else {
+ cmdStr += ',';
+ }
+ cmdStr += `${key}=${escapeProperty(val)}`;
+ }
+ }
+ }
+ }
+ cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
+ return cmdStr;
+ }
+}
+/**
+ * Sanitizes an input into a string so it can be passed into issueCommand safely
+ * @param input input to sanitize into a string
+ */
+function toCommandValue(input) {
+ if (input === null || input === undefined) {
+ return '';
+ }
+ else if (typeof input === 'string' || input instanceof String) {
+ return input;
+ }
+ return JSON.stringify(input);
+}
+exports.toCommandValue = toCommandValue;
+function escapeData(s) {
+ return toCommandValue(s)
+ .replace(/%/g, '%25')
+ .replace(/\r/g, '%0D')
+ .replace(/\n/g, '%0A');
+}
+function escapeProperty(s) {
+ return toCommandValue(s)
+ .replace(/%/g, '%25')
+ .replace(/\r/g, '%0D')
+ .replace(/\n/g, '%0A')
+ .replace(/:/g, '%3A')
+ .replace(/,/g, '%2C');
+}
+//# sourceMappingURL=command.js.map
+
+/***/ }),
+
+/***/ 446:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationRequestError;
+
+const { RequestError } = __webpack_require__(102);
+
+function authenticationRequestError(state, error, options) {
+ if (!error.headers) throw error;
+
+ const otpRequired = /required/.test(error.headers["x-github-otp"] || "");
+ // handle "2FA required" error only
+ if (error.status !== 401 || !otpRequired) {
+ throw error;
+ }
+
+ if (
+ error.status === 401 &&
+ otpRequired &&
+ error.request &&
+ error.request.headers["x-github-otp"]
+ ) {
+ if (state.otp) {
+ delete state.otp; // no longer valid, request again
+ } else {
+ throw new RequestError(
+ "Invalid one-time password for two-factor authentication",
+ 401,
+ {
+ headers: error.headers,
+ request: options
+ }
+ );
+ }
+ }
+
+ if (typeof state.auth.on2fa !== "function") {
+ throw new RequestError(
+ "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication",
+ 401,
+ {
+ headers: error.headers,
+ request: options
+ }
+ );
+ }
+
+ return Promise.resolve()
+ .then(() => {
+ return state.auth.on2fa();
+ })
+ .then(oneTimePassword => {
+ const newOptions = Object.assign(options, {
+ headers: Object.assign(options.headers, {
+ "x-github-otp": oneTimePassword
+ })
+ });
+ return state.octokit.request(newOptions).then(response => {
+ // If OTP still valid, then persist it for following requests
+ state.otp = oneTimePassword;
+ return response;
+ });
+ });
+}
+
+
+/***/ }),
+
+/***/ 453:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var once = __webpack_require__(969)
+var eos = __webpack_require__(9)
+var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes
+
+var noop = function () {}
+var ancient = /^v?\.0/.test(process.version)
+
+var isFn = function (fn) {
+ return typeof fn === 'function'
+}
+
+var isFS = function (stream) {
+  if (!ancient) return false // newer node versions do not need to treat fs in a special way
+ if (!fs) return false // browser
+ return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
+}
+
+var isRequest = function (stream) {
+ return stream.setHeader && isFn(stream.abort)
+}
+
+var destroyer = function (stream, reading, writing, callback) {
+ callback = once(callback)
+
+ var closed = false
+ stream.on('close', function () {
+ closed = true
+ })
+
+ eos(stream, {readable: reading, writable: writing}, function (err) {
+ if (err) return callback(err)
+ closed = true
+ callback()
+ })
+
+ var destroyed = false
+ return function (err) {
+ if (closed) return
+ if (destroyed) return
+ destroyed = true
+
+ if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
+    if (isRequest(stream)) return stream.abort() // request.destroy just does .end - .abort is what we want
+
+ if (isFn(stream.destroy)) return stream.destroy()
+
+ callback(err || new Error('stream was destroyed'))
+ }
+}
+
+var call = function (fn) {
+ fn()
+}
+
+var pipe = function (from, to) {
+ return from.pipe(to)
+}
+
+var pump = function () {
+ var streams = Array.prototype.slice.call(arguments)
+ var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop
+
+ if (Array.isArray(streams[0])) streams = streams[0]
+ if (streams.length < 2) throw new Error('pump requires two streams per minimum')
+
+ var error
+ var destroys = streams.map(function (stream, i) {
+ var reading = i < streams.length - 1
+ var writing = i > 0
+ return destroyer(stream, reading, writing, function (err) {
+ if (!error) error = err
+ if (err) destroys.forEach(call)
+ if (reading) return
+ destroys.forEach(call)
+ callback(error)
+ })
+ })
+
+ return streams.reduce(pipe)
+}
+
+module.exports = pump
+
+
+/***/ }),
+
+/***/ 454:
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var Stream = _interopDefault(__webpack_require__(413));
+var http = _interopDefault(__webpack_require__(605));
+var Url = _interopDefault(__webpack_require__(835));
+var https = _interopDefault(__webpack_require__(34));
+var zlib = _interopDefault(__webpack_require__(761));
+
+// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
+
+// fix for "Readable" isn't a named export issue
+const Readable = Stream.Readable;
+
+const BUFFER = Symbol('buffer');
+const TYPE = Symbol('type');
+
+class Blob {
+ constructor() {
+ this[TYPE] = '';
+
+ const blobParts = arguments[0];
+ const options = arguments[1];
+
+ const buffers = [];
+ let size = 0;
+
+ if (blobParts) {
+ const a = blobParts;
+ const length = Number(a.length);
+ for (let i = 0; i < length; i++) {
+ const element = a[i];
+ let buffer;
+ if (element instanceof Buffer) {
+ buffer = element;
+ } else if (ArrayBuffer.isView(element)) {
+ buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
+ } else if (element instanceof ArrayBuffer) {
+ buffer = Buffer.from(element);
+ } else if (element instanceof Blob) {
+ buffer = element[BUFFER];
+ } else {
+ buffer = Buffer.from(typeof element === 'string' ? element : String(element));
+ }
+ size += buffer.length;
+ buffers.push(buffer);
+ }
+ }
+
+ this[BUFFER] = Buffer.concat(buffers);
+
+ let type = options && options.type !== undefined && String(options.type).toLowerCase();
+ if (type && !/[^\u0020-\u007E]/.test(type)) {
+ this[TYPE] = type;
+ }
+ }
+ get size() {
+ return this[BUFFER].length;
+ }
+ get type() {
+ return this[TYPE];
+ }
+ text() {
+ return Promise.resolve(this[BUFFER].toString());
+ }
+ arrayBuffer() {
+ const buf = this[BUFFER];
+ const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ return Promise.resolve(ab);
+ }
+ stream() {
+ const readable = new Readable();
+ readable._read = function () {};
+ readable.push(this[BUFFER]);
+ readable.push(null);
+ return readable;
+ }
+ toString() {
+ return '[object Blob]';
+ }
+ slice() {
+ const size = this.size;
+
+ const start = arguments[0];
+ const end = arguments[1];
+ let relativeStart, relativeEnd;
+ if (start === undefined) {
+ relativeStart = 0;
+ } else if (start < 0) {
+ relativeStart = Math.max(size + start, 0);
+ } else {
+ relativeStart = Math.min(start, size);
+ }
+ if (end === undefined) {
+ relativeEnd = size;
+ } else if (end < 0) {
+ relativeEnd = Math.max(size + end, 0);
+ } else {
+ relativeEnd = Math.min(end, size);
+ }
+ const span = Math.max(relativeEnd - relativeStart, 0);
+
+ const buffer = this[BUFFER];
+ const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
+ const blob = new Blob([], { type: arguments[2] });
+ blob[BUFFER] = slicedBuffer;
+ return blob;
+ }
+}
+
+Object.defineProperties(Blob.prototype, {
+ size: { enumerable: true },
+ type: { enumerable: true },
+ slice: { enumerable: true }
+});
+
+Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
+ value: 'Blob',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * fetch-error.js
+ *
+ * FetchError interface for operational errors
+ */
+
+/**
+ * Create FetchError instance
+ *
+ * @param String message Error message for human
+ * @param String type Error type for machine
+ * @param String systemError For Node.js system error
+ * @return FetchError
+ */
+function FetchError(message, type, systemError) {
+ Error.call(this, message);
+
+ this.message = message;
+ this.type = type;
+
+ // when err.type is `system`, err.code contains system error code
+ if (systemError) {
+ this.code = this.errno = systemError.code;
+ }
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+FetchError.prototype = Object.create(Error.prototype);
+FetchError.prototype.constructor = FetchError;
+FetchError.prototype.name = 'FetchError';
+
+let convert;
+try {
+ convert = __webpack_require__(18).convert;
+} catch (e) {}
+
+const INTERNALS = Symbol('Body internals');
+
+// fix an issue where "PassThrough" isn't a named export for node <10
+const PassThrough = Stream.PassThrough;
+
+/**
+ * Body mixin
+ *
+ * Ref: https://fetch.spec.whatwg.org/#body
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+function Body(body) {
+ var _this = this;
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ _ref$size = _ref.size;
+
+ let size = _ref$size === undefined ? 0 : _ref$size;
+ var _ref$timeout = _ref.timeout;
+ let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
+
+ if (body == null) {
+ // body is undefined or null
+ body = null;
+ } else if (isURLSearchParams(body)) {
+ // body is a URLSearchParams
+ body = Buffer.from(body.toString());
+ } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+ // body is ArrayBuffer
+ body = Buffer.from(body);
+ } else if (ArrayBuffer.isView(body)) {
+ // body is ArrayBufferView
+ body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
+ } else if (body instanceof Stream) ; else {
+ // none of the above
+ // coerce to string then buffer
+ body = Buffer.from(String(body));
+ }
+ this[INTERNALS] = {
+ body,
+ disturbed: false,
+ error: null
+ };
+ this.size = size;
+ this.timeout = timeout;
+
+ if (body instanceof Stream) {
+ body.on('error', function (err) {
+ const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
+ _this[INTERNALS].error = error;
+ });
+ }
+}
+
+Body.prototype = {
+ get body() {
+ return this[INTERNALS].body;
+ },
+
+ get bodyUsed() {
+ return this[INTERNALS].disturbed;
+ },
+
+ /**
+ * Decode response as ArrayBuffer
+ *
+ * @return Promise
+ */
+ arrayBuffer() {
+ return consumeBody.call(this).then(function (buf) {
+ return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ });
+ },
+
+ /**
+ * Return raw response as Blob
+ *
+ * @return Promise
+ */
+ blob() {
+ let ct = this.headers && this.headers.get('content-type') || '';
+ return consumeBody.call(this).then(function (buf) {
+ return Object.assign(
+ // Prevent copying
+ new Blob([], {
+ type: ct.toLowerCase()
+ }), {
+ [BUFFER]: buf
+ });
+ });
+ },
+
+ /**
+ * Decode response as json
+ *
+ * @return Promise
+ */
+ json() {
+ var _this2 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ try {
+ return JSON.parse(buffer.toString());
+ } catch (err) {
+ return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
+ }
+ });
+ },
+
+ /**
+ * Decode response as text
+ *
+ * @return Promise
+ */
+ text() {
+ return consumeBody.call(this).then(function (buffer) {
+ return buffer.toString();
+ });
+ },
+
+ /**
+ * Decode response as buffer (non-spec api)
+ *
+ * @return Promise
+ */
+ buffer() {
+ return consumeBody.call(this);
+ },
+
+ /**
+ * Decode response as text, while automatically detecting the encoding and
+ * trying to decode to UTF-8 (non-spec api)
+ *
+ * @return Promise
+ */
+ textConverted() {
+ var _this3 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ return convertBody(buffer, _this3.headers);
+ });
+ }
+};
+
+// In browsers, all properties are enumerable.
+Object.defineProperties(Body.prototype, {
+ body: { enumerable: true },
+ bodyUsed: { enumerable: true },
+ arrayBuffer: { enumerable: true },
+ blob: { enumerable: true },
+ json: { enumerable: true },
+ text: { enumerable: true }
+});
+
+Body.mixIn = function (proto) {
+ for (const name of Object.getOwnPropertyNames(Body.prototype)) {
+ // istanbul ignore else: future proof
+ if (!(name in proto)) {
+ const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
+ Object.defineProperty(proto, name, desc);
+ }
+ }
+};
+
+/**
+ * Consume and convert an entire Body to a Buffer.
+ *
+ * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
+ *
+ * @return Promise
+ */
+function consumeBody() {
+ var _this4 = this;
+
+ if (this[INTERNALS].disturbed) {
+ return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
+ }
+
+ this[INTERNALS].disturbed = true;
+
+ if (this[INTERNALS].error) {
+ return Body.Promise.reject(this[INTERNALS].error);
+ }
+
+ let body = this.body;
+
+ // body is null
+ if (body === null) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is blob
+ if (isBlob(body)) {
+ body = body.stream();
+ }
+
+ // body is buffer
+ if (Buffer.isBuffer(body)) {
+ return Body.Promise.resolve(body);
+ }
+
+ // istanbul ignore if: should never happen
+ if (!(body instanceof Stream)) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is stream
+ // get ready to actually consume the body
+ let accum = [];
+ let accumBytes = 0;
+ let abort = false;
+
+ return new Body.Promise(function (resolve, reject) {
+ let resTimeout;
+
+ // allow timeout on slow response body
+ if (_this4.timeout) {
+ resTimeout = setTimeout(function () {
+ abort = true;
+ reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
+ }, _this4.timeout);
+ }
+
+ // handle stream errors
+ body.on('error', function (err) {
+ if (err.name === 'AbortError') {
+ // if the request was aborted, reject with this Error
+ abort = true;
+ reject(err);
+ } else {
+ // other errors, such as incorrect content-encoding
+ reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+
+ body.on('data', function (chunk) {
+ if (abort || chunk === null) {
+ return;
+ }
+
+ if (_this4.size && accumBytes + chunk.length > _this4.size) {
+ abort = true;
+ reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
+ return;
+ }
+
+ accumBytes += chunk.length;
+ accum.push(chunk);
+ });
+
+ body.on('end', function () {
+ if (abort) {
+ return;
+ }
+
+ clearTimeout(resTimeout);
+
+ try {
+ resolve(Buffer.concat(accum, accumBytes));
+ } catch (err) {
+ // handle streams that have accumulated too much data (issue #414)
+ reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+ });
+}
+
+/**
+ * Detect buffer encoding and convert to target encoding
+ * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
+ *
+ * @param Buffer buffer Incoming buffer
+ * @param String encoding Target encoding
+ * @return String
+ */
+function convertBody(buffer, headers) {
+ if (typeof convert !== 'function') {
+ throw new Error('The package `encoding` must be installed to use the textConverted() function');
+ }
+
+ const ct = headers.get('content-type');
+ let charset = 'utf-8';
+ let res, str;
+
+ // header
+ if (ct) {
+ res = /charset=([^;]*)/i.exec(ct);
+ }
+
+ // no charset in content type, peek at response body for at most 1024 bytes
+ str = buffer.slice(0, 1024).toString();
+
+ // html5
+ if (!res && str) {
+ res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
+ }
+
+ // found charset
+ if (res) {
+ charset = res.pop();
+
+ // prevent decode issues when sites use incorrect encoding
+ // ref: https://hsivonen.fi/encoding-menu/
+ if (charset === 'gb2312' || charset === 'gbk') {
+ charset = 'gb18030';
+ }
+ }
+
+ // turn raw buffers into a single utf-8 buffer
+ return convert(buffer, 'UTF-8', charset).toString();
+}
+
+/**
+ * headers.js
+ *
+ * Headers class offers convenient helpers
+ */
+
+const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
+
+function validateName(name) {
+ name = `${name}`;
+ if (invalidTokenRegex.test(name) || name === '') {
+ throw new TypeError(`${name} is not a legal HTTP header name`);
+ }
+}
+
+function validateValue(value) {
+ value = `${value}`;
+ if (invalidHeaderCharRegex.test(value)) {
+ throw new TypeError(`${value} is not a legal HTTP header value`);
+ }
+}
+
+/**
+ * Find the key in the map object, given a header name.
+ *
+ * Returns undefined if not found.
+ *
+ * @param String name Header name
+ * @return String|Undefined
+ */
+function find(map, name) {
+ name = name.toLowerCase();
+ for (const key in map) {
+ if (key.toLowerCase() === name) {
+ return key;
+ }
+ }
+ return undefined;
+}
+
+const MAP = Symbol('map');
+
+/**
+ * Headers class
+ *
+ * @param Object headers Response headers
+ * @return Void
+ */
+class Headers {
+ constructor() {
+ let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+
+ this[MAP] = Object.create(null);
+
+ if (init instanceof Headers) {
+ const rawHeaders = init.raw();
+ const headerNames = Object.keys(rawHeaders);
+
+ for (const headerName of headerNames) {
+ for (const value of rawHeaders[headerName]) {
+ this.append(headerName, value);
+ }
+ }
+
+ return;
+ }
+
+ // We don't worry about converting prop to ByteString here as append()
+ // will handle it.
+ if (init == null) ; else if (typeof init === 'object') {
+ const method = init[Symbol.iterator];
+ if (method != null) {
+ if (typeof method !== 'function') {
+ throw new TypeError('Header pairs must be iterable');
+ }
+
+ // sequence<sequence<ByteString>>
+ // Note: per spec we have to first exhaust the lists then process them
+ const pairs = [];
+ for (const pair of init) {
+ if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+ throw new TypeError('Each header pair must be iterable');
+ }
+ pairs.push(Array.from(pair));
+ }
+
+ for (const pair of pairs) {
+ if (pair.length !== 2) {
+ throw new TypeError('Each header pair must be a name/value tuple');
+ }
+ this.append(pair[0], pair[1]);
+ }
+ } else {
+ // record
+ for (const key of Object.keys(init)) {
+ const value = init[key];
+ this.append(key, value);
+ }
+ }
+ } else {
+ throw new TypeError('Provided initializer must be an object');
+ }
+ }
+
+ /**
+ * Return combined header value given name
+ *
+ * @param String name Header name
+ * @return Mixed
+ */
+ get(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key === undefined) {
+ return null;
+ }
+
+ return this[MAP][key].join(', ');
+ }
+
+ /**
+ * Iterate over all headers
+ *
+ * @param Function callback Executed for each item with parameters (value, name, thisArg)
+ * @param Boolean thisArg `this` context for callback function
+ * @return Void
+ */
+ forEach(callback) {
+ let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
+
+ let pairs = getHeaders(this);
+ let i = 0;
+ while (i < pairs.length) {
+ var _pairs$i = pairs[i];
+ const name = _pairs$i[0],
+ value = _pairs$i[1];
+
+ callback.call(thisArg, value, name, this);
+ pairs = getHeaders(this);
+ i++;
+ }
+ }
+
+ /**
+ * Overwrite header values given name
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ set(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ this[MAP][key !== undefined ? key : name] = [value];
+ }
+
+ /**
+ * Append a value onto existing header
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ append(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ this[MAP][key].push(value);
+ } else {
+ this[MAP][name] = [value];
+ }
+ }
+
+ /**
+ * Check for header name existence
+ *
+ * @param String name Header name
+ * @return Boolean
+ */
+ has(name) {
+ name = `${name}`;
+ validateName(name);
+ return find(this[MAP], name) !== undefined;
+ }
+
+ /**
+ * Delete all header values given name
+ *
+ * @param String name Header name
+ * @return Void
+ */
+ delete(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ delete this[MAP][key];
+ }
+ }
+
+ /**
+ * Return raw headers (non-spec api)
+ *
+ * @return Object
+ */
+ raw() {
+ return this[MAP];
+ }
+
+ /**
+ * Get an iterator on keys.
+ *
+ * @return Iterator
+ */
+ keys() {
+ return createHeadersIterator(this, 'key');
+ }
+
+ /**
+ * Get an iterator on values.
+ *
+ * @return Iterator
+ */
+ values() {
+ return createHeadersIterator(this, 'value');
+ }
+
+ /**
+ * Get an iterator on entries.
+ *
+ * This is the default iterator of the Headers object.
+ *
+ * @return Iterator
+ */
+ [Symbol.iterator]() {
+ return createHeadersIterator(this, 'key+value');
+ }
+}
+Headers.prototype.entries = Headers.prototype[Symbol.iterator];
+
+Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
+ value: 'Headers',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Headers.prototype, {
+ get: { enumerable: true },
+ forEach: { enumerable: true },
+ set: { enumerable: true },
+ append: { enumerable: true },
+ has: { enumerable: true },
+ delete: { enumerable: true },
+ keys: { enumerable: true },
+ values: { enumerable: true },
+ entries: { enumerable: true }
+});
+
+function getHeaders(headers) {
+ let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
+
+ const keys = Object.keys(headers[MAP]).sort();
+ return keys.map(kind === 'key' ? function (k) {
+ return k.toLowerCase();
+ } : kind === 'value' ? function (k) {
+ return headers[MAP][k].join(', ');
+ } : function (k) {
+ return [k.toLowerCase(), headers[MAP][k].join(', ')];
+ });
+}
+
+const INTERNAL = Symbol('internal');
+
+function createHeadersIterator(target, kind) {
+ const iterator = Object.create(HeadersIteratorPrototype);
+ iterator[INTERNAL] = {
+ target,
+ kind,
+ index: 0
+ };
+ return iterator;
+}
+
+const HeadersIteratorPrototype = Object.setPrototypeOf({
+ next() {
+ // istanbul ignore if
+ if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
+ throw new TypeError('Value of `this` is not a HeadersIterator');
+ }
+
+ var _INTERNAL = this[INTERNAL];
+ const target = _INTERNAL.target,
+ kind = _INTERNAL.kind,
+ index = _INTERNAL.index;
+
+ const values = getHeaders(target, kind);
+ const len = values.length;
+ if (index >= len) {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+
+ this[INTERNAL].index = index + 1;
+
+ return {
+ value: values[index],
+ done: false
+ };
+ }
+}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
+
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
+ value: 'HeadersIterator',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * Export the Headers object in a form that Node.js can consume.
+ *
+ * @param Headers headers
+ * @return Object
+ */
+function exportNodeCompatibleHeaders(headers) {
+ const obj = Object.assign({ __proto__: null }, headers[MAP]);
+
+ // http.request() only supports string as Host header. This hack makes
+ // specifying custom Host header possible.
+ const hostHeaderKey = find(headers[MAP], 'Host');
+ if (hostHeaderKey !== undefined) {
+ obj[hostHeaderKey] = obj[hostHeaderKey][0];
+ }
+
+ return obj;
+}
+
+/**
+ * Create a Headers object from an object of headers, ignoring those that do
+ * not conform to HTTP grammar productions.
+ *
+ * @param Object obj Object of headers
+ * @return Headers
+ */
+function createHeadersLenient(obj) {
+ const headers = new Headers();
+ for (const name of Object.keys(obj)) {
+ if (invalidTokenRegex.test(name)) {
+ continue;
+ }
+ if (Array.isArray(obj[name])) {
+ for (const val of obj[name]) {
+ if (invalidHeaderCharRegex.test(val)) {
+ continue;
+ }
+ if (headers[MAP][name] === undefined) {
+ headers[MAP][name] = [val];
+ } else {
+ headers[MAP][name].push(val);
+ }
+ }
+ } else if (!invalidHeaderCharRegex.test(obj[name])) {
+ headers[MAP][name] = [obj[name]];
+ }
+ }
+ return headers;
+}
+
+const INTERNALS$1 = Symbol('Response internals');
+
+// fix an issue where "STATUS_CODES" aren't a named export for node <10
+const STATUS_CODES = http.STATUS_CODES;
+
+/**
+ * Response class
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+class Response {
+ constructor() {
+ let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
+ let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ Body.call(this, body, opts);
+
+ const status = opts.status || 200;
+ const headers = new Headers(opts.headers);
+
+ if (body != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(body);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ this[INTERNALS$1] = {
+ url: opts.url,
+ status,
+ statusText: opts.statusText || STATUS_CODES[status],
+ headers,
+ counter: opts.counter
+ };
+ }
+
+ get url() {
+ return this[INTERNALS$1].url || '';
+ }
+
+ get status() {
+ return this[INTERNALS$1].status;
+ }
+
+ /**
+ * Convenience property representing if the request ended normally
+ */
+ get ok() {
+ return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
+ }
+
+ get redirected() {
+ return this[INTERNALS$1].counter > 0;
+ }
+
+ get statusText() {
+ return this[INTERNALS$1].statusText;
+ }
+
+ get headers() {
+ return this[INTERNALS$1].headers;
+ }
+
+ /**
+ * Clone this response
+ *
+ * @return Response
+ */
+ clone() {
+ return new Response(clone(this), {
+ url: this.url,
+ status: this.status,
+ statusText: this.statusText,
+ headers: this.headers,
+ ok: this.ok,
+ redirected: this.redirected
+ });
+ }
+}
+
+Body.mixIn(Response.prototype);
+
+Object.defineProperties(Response.prototype, {
+ url: { enumerable: true },
+ status: { enumerable: true },
+ ok: { enumerable: true },
+ redirected: { enumerable: true },
+ statusText: { enumerable: true },
+ headers: { enumerable: true },
+ clone: { enumerable: true }
+});
+
+Object.defineProperty(Response.prototype, Symbol.toStringTag, {
+ value: 'Response',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+const INTERNALS$2 = Symbol('Request internals');
+
+// fix an issue where "format", "parse" aren't a named export for node <10
+const parse_url = Url.parse;
+const format_url = Url.format;
+
+const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
+
+/**
+ * Check if a value is an instance of Request.
+ *
+ * @param Mixed input
+ * @return Boolean
+ */
+function isRequest(input) {
+ return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
+}
+
+function isAbortSignal(signal) {
+ const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
+ return !!(proto && proto.constructor.name === 'AbortSignal');
+}
+
+/**
+ * Request class
+ *
+ * @param Mixed input Url or Request instance
+ * @param Object init Custom options
+ * @return Void
+ */
+class Request {
+ constructor(input) {
+ let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ let parsedURL;
+
+ // normalize input
+ if (!isRequest(input)) {
+ if (input && input.href) {
+ // in order to support Node.js' Url objects; though WHATWG's URL objects
+ // will fall into this branch also (since their `toString()` will return
+ // `href` property anyway)
+ parsedURL = parse_url(input.href);
+ } else {
+ // coerce input to a string before attempting to parse
+ parsedURL = parse_url(`${input}`);
+ }
+ input = {};
+ } else {
+ parsedURL = parse_url(input.url);
+ }
+
+ let method = init.method || input.method || 'GET';
+ method = method.toUpperCase();
+
+ if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
+ throw new TypeError('Request with GET/HEAD method cannot have body');
+ }
+
+ let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
+
+ Body.call(this, inputBody, {
+ timeout: init.timeout || input.timeout || 0,
+ size: init.size || input.size || 0
+ });
+
+ const headers = new Headers(init.headers || input.headers || {});
+
+ if (inputBody != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(inputBody);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ let signal = isRequest(input) ? input.signal : null;
+ if ('signal' in init) signal = init.signal;
+
+ if (signal != null && !isAbortSignal(signal)) {
+ throw new TypeError('Expected signal to be an instanceof AbortSignal');
+ }
+
+ this[INTERNALS$2] = {
+ method,
+ redirect: init.redirect || input.redirect || 'follow',
+ headers,
+ parsedURL,
+ signal
+ };
+
+ // node-fetch-only options
+ this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
+ this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
+ this.counter = init.counter || input.counter || 0;
+ this.agent = init.agent || input.agent;
+ }
+
+ get method() {
+ return this[INTERNALS$2].method;
+ }
+
+ get url() {
+ return format_url(this[INTERNALS$2].parsedURL);
+ }
+
+ get headers() {
+ return this[INTERNALS$2].headers;
+ }
+
+ get redirect() {
+ return this[INTERNALS$2].redirect;
+ }
+
+ get signal() {
+ return this[INTERNALS$2].signal;
+ }
+
+ /**
+ * Clone this request
+ *
+ * @return Request
+ */
+ clone() {
+ return new Request(this);
+ }
+}
+
+Body.mixIn(Request.prototype);
+
+Object.defineProperty(Request.prototype, Symbol.toStringTag, {
+ value: 'Request',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Request.prototype, {
+ method: { enumerable: true },
+ url: { enumerable: true },
+ headers: { enumerable: true },
+ redirect: { enumerable: true },
+ clone: { enumerable: true },
+ signal: { enumerable: true }
+});
+
+/**
+ * Convert a Request to Node.js http request options.
+ *
+ * @param Request A Request instance
+ * @return Object The options object to be passed to http.request
+ */
+function getNodeRequestOptions(request) {
+ const parsedURL = request[INTERNALS$2].parsedURL;
+ const headers = new Headers(request[INTERNALS$2].headers);
+
+ // fetch step 1.3
+ if (!headers.has('Accept')) {
+ headers.set('Accept', '*/*');
+ }
+
+ // Basic fetch
+ if (!parsedURL.protocol || !parsedURL.hostname) {
+ throw new TypeError('Only absolute URLs are supported');
+ }
+
+ if (!/^https?:$/.test(parsedURL.protocol)) {
+ throw new TypeError('Only HTTP(S) protocols are supported');
+ }
+
+ if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
+ throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
+ }
+
+ // HTTP-network-or-cache fetch steps 2.4-2.7
+ let contentLengthValue = null;
+ if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
+ contentLengthValue = '0';
+ }
+ if (request.body != null) {
+ const totalBytes = getTotalBytes(request);
+ if (typeof totalBytes === 'number') {
+ contentLengthValue = String(totalBytes);
+ }
+ }
+ if (contentLengthValue) {
+ headers.set('Content-Length', contentLengthValue);
+ }
+
+ // HTTP-network-or-cache fetch step 2.11
+ if (!headers.has('User-Agent')) {
+ headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
+ }
+
+ // HTTP-network-or-cache fetch step 2.15
+ if (request.compress && !headers.has('Accept-Encoding')) {
+ headers.set('Accept-Encoding', 'gzip,deflate');
+ }
+
+ let agent = request.agent;
+ if (typeof agent === 'function') {
+ agent = agent(parsedURL);
+ }
+
+ if (!headers.has('Connection') && !agent) {
+ headers.set('Connection', 'close');
+ }
+
+ // HTTP-network fetch step 4.2
+ // chunked encoding is handled by Node.js
+
+ return Object.assign({}, parsedURL, {
+ method: request.method,
+ headers: exportNodeCompatibleHeaders(headers),
+ agent
+ });
+}
+
+/**
+ * abort-error.js
+ *
+ * AbortError interface for cancelled requests
+ */
+
+/**
+ * Create AbortError instance
+ *
+ * @param String message Error message for human
+ * @return AbortError
+ */
+function AbortError(message) {
+ Error.call(this, message);
+
+ this.type = 'aborted';
+ this.message = message;
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+AbortError.prototype = Object.create(Error.prototype);
+AbortError.prototype.constructor = AbortError;
+AbortError.prototype.name = 'AbortError';
+
+// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
+const PassThrough$1 = Stream.PassThrough;
+const resolve_url = Url.resolve;
+
+/**
+ * Fetch function
+ *
+ * @param Mixed url Absolute url or Request instance
+ * @param Object opts Fetch options
+ * @return Promise
+ */
+function fetch(url, opts) {
+
+ // allow custom promise
+ if (!fetch.Promise) {
+ throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
+ }
+
+ Body.Promise = fetch.Promise;
+
+ // wrap http.request into fetch
+ return new fetch.Promise(function (resolve, reject) {
+ // build request object
+ const request = new Request(url, opts);
+ const options = getNodeRequestOptions(request);
+
+ const send = (options.protocol === 'https:' ? https : http).request;
+ const signal = request.signal;
+
+ let response = null;
+
+ const abort = function abort() {
+ let error = new AbortError('The user aborted a request.');
+ reject(error);
+ if (request.body && request.body instanceof Stream.Readable) {
+ request.body.destroy(error);
+ }
+ if (!response || !response.body) return;
+ response.body.emit('error', error);
+ };
+
+ if (signal && signal.aborted) {
+ abort();
+ return;
+ }
+
+ const abortAndFinalize = function abortAndFinalize() {
+ abort();
+ finalize();
+ };
+
+ // send request
+ const req = send(options);
+ let reqTimeout;
+
+ if (signal) {
+ signal.addEventListener('abort', abortAndFinalize);
+ }
+
+ function finalize() {
+ req.abort();
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ clearTimeout(reqTimeout);
+ }
+
+ if (request.timeout) {
+ req.once('socket', function (socket) {
+ reqTimeout = setTimeout(function () {
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
+ finalize();
+ }, request.timeout);
+ });
+ }
+
+ req.on('error', function (err) {
+ reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
+ finalize();
+ });
+
+ req.on('response', function (res) {
+ clearTimeout(reqTimeout);
+
+ const headers = createHeadersLenient(res.headers);
+
+ // HTTP fetch step 5
+ if (fetch.isRedirect(res.statusCode)) {
+ // HTTP fetch step 5.2
+ const location = headers.get('Location');
+
+ // HTTP fetch step 5.3
+ const locationURL = location === null ? null : resolve_url(request.url, location);
+
+ // HTTP fetch step 5.5
+ switch (request.redirect) {
+ case 'error':
+ reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'));
+ finalize();
+ return;
+ case 'manual':
+ // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
+ if (locationURL !== null) {
+ // handle corrupted header
+ try {
+ headers.set('Location', locationURL);
+ } catch (err) {
+ // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
+ reject(err);
+ }
+ }
+ break;
+ case 'follow':
+ // HTTP-redirect fetch step 2
+ if (locationURL === null) {
+ break;
+ }
+
+ // HTTP-redirect fetch step 5
+ if (request.counter >= request.follow) {
+ reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 6 (counter increment)
+ // Create a new Request object.
+ const requestOpts = {
+ headers: new Headers(request.headers),
+ follow: request.follow,
+ counter: request.counter + 1,
+ agent: request.agent,
+ compress: request.compress,
+ method: request.method,
+ body: request.body,
+ signal: request.signal,
+ timeout: request.timeout
+ };
+
+ // HTTP-redirect fetch step 9
+ if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
+ reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 11
+ if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
+ requestOpts.method = 'GET';
+ requestOpts.body = undefined;
+ requestOpts.headers.delete('content-length');
+ }
+
+ // HTTP-redirect fetch step 15
+ resolve(fetch(new Request(locationURL, requestOpts)));
+ finalize();
+ return;
+ }
+ }
+
+ // prepare response
+ res.once('end', function () {
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ });
+ let body = res.pipe(new PassThrough$1());
+
+ const response_options = {
+ url: request.url,
+ status: res.statusCode,
+ statusText: res.statusMessage,
+ headers: headers,
+ size: request.size,
+ timeout: request.timeout,
+ counter: request.counter
+ };
+
+ // HTTP-network fetch step 12.1.1.3
+ const codings = headers.get('Content-Encoding');
+
+ // HTTP-network fetch step 12.1.1.4: handle content codings
+
+ // in following scenarios we ignore compression support
+ // 1. compression support is disabled
+ // 2. HEAD request
+ // 3. no Content-Encoding header
+ // 4. no content response (204)
+ // 5. content not modified response (304)
+ if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // For Node v6+
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ const zlibOptions = {
+ flush: zlib.Z_SYNC_FLUSH,
+ finishFlush: zlib.Z_SYNC_FLUSH
+ };
+
+ // for gzip
+ if (codings == 'gzip' || codings == 'x-gzip') {
+ body = body.pipe(zlib.createGunzip(zlibOptions));
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // for deflate
+ if (codings == 'deflate' || codings == 'x-deflate') {
+ // handle the infamous raw deflate response from old servers
+ // a hack for old IIS and Apache servers
+ const raw = res.pipe(new PassThrough$1());
+ raw.once('data', function (chunk) {
+ // see http://stackoverflow.com/questions/37519828
+ if ((chunk[0] & 0x0F) === 0x08) {
+ body = body.pipe(zlib.createInflate());
+ } else {
+ body = body.pipe(zlib.createInflateRaw());
+ }
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+ return;
+ }
+
+ // for br
+ if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = body.pipe(zlib.createBrotliDecompress());
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // otherwise, use response as-is
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+
+ writeToStream(req, request);
+ });
+}
+/**
+ * Redirect code matching
+ *
+ * @param Number code Status code
+ * @return Boolean
+ */
+fetch.isRedirect = function (code) {
+ return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+};
+
+// expose Promise
+fetch.Promise = global.Promise;
+
+module.exports = exports = fetch;
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.default = exports;
+exports.Headers = Headers;
+exports.Request = Request;
+exports.Response = Response;
+exports.FetchError = FetchError;
+
+
+/***/ }),
+
+/***/ 462:
+/***/ (function(module) {
+
+"use strict";
+
+
+// See http://www.robvanderwoude.com/escapechars.php
+const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;
+
+function escapeCommand(arg) {
+ // Escape meta chars
+ arg = arg.replace(metaCharsRegExp, '^$1');
+
+ return arg;
+}
+
+function escapeArgument(arg, doubleEscapeMetaChars) {
+ // Convert to string
+ arg = `${arg}`;
+
+ // Algorithm below is based on https://qntm.org/cmd
+
+ // Sequence of backslashes followed by a double quote:
+ // double up all the backslashes and escape the double quote
+ arg = arg.replace(/(\\*)"/g, '$1$1\\"');
+
+ // Sequence of backslashes followed by the end of the string
+ // (which will become a double quote later):
+ // double up all the backslashes
+ arg = arg.replace(/(\\*)$/, '$1$1');
+
+ // All other backslashes occur literally
+
+ // Quote the whole thing:
+ arg = `"${arg}"`;
+
+ // Escape meta chars
+ arg = arg.replace(metaCharsRegExp, '^$1');
+
+ // Double escape meta chars if necessary
+ if (doubleEscapeMetaChars) {
+ arg = arg.replace(metaCharsRegExp, '^$1');
+ }
+
+ return arg;
+}
+
+module.exports.command = escapeCommand;
+module.exports.argument = escapeArgument;
+
+
+/***/ }),
+
+/***/ 463:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var deprecation = __webpack_require__(692);
+var once = _interopDefault(__webpack_require__(969));
+
+const logOnce = once(deprecation => console.warn(deprecation));
+/**
+ * Error with extra properties to help with debugging
+ */
+
+class RequestError extends Error {
+ constructor(message, statusCode, options) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = "HttpError";
+ this.status = statusCode;
+ Object.defineProperty(this, "code", {
+ get() {
+ logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
+ return statusCode;
+ }
+
+ });
+ this.headers = options.headers || {}; // redact request credentials without mutating original request options
+
+ const requestCopy = Object.assign({}, options.request);
+
+ if (options.request.headers.authorization) {
+ requestCopy.headers = Object.assign({}, options.request.headers, {
+ authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
+ });
+ }
+
+ requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
+ // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
+ .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
+ // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
+ .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
+ this.request = requestCopy;
+ }
+
+}
+
+exports.RequestError = RequestError;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 469:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+// Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts
+const graphql_1 = __webpack_require__(898);
+const rest_1 = __webpack_require__(741);
+const Context = __importStar(__webpack_require__(262));
+const httpClient = __importStar(__webpack_require__(539));
+// We need this in order to extend Octokit
+rest_1.Octokit.prototype = new rest_1.Octokit();
+exports.context = new Context.Context();
+class GitHub extends rest_1.Octokit {
+ constructor(token, opts) {
+ super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts)));
+ this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts));
+ }
+ /**
+ * Disambiguates the constructor overload parameters
+ */
+ static disambiguate(token, opts) {
+ return [
+ typeof token === 'string' ? token : '',
+ typeof token === 'object' ? token : opts || {}
+ ];
+ }
+ static getOctokitOptions(args) {
+ const token = args[0];
+ const options = Object.assign({}, args[1]); // Shallow clone - don't mutate the object provided by the caller
+ // Auth
+ const auth = GitHub.getAuthString(token, options);
+ if (auth) {
+ options.auth = auth;
+ }
+ // Proxy
+ const agent = GitHub.getProxyAgent(options);
+ if (agent) {
+ // Shallow clone - don't mutate the object provided by the caller
+ options.request = options.request ? Object.assign({}, options.request) : {};
+ // Set the agent
+ options.request.agent = agent;
+ }
+ return options;
+ }
+ static getGraphQL(args) {
+ const defaults = {};
+ const token = args[0];
+ const options = args[1];
+ // Authorization
+ const auth = this.getAuthString(token, options);
+ if (auth) {
+ defaults.headers = {
+ authorization: auth
+ };
+ }
+ // Proxy
+ const agent = GitHub.getProxyAgent(options);
+ if (agent) {
+ defaults.request = { agent };
+ }
+ return graphql_1.graphql.defaults(defaults);
+ }
+ static getAuthString(token, options) {
+ // Validate args
+ if (!token && !options.auth) {
+ throw new Error('Parameter token or opts.auth is required');
+ }
+ else if (token && options.auth) {
+ throw new Error('Parameters token and opts.auth may not both be specified');
+ }
+ return typeof options.auth === 'string' ? options.auth : `token ${token}`;
+ }
+ static getProxyAgent(options) {
+ var _a;
+ if (!((_a = options.request) === null || _a === void 0 ? void 0 : _a.agent)) {
+ const serverUrl = 'https://api.github.com';
+ if (httpClient.getProxyUrl(serverUrl)) {
+ const hc = new httpClient.HttpClient();
+ return hc.getAgent(serverUrl);
+ }
+ }
+ return undefined;
+ }
+}
+exports.GitHub = GitHub;
+//# sourceMappingURL=github.js.map
+
+/***/ }),
+
+/***/ 470:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const command_1 = __webpack_require__(431);
+const os = __importStar(__webpack_require__(87));
+const path = __importStar(__webpack_require__(622));
+/**
+ * The code to exit an action
+ */
+var ExitCode;
+(function (ExitCode) {
+ /**
+ * A code indicating that the action was successful
+ */
+ ExitCode[ExitCode["Success"] = 0] = "Success";
+ /**
+ * A code indicating that the action was a failure
+ */
+ ExitCode[ExitCode["Failure"] = 1] = "Failure";
+})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
+//-----------------------------------------------------------------------
+// Variables
+//-----------------------------------------------------------------------
+/**
+ * Sets env variable for this action and future actions in the job
+ * @param name the name of the variable to set
+ * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function exportVariable(name, val) {
+ const convertedVal = command_1.toCommandValue(val);
+ process.env[name] = convertedVal;
+ command_1.issueCommand('set-env', { name }, convertedVal);
+}
+exports.exportVariable = exportVariable;
+/**
+ * Registers a secret which will get masked from logs
+ * @param secret value of the secret
+ */
+function setSecret(secret) {
+ command_1.issueCommand('add-mask', {}, secret);
+}
+exports.setSecret = setSecret;
+/**
+ * Prepends inputPath to the PATH (for this action and future actions)
+ * @param inputPath
+ */
+function addPath(inputPath) {
+ command_1.issueCommand('add-path', {}, inputPath);
+ process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
+}
+exports.addPath = addPath;
+/**
+ * Gets the value of an input. The value is also trimmed.
+ *
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns string
+ */
+function getInput(name, options) {
+ const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
+ if (options && options.required && !val) {
+ throw new Error(`Input required and not supplied: ${name}`);
+ }
+ return val.trim();
+}
+exports.getInput = getInput;
+/**
+ * Sets the value of an output.
+ *
+ * @param name name of the output to set
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function setOutput(name, value) {
+ command_1.issueCommand('set-output', { name }, value);
+}
+exports.setOutput = setOutput;
+/**
+ * Enables or disables the echoing of commands into stdout for the rest of the step.
+ * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
+ *
+ */
+function setCommandEcho(enabled) {
+ command_1.issue('echo', enabled ? 'on' : 'off');
+}
+exports.setCommandEcho = setCommandEcho;
+//-----------------------------------------------------------------------
+// Results
+//-----------------------------------------------------------------------
+/**
+ * Sets the action status to failed.
+ * When the action exits it will be with an exit code of 1
+ * @param message add error issue message
+ */
+function setFailed(message) {
+ process.exitCode = ExitCode.Failure;
+ error(message);
+}
+exports.setFailed = setFailed;
+//-----------------------------------------------------------------------
+// Logging Commands
+//-----------------------------------------------------------------------
+/**
+ * Gets whether Actions Step Debug is on or not
+ */
+function isDebug() {
+ return process.env['RUNNER_DEBUG'] === '1';
+}
+exports.isDebug = isDebug;
+/**
+ * Writes debug message to user log
+ * @param message debug message
+ */
+function debug(message) {
+ command_1.issueCommand('debug', {}, message);
+}
+exports.debug = debug;
+/**
+ * Adds an error issue
+ * @param message error issue message. Errors will be converted to string via toString()
+ */
+function error(message) {
+ command_1.issue('error', message instanceof Error ? message.toString() : message);
+}
+exports.error = error;
+/**
+ * Adds a warning issue
+ * @param message warning issue message. Errors will be converted to string via toString()
+ */
+function warning(message) {
+ command_1.issue('warning', message instanceof Error ? message.toString() : message);
+}
+exports.warning = warning;
+/**
+ * Writes info to log with console.log.
+ * @param message info message
+ */
+function info(message) {
+ process.stdout.write(message + os.EOL);
+}
+exports.info = info;
+/**
+ * Begin an output group.
+ *
+ * Output until the next `groupEnd` will be foldable in this group
+ *
+ * @param name The name of the output group
+ */
+function startGroup(name) {
+ command_1.issue('group', name);
+}
+exports.startGroup = startGroup;
+/**
+ * End an output group.
+ */
+function endGroup() {
+ command_1.issue('endgroup');
+}
+exports.endGroup = endGroup;
+/**
+ * Wrap an asynchronous function call in a group.
+ *
+ * Returns the same type as the function itself.
+ *
+ * @param name The name of the group
+ * @param fn The function to wrap in the group
+ */
+function group(name, fn) {
+ return __awaiter(this, void 0, void 0, function* () {
+ startGroup(name);
+ let result;
+ try {
+ result = yield fn();
+ }
+ finally {
+ endGroup();
+ }
+ return result;
+ });
+}
+exports.group = group;
+//-----------------------------------------------------------------------
+// Wrapper action state
+//-----------------------------------------------------------------------
+/**
+ * Saves state for current action, the state can only be retrieved by this action's post job execution.
+ *
+ * @param name name of the state to store
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function saveState(name, value) {
+ command_1.issueCommand('save-state', { name }, value);
+}
+exports.saveState = saveState;
+/**
+ * Gets the value of a state set by this action's main execution.
+ *
+ * @param name name of the state to get
+ * @returns string
+ */
+function getState(name) {
+ return process.env[`STATE_${name}`] || '';
+}
+exports.getState = getState;
+//# sourceMappingURL=core.js.map
+
+/***/ }),
+
+/***/ 489:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const path = __webpack_require__(622);
+const which = __webpack_require__(814);
+const pathKey = __webpack_require__(39)();
+
+function resolveCommandAttempt(parsed, withoutPathExt) {
+ const cwd = process.cwd();
+ const hasCustomCwd = parsed.options.cwd != null;
+
+ // If a custom `cwd` was specified, we need to change the process cwd
+ // because `which` will do stat calls but does not support a custom cwd
+ if (hasCustomCwd) {
+ try {
+ process.chdir(parsed.options.cwd);
+ } catch (err) {
+ /* Empty */
+ }
+ }
+
+ let resolved;
+
+ try {
+ resolved = which.sync(parsed.command, {
+ path: (parsed.options.env || process.env)[pathKey],
+ pathExt: withoutPathExt ? path.delimiter : undefined,
+ });
+ } catch (e) {
+ /* Empty */
+ } finally {
+ process.chdir(cwd);
+ }
+
+ // If we successfully resolved, ensure that an absolute path is returned
+ // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
+ if (resolved) {
+ resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
+ }
+
+ return resolved;
+}
+
+function resolveCommand(parsed) {
+ return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
+}
+
+module.exports = resolveCommand;
+
+
+/***/ }),
+
+/***/ 510:
+/***/ (function(module) {
+
+module.exports = addHook
+
+function addHook (state, kind, name, hook) {
+ var orig = hook
+ if (!state.registry[name]) {
+ state.registry[name] = []
+ }
+
+ if (kind === 'before') {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(orig.bind(null, options))
+ .then(method.bind(null, options))
+ }
+ }
+
+ if (kind === 'after') {
+ hook = function (method, options) {
+ var result
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .then(function (result_) {
+ result = result_
+ return orig(result, options)
+ })
+ .then(function () {
+ return result
+ })
+ }
+ }
+
+ if (kind === 'error') {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .catch(function (error) {
+ return orig(error, options)
+ })
+ }
+ }
+
+ state.registry[name].push({
+ hook: hook,
+ orig: orig
+ })
+}
+
+
+/***/ }),
+
+/***/ 513:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = factory;
+
+const Octokit = __webpack_require__(135);
+const registerPlugin = __webpack_require__(861);
+
+function factory(plugins) {
+ const Api = Octokit.bind(null, plugins || []);
+ Api.plugin = registerPlugin.bind(null, plugins || []);
+ return Api;
+}
+
+
+/***/ }),
+
+/***/ 514:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const io = __importStar(__webpack_require__(1));
+const tc = __importStar(__webpack_require__(533));
+const installer = __importStar(__webpack_require__(634));
+const path_1 = __importDefault(__webpack_require__(622));
+const child_process_1 = __importDefault(__webpack_require__(129));
+const fs_1 = __importDefault(__webpack_require__(747));
+function run() {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ //
+ // versionSpec is optional. If supplied, install / use from the tool cache
+ // If not supplied then problem matchers will still be set up. Useful for self-hosted.
+ //
+ let versionSpec = core.getInput('go-version');
+ // stable will be true unless false is the exact input
+ // since getting unstable versions should be explicit
+ let stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE';
+ console.log(`Setup go ${stable ? 'stable' : ''} version spec ${versionSpec}`);
+ if (versionSpec) {
+ let installDir = tc.find('go', versionSpec);
+ if (!installDir) {
+ console.log(`A version satisfying ${versionSpec} not found locally, attempting to download ...`);
+ installDir = yield installer.downloadGo(versionSpec, stable);
+ console.log('Installed');
+ }
+ if (installDir) {
+ core.exportVariable('GOROOT', installDir);
+ core.addPath(path_1.default.join(installDir, 'bin'));
+ console.log('Added go to the path');
+ let added = addBinToPath();
+ core.debug(`add bin ${added}`);
+ }
+ else {
+ throw new Error(`Could not find a version that satisfied version spec: ${versionSpec}`);
+ }
+ }
+ // add problem matchers
+ const matchersPath = path_1.default.join(__dirname, '..', 'matchers.json');
+ console.log(`##[add-matcher]${matchersPath}`);
+ // output the version actually being used
+ let goPath = yield io.which('go');
+ let goVersion = (child_process_1.default.execSync(`${goPath} version`) || '').toString();
+ console.log(goVersion);
+ core.startGroup('go env');
+ let goEnv = (child_process_1.default.execSync(`${goPath} env`) || '').toString();
+ console.log(goEnv);
+ core.endGroup();
+ }
+ catch (error) {
+ core.setFailed(error.message);
+ }
+ });
+}
+exports.run = run;
+function addBinToPath() {
+ return __awaiter(this, void 0, void 0, function* () {
+ let added = false;
+ let g = yield io.which('go');
+ _debug(`which go :${g}:`);
+ if (!g) {
+ _debug('go not in the path');
+ return added;
+ }
+ let buf = child_process_1.default.execSync('go env GOPATH');
+ if (buf) {
+ let gp = buf.toString().trim();
+ _debug(`go env GOPATH :${gp}:`);
+ if (!fs_1.default.existsSync(gp)) {
+ // some of the hosted images have go install but not profile dir
+ _debug(`creating ${gp}`);
+ io.mkdirP(gp);
+ }
+ let bp = path_1.default.join(gp, 'bin');
+ if (!fs_1.default.existsSync(bp)) {
+ _debug(`creating ${bp}`);
+ io.mkdirP(bp);
+ }
+ core.addPath(bp);
+ added = true;
+ }
+ return added;
+ });
+}
+exports.addBinToPath = addBinToPath;
+function _debug(message) {
+ core.debug(message);
+}
+exports._debug = _debug;
+
+
+/***/ }),
+
+/***/ 523:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var register = __webpack_require__(280)
+var addHook = __webpack_require__(510)
+var removeHook = __webpack_require__(763)
+
+// bind with array of arguments: https://stackoverflow.com/a/21792913
+var bind = Function.bind
+var bindable = bind.bind(bind)
+
+function bindApi (hook, state, name) {
+ var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
+ hook.api = { remove: removeHookRef }
+ hook.remove = removeHookRef
+
+ ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
+ var args = name ? [state, kind, name] : [state, kind]
+ hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
+ })
+}
+
+function HookSingular () {
+ var singularHookName = 'h'
+ var singularHookState = {
+ registry: {}
+ }
+ var singularHook = register.bind(null, singularHookState, singularHookName)
+ bindApi(singularHook, singularHookState, singularHookName)
+ return singularHook
+}
+
+function HookCollection () {
+ var state = {
+ registry: {}
+ }
+
+ var hook = register.bind(null, state)
+ bindApi(hook, state)
+
+ return hook
+}
+
+var collectionHookDeprecationMessageDisplayed = false
+function Hook () {
+ if (!collectionHookDeprecationMessageDisplayed) {
+ console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
+ collectionHookDeprecationMessageDisplayed = true
+ }
+ return HookCollection()
+}
+
+Hook.Singular = HookSingular.bind()
+Hook.Collection = HookCollection.bind()
+
+module.exports = Hook
+// expose constructors as a named property for TypeScript
+module.exports.Hook = Hook
+module.exports.Singular = Hook.Singular
+module.exports.Collection = Hook.Collection
+
+
+/***/ }),
+
+/***/ 533:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const io = __importStar(__webpack_require__(1));
+const fs = __importStar(__webpack_require__(747));
+const os = __importStar(__webpack_require__(87));
+const path = __importStar(__webpack_require__(622));
+const httpm = __importStar(__webpack_require__(539));
+const semver = __importStar(__webpack_require__(864));
+const stream = __importStar(__webpack_require__(413));
+const util = __importStar(__webpack_require__(669));
+const v4_1 = __importDefault(__webpack_require__(826));
+const exec_1 = __webpack_require__(986);
+const assert_1 = __webpack_require__(357);
+const retry_helper_1 = __webpack_require__(979);
+class HTTPError extends Error {
+ constructor(httpStatusCode) {
+ super(`Unexpected HTTP response: ${httpStatusCode}`);
+ this.httpStatusCode = httpStatusCode;
+ Object.setPrototypeOf(this, new.target.prototype);
+ }
+}
+exports.HTTPError = HTTPError;
+const IS_WINDOWS = process.platform === 'win32';
+const userAgent = 'actions/tool-cache';
+/**
+ * Download a tool from an url and stream it into a file
+ *
+ * @param url url of tool to download
+ * @param dest path to download tool
+ * @returns path to downloaded tool
+ */
+function downloadTool(url, dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ dest = dest || path.join(_getTempDirectory(), v4_1.default());
+ yield io.mkdirP(path.dirname(dest));
+ core.debug(`Downloading ${url}`);
+ core.debug(`Destination ${dest}`);
+ const maxAttempts = 3;
+ const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
+ const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
+ const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
+ return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
+ return yield downloadToolAttempt(url, dest || '');
+ }), (err) => {
+ if (err instanceof HTTPError && err.httpStatusCode) {
+ // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
+ if (err.httpStatusCode < 500 &&
+ err.httpStatusCode !== 408 &&
+ err.httpStatusCode !== 429) {
+ return false;
+ }
+ }
+ // Otherwise retry
+ return true;
+ });
+ });
+}
+exports.downloadTool = downloadTool;
+function downloadToolAttempt(url, dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (fs.existsSync(dest)) {
+ throw new Error(`Destination file path ${dest} already exists`);
+ }
+ // Get the response headers
+ const http = new httpm.HttpClient(userAgent, [], {
+ allowRetries: false
+ });
+ const response = yield http.get(url);
+ if (response.message.statusCode !== 200) {
+ const err = new HTTPError(response.message.statusCode);
+ core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
+ throw err;
+ }
+ // Download the response body
+ const pipeline = util.promisify(stream.pipeline);
+ const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
+ const readStream = responseMessageFactory();
+ let succeeded = false;
+ try {
+ yield pipeline(readStream, fs.createWriteStream(dest));
+ core.debug('download complete');
+ succeeded = true;
+ return dest;
+ }
+ finally {
+ // Error, delete dest before retry
+ if (!succeeded) {
+ core.debug('download failed');
+ try {
+ yield io.rmRF(dest);
+ }
+ catch (err) {
+ core.debug(`Failed to delete '${dest}'. ${err.message}`);
+ }
+ }
+ }
+ });
+}
+/**
+ * Extract a .7z file
+ *
+ * @param file path to the .7z file
+ * @param dest destination directory. Optional.
+ * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
+ * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe, which
+ * handles long paths gracefully. By default 7zdec.exe is used because it is a very small program and is
+ * bundled with the tool lib; however, it does not support long paths. 7zr.exe is the reduced command-line
+ * interface: it is smaller than the full command-line interface and it does support long paths. At the
+ * time of this writing, it is freely available from the LZMA SDK on the 7zip website.
+ * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
+ * to 7zr.exe can be passed to this function.
+ * @returns path to the destination directory
+ */
+function extract7z(file, dest, _7zPath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
+ assert_1.ok(file, 'parameter "file" is required');
+ dest = yield _createExtractFolder(dest);
+ const originalCwd = process.cwd();
+ process.chdir(dest);
+ if (_7zPath) {
+ try {
+ const args = [
+ 'x',
+ '-bb1',
+ '-bd',
+ '-sccUTF-8',
+ file
+ ];
+ const options = {
+ silent: true
+ };
+ yield exec_1.exec(`"${_7zPath}"`, args, options);
+ }
+ finally {
+ process.chdir(originalCwd);
+ }
+ }
+ else {
+ const escapedScript = path
+ .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
+ .replace(/'/g, "''")
+ .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
+ const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
+ const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
+ const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
+ const args = [
+ '-NoLogo',
+ '-Sta',
+ '-NoProfile',
+ '-NonInteractive',
+ '-ExecutionPolicy',
+ 'Unrestricted',
+ '-Command',
+ command
+ ];
+ const options = {
+ silent: true
+ };
+ try {
+ const powershellPath = yield io.which('powershell', true);
+ yield exec_1.exec(`"${powershellPath}"`, args, options);
+ }
+ finally {
+ process.chdir(originalCwd);
+ }
+ }
+ return dest;
+ });
+}
+exports.extract7z = extract7z;
+/**
+ * Extract a compressed tar archive
+ *
+ * @param file path to the tar
+ * @param dest destination directory. Optional.
+ * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
+ * @returns path to the destination directory
+ */
+function extractTar(file, dest, flags = 'xz') {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!file) {
+ throw new Error("parameter 'file' is required");
+ }
+ // Create dest
+ dest = yield _createExtractFolder(dest);
+ // Determine whether GNU tar
+ core.debug('Checking tar --version');
+ let versionOutput = '';
+ yield exec_1.exec('tar --version', [], {
+ ignoreReturnCode: true,
+ silent: true,
+ listeners: {
+ stdout: (data) => (versionOutput += data.toString()),
+ stderr: (data) => (versionOutput += data.toString())
+ }
+ });
+ core.debug(versionOutput.trim());
+ const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
+ // Initialize args
+ const args = [flags];
+ let destArg = dest;
+ let fileArg = file;
+ if (IS_WINDOWS && isGnuTar) {
+ args.push('--force-local');
+ destArg = dest.replace(/\\/g, '/');
+ // Technically only the dest needs to have `/` but for aesthetic consistency
+ // convert slashes in the file arg too.
+ fileArg = file.replace(/\\/g, '/');
+ }
+ if (isGnuTar) {
+ // Suppress warnings when using GNU tar to extract archives created by BSD tar
+ args.push('--warning=no-unknown-keyword');
+ }
+ args.push('-C', destArg, '-f', fileArg);
+ yield exec_1.exec(`tar`, args);
+ return dest;
+ });
+}
+exports.extractTar = extractTar;
+/**
+ * Extract a zip
+ *
+ * @param file path to the zip
+ * @param dest destination directory. Optional.
+ * @returns path to the destination directory
+ */
+function extractZip(file, dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!file) {
+ throw new Error("parameter 'file' is required");
+ }
+ dest = yield _createExtractFolder(dest);
+ if (IS_WINDOWS) {
+ yield extractZipWin(file, dest);
+ }
+ else {
+ yield extractZipNix(file, dest);
+ }
+ return dest;
+ });
+}
+exports.extractZip = extractZip;
+function extractZipWin(file, dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // build the powershell command
+ const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
+ const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
+ const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
+ // run powershell
+ const powershellPath = yield io.which('powershell');
+ const args = [
+ '-NoLogo',
+ '-Sta',
+ '-NoProfile',
+ '-NonInteractive',
+ '-ExecutionPolicy',
+ 'Unrestricted',
+ '-Command',
+ command
+ ];
+ yield exec_1.exec(`"${powershellPath}"`, args);
+ });
+}
+function extractZipNix(file, dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const unzipPath = yield io.which('unzip');
+ yield exec_1.exec(`"${unzipPath}"`, [file], { cwd: dest });
+ });
+}
+/**
+ * Caches a directory and installs it into the tool cacheDir
+ *
+ * @param sourceDir the directory to cache into tools
+ * @param tool tool name
+ * @param version version of the tool. semver format
+ * @param arch architecture of the tool. Optional. Defaults to machine architecture
+ */
+function cacheDir(sourceDir, tool, version, arch) {
+ return __awaiter(this, void 0, void 0, function* () {
+ version = semver.clean(version) || version;
+ arch = arch || os.arch();
+ core.debug(`Caching tool ${tool} ${version} ${arch}`);
+ core.debug(`source dir: ${sourceDir}`);
+ if (!fs.statSync(sourceDir).isDirectory()) {
+ throw new Error('sourceDir is not a directory');
+ }
+ // Create the tool dir
+ const destPath = yield _createToolPath(tool, version, arch);
+ // copy each child item. do not move. move can fail on Windows
+ // due to anti-virus software having an open handle on a file.
+ for (const itemName of fs.readdirSync(sourceDir)) {
+ const s = path.join(sourceDir, itemName);
+ yield io.cp(s, destPath, { recursive: true });
+ }
+ // write .complete
+ _completeToolPath(tool, version, arch);
+ return destPath;
+ });
+}
+exports.cacheDir = cacheDir;
+/**
+ * Caches a downloaded file (GUID) and installs it
+ * into the tool cache with a given targetName
+ *
+ * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
+ * @param targetFile the name of the file name in the tools directory
+ * @param tool tool name
+ * @param version version of the tool. semver format
+ * @param arch architecture of the tool. Optional. Defaults to machine architecture
+ */
+function cacheFile(sourceFile, targetFile, tool, version, arch) {
+ return __awaiter(this, void 0, void 0, function* () {
+ version = semver.clean(version) || version;
+ arch = arch || os.arch();
+ core.debug(`Caching tool ${tool} ${version} ${arch}`);
+ core.debug(`source file: ${sourceFile}`);
+ if (!fs.statSync(sourceFile).isFile()) {
+ throw new Error('sourceFile is not a file');
+ }
+ // create the tool dir
+ const destFolder = yield _createToolPath(tool, version, arch);
+ // copy instead of move. move can fail on Windows due to
+ // anti-virus software having an open handle on a file.
+ const destPath = path.join(destFolder, targetFile);
+ core.debug(`destination file ${destPath}`);
+ yield io.cp(sourceFile, destPath);
+ // write .complete
+ _completeToolPath(tool, version, arch);
+ return destFolder;
+ });
+}
+exports.cacheFile = cacheFile;
+/**
+ * Finds the path to a tool version in the local installed tool cache
+ *
+ * @param toolName name of the tool
+ * @param versionSpec version of the tool
+ * @param arch optional arch. defaults to arch of computer
+ */
+function find(toolName, versionSpec, arch) {
+ if (!toolName) {
+ throw new Error('toolName parameter is required');
+ }
+ if (!versionSpec) {
+ throw new Error('versionSpec parameter is required');
+ }
+ arch = arch || os.arch();
+ // attempt to resolve an explicit version
+ if (!_isExplicitVersion(versionSpec)) {
+ const localVersions = findAllVersions(toolName, arch);
+ const match = _evaluateVersions(localVersions, versionSpec);
+ versionSpec = match;
+ }
+ // check for the explicit version in the cache
+ let toolPath = '';
+ if (versionSpec) {
+ versionSpec = semver.clean(versionSpec) || '';
+ const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);
+ core.debug(`checking cache: ${cachePath}`);
+ if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
+ core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
+ toolPath = cachePath;
+ }
+ else {
+ core.debug('not found');
+ }
+ }
+ return toolPath;
+}
+exports.find = find;
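+// Hedged sketch of a typical lookup-or-install flow built from the helpers in this module; the tool
+// name, version, and URL are placeholders, an enclosing async function is assumed, and RUNNER_TEMP /
+// RUNNER_TOOL_CACHE must be defined as they are on hosted runners:
+//
+//   let toolDir = tc.find('some-tool', '1.2.3');
+//   if (!toolDir) {
+//     const archive = await tc.downloadTool('https://example.com/some-tool-1.2.3.tar.gz');
+//     const extracted = await tc.extractTar(archive);
+//     toolDir = await tc.cacheDir(extracted, 'some-tool', '1.2.3');
+//   }
+//   core.addPath(toolDir); // @actions/core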
+/**
+ * Finds the paths to all versions of a tool that are installed in the local tool cache
+ *
+ * @param toolName name of the tool
+ * @param arch optional arch. defaults to arch of computer
+ */
+function findAllVersions(toolName, arch) {
+ const versions = [];
+ arch = arch || os.arch();
+ const toolPath = path.join(_getCacheDirectory(), toolName);
+ if (fs.existsSync(toolPath)) {
+ const children = fs.readdirSync(toolPath);
+ for (const child of children) {
+ if (_isExplicitVersion(child)) {
+ const fullPath = path.join(toolPath, child, arch || '');
+ if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
+ versions.push(child);
+ }
+ }
+ }
+ }
+ return versions;
+}
+exports.findAllVersions = findAllVersions;
+function _createExtractFolder(dest) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!dest) {
+ // create a temp dir
+ dest = path.join(_getTempDirectory(), v4_1.default());
+ }
+ yield io.mkdirP(dest);
+ return dest;
+ });
+}
+function _createToolPath(tool, version, arch) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
+ core.debug(`destination ${folderPath}`);
+ const markerPath = `${folderPath}.complete`;
+ yield io.rmRF(folderPath);
+ yield io.rmRF(markerPath);
+ yield io.mkdirP(folderPath);
+ return folderPath;
+ });
+}
+function _completeToolPath(tool, version, arch) {
+ const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
+ const markerPath = `${folderPath}.complete`;
+ fs.writeFileSync(markerPath, '');
+ core.debug('finished caching tool');
+}
+function _isExplicitVersion(versionSpec) {
+ const c = semver.clean(versionSpec) || '';
+ core.debug(`isExplicit: ${c}`);
+ const valid = semver.valid(c) != null;
+ core.debug(`explicit? ${valid}`);
+ return valid;
+}
+function _evaluateVersions(versions, versionSpec) {
+ let version = '';
+ core.debug(`evaluating ${versions.length} versions`);
+ versions = versions.sort((a, b) => {
+ if (semver.gt(a, b)) {
+ return 1;
+ }
+ return -1;
+ });
+ for (let i = versions.length - 1; i >= 0; i--) {
+ const potential = versions[i];
+ const satisfied = semver.satisfies(potential, versionSpec);
+ if (satisfied) {
+ version = potential;
+ break;
+ }
+ }
+ if (version) {
+ core.debug(`matched: ${version}`);
+ }
+ else {
+ core.debug('match not found');
+ }
+ return version;
+}
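+// For example (illustrative values): _evaluateVersions(['1.2.0', '1.3.1', '2.0.0'], '1.x') sorts the
+// candidates ascending, walks them from the end, and returns '1.3.1' as the highest version satisfying
+// the range.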
+/**
+ * Gets RUNNER_TOOL_CACHE
+ */
+function _getCacheDirectory() {
+ const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';
+ assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');
+ return cacheDirectory;
+}
+/**
+ * Gets RUNNER_TEMP
+ */
+function _getTempDirectory() {
+ const tempDirectory = process.env['RUNNER_TEMP'] || '';
+ assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
+ return tempDirectory;
+}
+/**
+ * Gets a global variable
+ */
+function _getGlobal(key, defaultValue) {
+ /* eslint-disable @typescript-eslint/no-explicit-any */
+ const value = global[key];
+ /* eslint-enable @typescript-eslint/no-explicit-any */
+ return value !== undefined ? value : defaultValue;
+}
+//# sourceMappingURL=tool-cache.js.map
+
+/***/ }),
+
+/***/ 536:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = hasFirstPage
+
+const deprecate = __webpack_require__(370)
+const getPageLinks = __webpack_require__(577)
+
+function hasFirstPage (link) {
+ deprecate(`octokit.hasFirstPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
+ return getPageLinks(link).first
+}
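+// Per the deprecation notice above, callers are expected to migrate to octokit.paginate or async
+// iterators; a hedged sketch using the route-string form (owner/repo values are placeholders):
+//   const runs = await octokit.paginate('GET /repos/:owner/:repo/actions/runs', { owner: 'someone', repo: 'something' });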
+
+
+/***/ }),
+
+/***/ 538:
+/***/ (function(module) {
+
+module.exports = validateAuth;
+
+function validateAuth(auth) {
+ if (typeof auth === "string") {
+ return;
+ }
+
+ if (typeof auth === "function") {
+ return;
+ }
+
+ if (auth.username && auth.password) {
+ return;
+ }
+
+ if (auth.clientId && auth.clientSecret) {
+ return;
+ }
+
+ throw new Error(`Invalid "auth" option: ${JSON.stringify(auth)}`);
+}
+
+
+/***/ }),
+
+/***/ 539:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const url = __webpack_require__(835);
+const http = __webpack_require__(605);
+const https = __webpack_require__(34);
+const pm = __webpack_require__(950);
+let tunnel;
+var HttpCodes;
+(function (HttpCodes) {
+ HttpCodes[HttpCodes["OK"] = 200] = "OK";
+ HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
+ HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
+ HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
+ HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
+ HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
+ HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
+ HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
+ HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
+ HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
+ HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
+ HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
+ HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
+ HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
+ HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
+ HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
+ HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
+ HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
+ HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
+ HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
+ HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
+ HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
+ HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
+ HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
+ HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
+ HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
+ HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
+})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
+var Headers;
+(function (Headers) {
+ Headers["Accept"] = "accept";
+ Headers["ContentType"] = "content-type";
+})(Headers = exports.Headers || (exports.Headers = {}));
+var MediaTypes;
+(function (MediaTypes) {
+ MediaTypes["ApplicationJson"] = "application/json";
+})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
+/**
+ * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+function getProxyUrl(serverUrl) {
+ let proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
+ return proxyUrl ? proxyUrl.href : '';
+}
+exports.getProxyUrl = getProxyUrl;
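+// Hedged example of the proxy resolution above (the proxy helper follows the usual http(s)_proxy
+// environment-variable convention; the address is a placeholder):
+//   process.env['https_proxy'] = 'http://127.0.0.1:8080';
+//   getProxyUrl('https://api.github.com'); // -> 'http://127.0.0.1:8080/'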
+const HttpRedirectCodes = [
+ HttpCodes.MovedPermanently,
+ HttpCodes.ResourceMoved,
+ HttpCodes.SeeOther,
+ HttpCodes.TemporaryRedirect,
+ HttpCodes.PermanentRedirect
+];
+const HttpResponseRetryCodes = [
+ HttpCodes.BadGateway,
+ HttpCodes.ServiceUnavailable,
+ HttpCodes.GatewayTimeout
+];
+const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
+const ExponentialBackoffCeiling = 10;
+const ExponentialBackoffTimeSlice = 5;
+class HttpClientResponse {
+ constructor(message) {
+ this.message = message;
+ }
+ readBody() {
+ return new Promise(async (resolve, reject) => {
+ let output = Buffer.alloc(0);
+ this.message.on('data', (chunk) => {
+ output = Buffer.concat([output, chunk]);
+ });
+ this.message.on('end', () => {
+ resolve(output.toString());
+ });
+ });
+ }
+}
+exports.HttpClientResponse = HttpClientResponse;
+function isHttps(requestUrl) {
+ let parsedUrl = url.parse(requestUrl);
+ return parsedUrl.protocol === 'https:';
+}
+exports.isHttps = isHttps;
+class HttpClient {
+ constructor(userAgent, handlers, requestOptions) {
+ this._ignoreSslError = false;
+ this._allowRedirects = true;
+ this._allowRedirectDowngrade = false;
+ this._maxRedirects = 50;
+ this._allowRetries = false;
+ this._maxRetries = 1;
+ this._keepAlive = false;
+ this._disposed = false;
+ this.userAgent = userAgent;
+ this.handlers = handlers || [];
+ this.requestOptions = requestOptions;
+ if (requestOptions) {
+ if (requestOptions.ignoreSslError != null) {
+ this._ignoreSslError = requestOptions.ignoreSslError;
+ }
+ this._socketTimeout = requestOptions.socketTimeout;
+ if (requestOptions.allowRedirects != null) {
+ this._allowRedirects = requestOptions.allowRedirects;
+ }
+ if (requestOptions.allowRedirectDowngrade != null) {
+ this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
+ }
+ if (requestOptions.maxRedirects != null) {
+ this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
+ }
+ if (requestOptions.keepAlive != null) {
+ this._keepAlive = requestOptions.keepAlive;
+ }
+ if (requestOptions.allowRetries != null) {
+ this._allowRetries = requestOptions.allowRetries;
+ }
+ if (requestOptions.maxRetries != null) {
+ this._maxRetries = requestOptions.maxRetries;
+ }
+ }
+ }
+ options(requestUrl, additionalHeaders) {
+ return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
+ }
+ get(requestUrl, additionalHeaders) {
+ return this.request('GET', requestUrl, null, additionalHeaders || {});
+ }
+ del(requestUrl, additionalHeaders) {
+ return this.request('DELETE', requestUrl, null, additionalHeaders || {});
+ }
+ post(requestUrl, data, additionalHeaders) {
+ return this.request('POST', requestUrl, data, additionalHeaders || {});
+ }
+ patch(requestUrl, data, additionalHeaders) {
+ return this.request('PATCH', requestUrl, data, additionalHeaders || {});
+ }
+ put(requestUrl, data, additionalHeaders) {
+ return this.request('PUT', requestUrl, data, additionalHeaders || {});
+ }
+ head(requestUrl, additionalHeaders) {
+ return this.request('HEAD', requestUrl, null, additionalHeaders || {});
+ }
+ sendStream(verb, requestUrl, stream, additionalHeaders) {
+ return this.request(verb, requestUrl, stream, additionalHeaders);
+ }
+ /**
+ * Gets a typed object from an endpoint
+     * Be aware that a 404 resolves with a null result. Other errors (4xx, 5xx) reject the promise
+ */
+ async getJson(requestUrl, additionalHeaders = {}) {
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ let res = await this.get(requestUrl, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ async postJson(requestUrl, obj, additionalHeaders = {}) {
+ let data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ let res = await this.post(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ async putJson(requestUrl, obj, additionalHeaders = {}) {
+ let data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ let res = await this.put(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ async patchJson(requestUrl, obj, additionalHeaders = {}) {
+ let data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ let res = await this.patch(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ /**
+ * Makes a raw http request.
+     * All other verb-specific methods such as get, post, and patch ultimately call this.
+     * Prefer get, del, post, and patch.
+ */
+ async request(verb, requestUrl, data, headers) {
+ if (this._disposed) {
+ throw new Error('Client has already been disposed.');
+ }
+ let parsedUrl = url.parse(requestUrl);
+ let info = this._prepareRequest(verb, parsedUrl, headers);
+ // Only perform retries on reads since writes may not be idempotent.
+ let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
+ ? this._maxRetries + 1
+ : 1;
+ let numTries = 0;
+ let response;
+ while (numTries < maxTries) {
+ response = await this.requestRaw(info, data);
+ // Check if it's an authentication challenge
+ if (response &&
+ response.message &&
+ response.message.statusCode === HttpCodes.Unauthorized) {
+ let authenticationHandler;
+ for (let i = 0; i < this.handlers.length; i++) {
+ if (this.handlers[i].canHandleAuthentication(response)) {
+ authenticationHandler = this.handlers[i];
+ break;
+ }
+ }
+ if (authenticationHandler) {
+ return authenticationHandler.handleAuthentication(this, info, data);
+ }
+ else {
+ // We have received an unauthorized response but have no handlers to handle it.
+ // Let the response return to the caller.
+ return response;
+ }
+ }
+ let redirectsRemaining = this._maxRedirects;
+ while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
+ this._allowRedirects &&
+ redirectsRemaining > 0) {
+ const redirectUrl = response.message.headers['location'];
+ if (!redirectUrl) {
+                    // if there's no location to redirect to, stop following redirects
+ break;
+ }
+ let parsedRedirectUrl = url.parse(redirectUrl);
+ if (parsedUrl.protocol == 'https:' &&
+ parsedUrl.protocol != parsedRedirectUrl.protocol &&
+ !this._allowRedirectDowngrade) {
+ throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
+ }
+                // we need to finish reading the response before reassigning it;
+                // otherwise the open socket would leak.
+ await response.readBody();
+ // strip authorization header if redirected to a different hostname
+ if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
+ for (let header in headers) {
+ // header names are case insensitive
+ if (header.toLowerCase() === 'authorization') {
+ delete headers[header];
+ }
+ }
+ }
+ // let's make the request with the new redirectUrl
+ info = this._prepareRequest(verb, parsedRedirectUrl, headers);
+ response = await this.requestRaw(info, data);
+ redirectsRemaining--;
+ }
+ if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
+ // If not a retry code, return immediately instead of retrying
+ return response;
+ }
+ numTries += 1;
+ if (numTries < maxTries) {
+ await response.readBody();
+ await this._performExponentialBackoff(numTries);
+ }
+ }
+ return response;
+ }
+ /**
+ * Needs to be called if keepAlive is set to true in request options.
+ */
+ dispose() {
+ if (this._agent) {
+ this._agent.destroy();
+ }
+ this._disposed = true;
+ }
+ /**
+ * Raw request.
+ * @param info
+ * @param data
+ */
+ requestRaw(info, data) {
+ return new Promise((resolve, reject) => {
+ let callbackForResult = function (err, res) {
+ if (err) {
+ reject(err);
+ }
+ resolve(res);
+ };
+ this.requestRawWithCallback(info, data, callbackForResult);
+ });
+ }
+ /**
+ * Raw request with callback.
+ * @param info
+ * @param data
+ * @param onResult
+ */
+ requestRawWithCallback(info, data, onResult) {
+ let socket;
+ if (typeof data === 'string') {
+ info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
+ }
+ let callbackCalled = false;
+ let handleResult = (err, res) => {
+ if (!callbackCalled) {
+ callbackCalled = true;
+ onResult(err, res);
+ }
+ };
+ let req = info.httpModule.request(info.options, (msg) => {
+ let res = new HttpClientResponse(msg);
+ handleResult(null, res);
+ });
+ req.on('socket', sock => {
+ socket = sock;
+ });
+ // If we ever get disconnected, we want the socket to timeout eventually
+ req.setTimeout(this._socketTimeout || 3 * 60000, () => {
+ if (socket) {
+ socket.end();
+ }
+ handleResult(new Error('Request timeout: ' + info.options.path), null);
+ });
+ req.on('error', function (err) {
+ // err has statusCode property
+ // res should have headers
+ handleResult(err, null);
+ });
+ if (data && typeof data === 'string') {
+ req.write(data, 'utf8');
+ }
+ if (data && typeof data !== 'string') {
+ data.on('close', function () {
+ req.end();
+ });
+ data.pipe(req);
+ }
+ else {
+ req.end();
+ }
+ }
+ /**
+ * Gets an http agent. This function is useful when you need an http agent that handles
+ * routing through a proxy server - depending upon the url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+ getAgent(serverUrl) {
+ let parsedUrl = url.parse(serverUrl);
+ return this._getAgent(parsedUrl);
+ }
+ _prepareRequest(method, requestUrl, headers) {
+ const info = {};
+ info.parsedUrl = requestUrl;
+ const usingSsl = info.parsedUrl.protocol === 'https:';
+ info.httpModule = usingSsl ? https : http;
+ const defaultPort = usingSsl ? 443 : 80;
+ info.options = {};
+ info.options.host = info.parsedUrl.hostname;
+ info.options.port = info.parsedUrl.port
+ ? parseInt(info.parsedUrl.port)
+ : defaultPort;
+ info.options.path =
+ (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
+ info.options.method = method;
+ info.options.headers = this._mergeHeaders(headers);
+ if (this.userAgent != null) {
+ info.options.headers['user-agent'] = this.userAgent;
+ }
+ info.options.agent = this._getAgent(info.parsedUrl);
+ // gives handlers an opportunity to participate
+ if (this.handlers) {
+ this.handlers.forEach(handler => {
+ handler.prepareRequest(info.options);
+ });
+ }
+ return info;
+ }
+ _mergeHeaders(headers) {
+ const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+ if (this.requestOptions && this.requestOptions.headers) {
+ return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
+ }
+ return lowercaseKeys(headers || {});
+ }
+ _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
+ const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+ let clientHeader;
+ if (this.requestOptions && this.requestOptions.headers) {
+ clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
+ }
+ return additionalHeaders[header] || clientHeader || _default;
+ }
+ _getAgent(parsedUrl) {
+ let agent;
+ let proxyUrl = pm.getProxyUrl(parsedUrl);
+ let useProxy = proxyUrl && proxyUrl.hostname;
+ if (this._keepAlive && useProxy) {
+ agent = this._proxyAgent;
+ }
+ if (this._keepAlive && !useProxy) {
+ agent = this._agent;
+ }
+ // if agent is already assigned use that agent.
+ if (!!agent) {
+ return agent;
+ }
+ const usingSsl = parsedUrl.protocol === 'https:';
+ let maxSockets = 100;
+ if (!!this.requestOptions) {
+ maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
+ }
+ if (useProxy) {
+ // If using proxy, need tunnel
+ if (!tunnel) {
+ tunnel = __webpack_require__(856);
+ }
+ const agentOptions = {
+ maxSockets: maxSockets,
+ keepAlive: this._keepAlive,
+ proxy: {
+ proxyAuth: proxyUrl.auth,
+ host: proxyUrl.hostname,
+ port: proxyUrl.port
+ }
+ };
+ let tunnelAgent;
+ const overHttps = proxyUrl.protocol === 'https:';
+ if (usingSsl) {
+ tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
+ }
+ else {
+ tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
+ }
+ agent = tunnelAgent(agentOptions);
+ this._proxyAgent = agent;
+ }
+        // if reusing the agent across requests and a tunneling agent isn't assigned, create a new agent
+ if (this._keepAlive && !agent) {
+ const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
+ agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
+ this._agent = agent;
+ }
+        // if not using a private agent and a tunnel agent isn't set up, use the global agent
+ if (!agent) {
+ agent = usingSsl ? https.globalAgent : http.globalAgent;
+ }
+ if (usingSsl && this._ignoreSslError) {
+ // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
+ // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+ // we have to cast it to any and change it directly
+ agent.options = Object.assign(agent.options || {}, {
+ rejectUnauthorized: false
+ });
+ }
+ return agent;
+ }
+ _performExponentialBackoff(retryNumber) {
+ retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
+ const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
+ return new Promise(resolve => setTimeout(() => resolve(), ms));
+ }
+ static dateTimeDeserializer(key, value) {
+ if (typeof value === 'string') {
+ let a = new Date(value);
+ if (!isNaN(a.valueOf())) {
+ return a;
+ }
+ }
+ return value;
+ }
+ async _processResponse(res, options) {
+ return new Promise(async (resolve, reject) => {
+ const statusCode = res.message.statusCode;
+ const response = {
+ statusCode: statusCode,
+ result: null,
+ headers: {}
+ };
+ // not found leads to null obj returned
+ if (statusCode == HttpCodes.NotFound) {
+ resolve(response);
+ }
+ let obj;
+ let contents;
+ // get the result from the body
+ try {
+ contents = await res.readBody();
+ if (contents && contents.length > 0) {
+ if (options && options.deserializeDates) {
+ obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
+ }
+ else {
+ obj = JSON.parse(contents);
+ }
+ response.result = obj;
+ }
+ response.headers = res.message.headers;
+ }
+ catch (err) {
+ // Invalid resource (contents not json); leaving result obj null
+ }
+ // note that 3xx redirects are handled by the http layer.
+ if (statusCode > 299) {
+ let msg;
+ // if exception/error in body, attempt to get better error
+ if (obj && obj.message) {
+ msg = obj.message;
+ }
+ else if (contents && contents.length > 0) {
+ // it may be the case that the exception is in the body message as string
+ msg = contents;
+ }
+ else {
+ msg = 'Failed request: (' + statusCode + ')';
+ }
+ let err = new Error(msg);
+ // attach statusCode and body obj (if available) to the error object
+ err['statusCode'] = statusCode;
+ if (response.result) {
+ err['result'] = response.result;
+ }
+ reject(err);
+ }
+ else {
+ resolve(response);
+ }
+ });
+ }
+}
+exports.HttpClient = HttpClient;
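+// Hedged usage sketch of the client above (illustrative; the URL is a placeholder and an enclosing
+// async context is assumed):
+//   const client = new HttpClient('my-user-agent', [], { allowRetries: true, maxRetries: 2 });
+//   const res = await client.getJson('https://api.github.com/repos/example/example');
+//   // res.statusCode, res.headers, and res.result (null on 404 or when the body is not valid JSON)
+//   client.dispose(); // only required when keepAlive was set to true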
+
+
+/***/ }),
+
+/***/ 550:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = getNextPage
+
+const getPage = __webpack_require__(265)
+
+function getNextPage (octokit, link, headers) {
+ return getPage(octokit, link, 'next', headers)
+}
+
+
+/***/ }),
+
+/***/ 558:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = hasPreviousPage
+
+const deprecate = __webpack_require__(370)
+const getPageLinks = __webpack_require__(577)
+
+function hasPreviousPage (link) {
+ deprecate(`octokit.hasPreviousPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
+ return getPageLinks(link).prev
+}
+
+
+/***/ }),
+
+/***/ 559:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+var deprecation = __webpack_require__(692);
+
+var endpointsByScope = {
+ actions: {
+ cancelWorkflowRun: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id/cancel"
+ },
+ createOrUpdateSecretForRepo: {
+ method: "PUT",
+ params: {
+ encrypted_value: {
+ type: "string"
+ },
+ key_id: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/secrets/:name"
+ },
+ createRegistrationToken: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners/registration-token"
+ },
+ createRemoveToken: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners/remove-token"
+ },
+ deleteArtifact: {
+ method: "DELETE",
+ params: {
+ artifact_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/artifacts/:artifact_id"
+ },
+ deleteSecretFromRepo: {
+ method: "DELETE",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/secrets/:name"
+ },
+ downloadArtifact: {
+ method: "GET",
+ params: {
+ archive_format: {
+ required: true,
+ type: "string"
+ },
+ artifact_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/artifacts/:artifact_id/:archive_format"
+ },
+ getArtifact: {
+ method: "GET",
+ params: {
+ artifact_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/artifacts/:artifact_id"
+ },
+ getPublicKey: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/secrets/public-key"
+ },
+ getSecret: {
+ method: "GET",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/secrets/:name"
+ },
+ getSelfHostedRunner: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ runner_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners/:runner_id"
+ },
+ getWorkflow: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ workflow_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/workflows/:workflow_id"
+ },
+ getWorkflowJob: {
+ method: "GET",
+ params: {
+ job_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/jobs/:job_id"
+ },
+ getWorkflowRun: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id"
+ },
+ listDownloadsForSelfHostedRunnerApplication: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners/downloads"
+ },
+ listJobsForWorkflowRun: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id/jobs"
+ },
+ listRepoWorkflowRuns: {
+ method: "GET",
+ params: {
+ actor: {
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ event: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ status: {
+ enum: ["completed", "status", "conclusion"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs"
+ },
+ listRepoWorkflows: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/workflows"
+ },
+ listSecretsForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/secrets"
+ },
+ listSelfHostedRunnersForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners"
+ },
+ listWorkflowJobLogs: {
+ method: "GET",
+ params: {
+ job_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/jobs/:job_id/logs"
+ },
+ listWorkflowRunArtifacts: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id/artifacts"
+ },
+ listWorkflowRunLogs: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id/logs"
+ },
+ listWorkflowRuns: {
+ method: "GET",
+ params: {
+ actor: {
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ event: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ status: {
+ enum: ["completed", "status", "conclusion"],
+ type: "string"
+ },
+ workflow_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/workflows/:workflow_id/runs"
+ },
+ reRunWorkflow: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ run_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runs/:run_id/rerun"
+ },
+ removeSelfHostedRunner: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ runner_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/actions/runners/:runner_id"
+ }
+ },
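+  // Illustrative mapping (not part of the generated data): each definition above becomes a namespaced
+  // client method, e.g. octokit.actions.getWorkflowRun({ owner, repo, run_id }) issues a GET to
+  // /repos/:owner/:repo/actions/runs/:run_id after the declared required params are validated.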
+ activity: {
+ checkStarringRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/starred/:owner/:repo"
+ },
+ deleteRepoSubscription: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/subscription"
+ },
+ deleteThreadSubscription: {
+ method: "DELETE",
+ params: {
+ thread_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/notifications/threads/:thread_id/subscription"
+ },
+ getRepoSubscription: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/subscription"
+ },
+ getThread: {
+ method: "GET",
+ params: {
+ thread_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/notifications/threads/:thread_id"
+ },
+ getThreadSubscription: {
+ method: "GET",
+ params: {
+ thread_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/notifications/threads/:thread_id/subscription"
+ },
+ listEventsForOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/events/orgs/:org"
+ },
+ listEventsForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/events"
+ },
+ listFeeds: {
+ method: "GET",
+ params: {},
+ url: "/feeds"
+ },
+ listNotifications: {
+ method: "GET",
+ params: {
+ all: {
+ type: "boolean"
+ },
+ before: {
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ participating: {
+ type: "boolean"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/notifications"
+ },
+ listNotificationsForRepo: {
+ method: "GET",
+ params: {
+ all: {
+ type: "boolean"
+ },
+ before: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ participating: {
+ type: "boolean"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/notifications"
+ },
+ listPublicEvents: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/events"
+ },
+ listPublicEventsForOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/events"
+ },
+ listPublicEventsForRepoNetwork: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/networks/:owner/:repo/events"
+ },
+ listPublicEventsForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/events/public"
+ },
+ listReceivedEventsForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/received_events"
+ },
+ listReceivedPublicEventsForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/received_events/public"
+ },
+ listRepoEvents: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/events"
+ },
+ listReposStarredByAuthenticatedUser: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/user/starred"
+ },
+ listReposStarredByUser: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/starred"
+ },
+ listReposWatchedByUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/subscriptions"
+ },
+ listStargazersForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stargazers"
+ },
+ listWatchedReposForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/subscriptions"
+ },
+ listWatchersForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/subscribers"
+ },
+ markAsRead: {
+ method: "PUT",
+ params: {
+ last_read_at: {
+ type: "string"
+ }
+ },
+ url: "/notifications"
+ },
+ markNotificationsAsReadForRepo: {
+ method: "PUT",
+ params: {
+ last_read_at: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/notifications"
+ },
+ markThreadAsRead: {
+ method: "PATCH",
+ params: {
+ thread_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/notifications/threads/:thread_id"
+ },
+ setRepoSubscription: {
+ method: "PUT",
+ params: {
+ ignored: {
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ subscribed: {
+ type: "boolean"
+ }
+ },
+ url: "/repos/:owner/:repo/subscription"
+ },
+ setThreadSubscription: {
+ method: "PUT",
+ params: {
+ ignored: {
+ type: "boolean"
+ },
+ thread_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/notifications/threads/:thread_id/subscription"
+ },
+ starRepo: {
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/starred/:owner/:repo"
+ },
+ unstarRepo: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/starred/:owner/:repo"
+ }
+ },
+ apps: {
+ addRepoToInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "PUT",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ },
+ repository_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/installations/:installation_id/repositories/:repository_id"
+ },
+ checkAccountIsAssociatedWithAny: {
+ method: "GET",
+ params: {
+ account_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/marketplace_listing/accounts/:account_id"
+ },
+ checkAccountIsAssociatedWithAnyStubbed: {
+ method: "GET",
+ params: {
+ account_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/marketplace_listing/stubbed/accounts/:account_id"
+ },
+ checkAuthorization: {
+ deprecated: "octokit.apps.checkAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#check-an-authorization",
+ method: "GET",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ checkToken: {
+ headers: {
+ accept: "application/vnd.github.doctor-strange-preview+json"
+ },
+ method: "POST",
+ params: {
+ access_token: {
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/token"
+ },
+ createContentAttachment: {
+ headers: {
+ accept: "application/vnd.github.corsair-preview+json"
+ },
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ content_reference_id: {
+ required: true,
+ type: "integer"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/content_references/:content_reference_id/attachments"
+ },
+ createFromManifest: {
+ headers: {
+ accept: "application/vnd.github.fury-preview+json"
+ },
+ method: "POST",
+ params: {
+ code: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/app-manifests/:code/conversions"
+ },
+ createInstallationToken: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "POST",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ },
+ permissions: {
+ type: "object"
+ },
+ repository_ids: {
+ type: "integer[]"
+ }
+ },
+ url: "/app/installations/:installation_id/access_tokens"
+ },
+ deleteAuthorization: {
+ headers: {
+ accept: "application/vnd.github.doctor-strange-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ access_token: {
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/grant"
+ },
+ deleteInstallation: {
+ headers: {
+ accept: "application/vnd.github.gambit-preview+json,application/vnd.github.machine-man-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/app/installations/:installation_id"
+ },
+ deleteToken: {
+ headers: {
+ accept: "application/vnd.github.doctor-strange-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ access_token: {
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/token"
+ },
+ findOrgInstallation: {
+ deprecated: "octokit.apps.findOrgInstallation() has been renamed to octokit.apps.getOrgInstallation() (2019-04-10)",
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/installation"
+ },
+ findRepoInstallation: {
+ deprecated: "octokit.apps.findRepoInstallation() has been renamed to octokit.apps.getRepoInstallation() (2019-04-10)",
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/installation"
+ },
+ findUserInstallation: {
+ deprecated: "octokit.apps.findUserInstallation() has been renamed to octokit.apps.getUserInstallation() (2019-04-10)",
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/installation"
+ },
+ getAuthenticated: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {},
+ url: "/app"
+ },
+ getBySlug: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ app_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/apps/:app_slug"
+ },
+ getInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/app/installations/:installation_id"
+ },
+ getOrgInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/installation"
+ },
+ getRepoInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/installation"
+ },
+ getUserInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/installation"
+ },
+ listAccountsUserOrOrgOnPlan: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ plan_id: {
+ required: true,
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/marketplace_listing/plans/:plan_id/accounts"
+ },
+ listAccountsUserOrOrgOnPlanStubbed: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ plan_id: {
+ required: true,
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/marketplace_listing/stubbed/plans/:plan_id/accounts"
+ },
+ listInstallationReposForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/installations/:installation_id/repositories"
+ },
+ listInstallations: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/app/installations"
+ },
+ listInstallationsForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/installations"
+ },
+ listMarketplacePurchasesForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/marketplace_purchases"
+ },
+ listMarketplacePurchasesForAuthenticatedUserStubbed: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/marketplace_purchases/stubbed"
+ },
+ listPlans: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/marketplace_listing/plans"
+ },
+ listPlansStubbed: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/marketplace_listing/stubbed/plans"
+ },
+ listRepos: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/installation/repositories"
+ },
+ removeRepoFromInstallation: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ installation_id: {
+ required: true,
+ type: "integer"
+ },
+ repository_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/installations/:installation_id/repositories/:repository_id"
+ },
+ resetAuthorization: {
+ deprecated: "octokit.apps.resetAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#reset-an-authorization",
+ method: "POST",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ resetToken: {
+ headers: {
+ accept: "application/vnd.github.doctor-strange-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ access_token: {
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/token"
+ },
+ revokeAuthorizationForApplication: {
+ deprecated: "octokit.apps.revokeAuthorizationForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-an-authorization-for-an-application",
+ method: "DELETE",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ revokeGrantForApplication: {
+ deprecated: "octokit.apps.revokeGrantForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-a-grant-for-an-application",
+ method: "DELETE",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/grants/:access_token"
+ },
+ revokeInstallationToken: {
+ headers: {
+ accept: "application/vnd.github.gambit-preview+json"
+ },
+ method: "DELETE",
+ params: {},
+ url: "/installation/token"
+ }
+ },
+ checks: {
+ create: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "POST",
+ params: {
+ actions: {
+ type: "object[]"
+ },
+ "actions[].description": {
+ required: true,
+ type: "string"
+ },
+ "actions[].identifier": {
+ required: true,
+ type: "string"
+ },
+ "actions[].label": {
+ required: true,
+ type: "string"
+ },
+ completed_at: {
+ type: "string"
+ },
+ conclusion: {
+ enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"],
+ type: "string"
+ },
+ details_url: {
+ type: "string"
+ },
+ external_id: {
+ type: "string"
+ },
+ head_sha: {
+ required: true,
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ output: {
+ type: "object"
+ },
+ "output.annotations": {
+ type: "object[]"
+ },
+ "output.annotations[].annotation_level": {
+ enum: ["notice", "warning", "failure"],
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].end_column": {
+ type: "integer"
+ },
+ "output.annotations[].end_line": {
+ required: true,
+ type: "integer"
+ },
+ "output.annotations[].message": {
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].path": {
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].raw_details": {
+ type: "string"
+ },
+ "output.annotations[].start_column": {
+ type: "integer"
+ },
+ "output.annotations[].start_line": {
+ required: true,
+ type: "integer"
+ },
+ "output.annotations[].title": {
+ type: "string"
+ },
+ "output.images": {
+ type: "object[]"
+ },
+ "output.images[].alt": {
+ required: true,
+ type: "string"
+ },
+ "output.images[].caption": {
+ type: "string"
+ },
+ "output.images[].image_url": {
+ required: true,
+ type: "string"
+ },
+ "output.summary": {
+ required: true,
+ type: "string"
+ },
+ "output.text": {
+ type: "string"
+ },
+ "output.title": {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ started_at: {
+ type: "string"
+ },
+ status: {
+ enum: ["queued", "in_progress", "completed"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-runs"
+ },
+ createSuite: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "POST",
+ params: {
+ head_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-suites"
+ },
+ get: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ check_run_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-runs/:check_run_id"
+ },
+ getSuite: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ check_suite_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-suites/:check_suite_id"
+ },
+ listAnnotations: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ check_run_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-runs/:check_run_id/annotations"
+ },
+ listForRef: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ check_name: {
+ type: "string"
+ },
+ filter: {
+ enum: ["latest", "all"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ status: {
+ enum: ["queued", "in_progress", "completed"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref/check-runs"
+ },
+ listForSuite: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ check_name: {
+ type: "string"
+ },
+ check_suite_id: {
+ required: true,
+ type: "integer"
+ },
+ filter: {
+ enum: ["latest", "all"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ status: {
+ enum: ["queued", "in_progress", "completed"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-suites/:check_suite_id/check-runs"
+ },
+ listSuitesForRef: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "GET",
+ params: {
+ app_id: {
+ type: "integer"
+ },
+ check_name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref/check-suites"
+ },
+ rerequestSuite: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "POST",
+ params: {
+ check_suite_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-suites/:check_suite_id/rerequest"
+ },
+ setSuitesPreferences: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ auto_trigger_checks: {
+ type: "object[]"
+ },
+ "auto_trigger_checks[].app_id": {
+ required: true,
+ type: "integer"
+ },
+ "auto_trigger_checks[].setting": {
+ required: true,
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-suites/preferences"
+ },
+ update: {
+ headers: {
+ accept: "application/vnd.github.antiope-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ actions: {
+ type: "object[]"
+ },
+ "actions[].description": {
+ required: true,
+ type: "string"
+ },
+ "actions[].identifier": {
+ required: true,
+ type: "string"
+ },
+ "actions[].label": {
+ required: true,
+ type: "string"
+ },
+ check_run_id: {
+ required: true,
+ type: "integer"
+ },
+ completed_at: {
+ type: "string"
+ },
+ conclusion: {
+ enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"],
+ type: "string"
+ },
+ details_url: {
+ type: "string"
+ },
+ external_id: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ },
+ output: {
+ type: "object"
+ },
+ "output.annotations": {
+ type: "object[]"
+ },
+ "output.annotations[].annotation_level": {
+ enum: ["notice", "warning", "failure"],
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].end_column": {
+ type: "integer"
+ },
+ "output.annotations[].end_line": {
+ required: true,
+ type: "integer"
+ },
+ "output.annotations[].message": {
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].path": {
+ required: true,
+ type: "string"
+ },
+ "output.annotations[].raw_details": {
+ type: "string"
+ },
+ "output.annotations[].start_column": {
+ type: "integer"
+ },
+ "output.annotations[].start_line": {
+ required: true,
+ type: "integer"
+ },
+ "output.annotations[].title": {
+ type: "string"
+ },
+ "output.images": {
+ type: "object[]"
+ },
+ "output.images[].alt": {
+ required: true,
+ type: "string"
+ },
+ "output.images[].caption": {
+ type: "string"
+ },
+ "output.images[].image_url": {
+ required: true,
+ type: "string"
+ },
+ "output.summary": {
+ required: true,
+ type: "string"
+ },
+ "output.text": {
+ type: "string"
+ },
+ "output.title": {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ started_at: {
+ type: "string"
+ },
+ status: {
+ enum: ["queued", "in_progress", "completed"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/check-runs/:check_run_id"
+ }
+ },
+ codesOfConduct: {
+ getConductCode: {
+ headers: {
+ accept: "application/vnd.github.scarlet-witch-preview+json"
+ },
+ method: "GET",
+ params: {
+ key: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/codes_of_conduct/:key"
+ },
+ getForRepo: {
+ headers: {
+ accept: "application/vnd.github.scarlet-witch-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/community/code_of_conduct"
+ },
+ listConductCodes: {
+ headers: {
+ accept: "application/vnd.github.scarlet-witch-preview+json"
+ },
+ method: "GET",
+ params: {},
+ url: "/codes_of_conduct"
+ }
+ },
+ emojis: {
+ get: {
+ method: "GET",
+ params: {},
+ url: "/emojis"
+ }
+ },
+ gists: {
+ checkIsStarred: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/star"
+ },
+ create: {
+ method: "POST",
+ params: {
+ description: {
+ type: "string"
+ },
+ files: {
+ required: true,
+ type: "object"
+ },
+ "files.content": {
+ type: "string"
+ },
+ public: {
+ type: "boolean"
+ }
+ },
+ url: "/gists"
+ },
+ createComment: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/comments"
+ },
+ delete: {
+ method: "DELETE",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id"
+ },
+ deleteComment: {
+ method: "DELETE",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/comments/:comment_id"
+ },
+ fork: {
+ method: "POST",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/forks"
+ },
+ get: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id"
+ },
+ getComment: {
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/comments/:comment_id"
+ },
+ getRevision: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/:sha"
+ },
+ list: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/gists"
+ },
+ listComments: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/gists/:gist_id/comments"
+ },
+ listCommits: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/gists/:gist_id/commits"
+ },
+ listForks: {
+ method: "GET",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/gists/:gist_id/forks"
+ },
+ listPublic: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/gists/public"
+ },
+ listPublicForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/gists"
+ },
+ listStarred: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/gists/starred"
+ },
+ star: {
+ method: "PUT",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/star"
+ },
+ unstar: {
+ method: "DELETE",
+ params: {
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/star"
+ },
+ update: {
+ method: "PATCH",
+ params: {
+ description: {
+ type: "string"
+ },
+ files: {
+ type: "object"
+ },
+ "files.content": {
+ type: "string"
+ },
+ "files.filename": {
+ type: "string"
+ },
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id"
+ },
+ updateComment: {
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ gist_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gists/:gist_id/comments/:comment_id"
+ }
+ },
+ git: {
+ createBlob: {
+ method: "POST",
+ params: {
+ content: {
+ required: true,
+ type: "string"
+ },
+ encoding: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/blobs"
+ },
+ createCommit: {
+ method: "POST",
+ params: {
+ author: {
+ type: "object"
+ },
+ "author.date": {
+ type: "string"
+ },
+ "author.email": {
+ type: "string"
+ },
+ "author.name": {
+ type: "string"
+ },
+ committer: {
+ type: "object"
+ },
+ "committer.date": {
+ type: "string"
+ },
+ "committer.email": {
+ type: "string"
+ },
+ "committer.name": {
+ type: "string"
+ },
+ message: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ parents: {
+ required: true,
+ type: "string[]"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ signature: {
+ type: "string"
+ },
+ tree: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/commits"
+ },
+ createRef: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/refs"
+ },
+ createTag: {
+ method: "POST",
+ params: {
+ message: {
+ required: true,
+ type: "string"
+ },
+ object: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tag: {
+ required: true,
+ type: "string"
+ },
+ tagger: {
+ type: "object"
+ },
+ "tagger.date": {
+ type: "string"
+ },
+ "tagger.email": {
+ type: "string"
+ },
+ "tagger.name": {
+ type: "string"
+ },
+ type: {
+ enum: ["commit", "tree", "blob"],
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/tags"
+ },
+ createTree: {
+ method: "POST",
+ params: {
+ base_tree: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tree: {
+ required: true,
+ type: "object[]"
+ },
+ "tree[].content": {
+ type: "string"
+ },
+ "tree[].mode": {
+ enum: ["100644", "100755", "040000", "160000", "120000"],
+ type: "string"
+ },
+ "tree[].path": {
+ type: "string"
+ },
+ "tree[].sha": {
+ allowNull: true,
+ type: "string"
+ },
+ "tree[].type": {
+ enum: ["blob", "tree", "commit"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/trees"
+ },
+ deleteRef: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/refs/:ref"
+ },
+ getBlob: {
+ method: "GET",
+ params: {
+ file_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/blobs/:file_sha"
+ },
+ getCommit: {
+ method: "GET",
+ params: {
+ commit_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/commits/:commit_sha"
+ },
+ getRef: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/ref/:ref"
+ },
+ getTag: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tag_sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/tags/:tag_sha"
+ },
+ getTree: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ recursive: {
+ enum: ["1"],
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tree_sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/trees/:tree_sha"
+ },
+ listMatchingRefs: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/matching-refs/:ref"
+ },
+ listRefs: {
+ method: "GET",
+ params: {
+ namespace: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/refs/:namespace"
+ },
+ updateRef: {
+ method: "PATCH",
+ params: {
+ force: {
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/git/refs/:ref"
+ }
+ },
+ gitignore: {
+ getTemplate: {
+ method: "GET",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/gitignore/templates/:name"
+ },
+ listTemplates: {
+ method: "GET",
+ params: {},
+ url: "/gitignore/templates"
+ }
+ },
+ interactions: {
+ addOrUpdateRestrictionsForOrg: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "PUT",
+ params: {
+ limit: {
+ enum: ["existing_users", "contributors_only", "collaborators_only"],
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/interaction-limits"
+ },
+ addOrUpdateRestrictionsForRepo: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "PUT",
+ params: {
+ limit: {
+ enum: ["existing_users", "contributors_only", "collaborators_only"],
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/interaction-limits"
+ },
+ getRestrictionsForOrg: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/interaction-limits"
+ },
+ getRestrictionsForRepo: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/interaction-limits"
+ },
+ removeRestrictionsForOrg: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/interaction-limits"
+ },
+ removeRestrictionsForRepo: {
+ headers: {
+ accept: "application/vnd.github.sombra-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/interaction-limits"
+ }
+ },
+ issues: {
+ addAssignees: {
+ method: "POST",
+ params: {
+ assignees: {
+ type: "string[]"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/assignees"
+ },
+ addLabels: {
+ method: "POST",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ labels: {
+ required: true,
+ type: "string[]"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/labels"
+ },
+ checkAssignee: {
+ method: "GET",
+ params: {
+ assignee: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/assignees/:assignee"
+ },
+ create: {
+ method: "POST",
+ params: {
+ assignee: {
+ type: "string"
+ },
+ assignees: {
+ type: "string[]"
+ },
+ body: {
+ type: "string"
+ },
+ labels: {
+ type: "string[]"
+ },
+ milestone: {
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues"
+ },
+ createComment: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/comments"
+ },
+ createLabel: {
+ method: "POST",
+ params: {
+ color: {
+ required: true,
+ type: "string"
+ },
+ description: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/labels"
+ },
+ createMilestone: {
+ method: "POST",
+ params: {
+ description: {
+ type: "string"
+ },
+ due_on: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed"],
+ type: "string"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones"
+ },
+ deleteComment: {
+ method: "DELETE",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments/:comment_id"
+ },
+ deleteLabel: {
+ method: "DELETE",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/labels/:name"
+ },
+ deleteMilestone: {
+ method: "DELETE",
+ params: {
+ milestone_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "milestone_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones/:milestone_number"
+ },
+ get: {
+ method: "GET",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number"
+ },
+ getComment: {
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments/:comment_id"
+ },
+ getEvent: {
+ method: "GET",
+ params: {
+ event_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/events/:event_id"
+ },
+ getLabel: {
+ method: "GET",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/labels/:name"
+ },
+ getMilestone: {
+ method: "GET",
+ params: {
+ milestone_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "milestone_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones/:milestone_number"
+ },
+ list: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ filter: {
+ enum: ["assigned", "created", "mentioned", "subscribed", "all"],
+ type: "string"
+ },
+ labels: {
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated", "comments"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/issues"
+ },
+ listAssignees: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/assignees"
+ },
+ listComments: {
+ method: "GET",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/comments"
+ },
+ listCommentsForRepo: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments"
+ },
+ listEvents: {
+ method: "GET",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/events"
+ },
+ listEventsForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/events"
+ },
+ listEventsForTimeline: {
+ headers: {
+ accept: "application/vnd.github.mockingbird-preview+json"
+ },
+ method: "GET",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/timeline"
+ },
+ listForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ filter: {
+ enum: ["assigned", "created", "mentioned", "subscribed", "all"],
+ type: "string"
+ },
+ labels: {
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated", "comments"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/user/issues"
+ },
+ listForOrg: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ filter: {
+ enum: ["assigned", "created", "mentioned", "subscribed", "all"],
+ type: "string"
+ },
+ labels: {
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated", "comments"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/issues"
+ },
+ listForRepo: {
+ method: "GET",
+ params: {
+ assignee: {
+ type: "string"
+ },
+ creator: {
+ type: "string"
+ },
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ labels: {
+ type: "string"
+ },
+ mentioned: {
+ type: "string"
+ },
+ milestone: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated", "comments"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues"
+ },
+ listLabelsForMilestone: {
+ method: "GET",
+ params: {
+ milestone_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "milestone_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones/:milestone_number/labels"
+ },
+ listLabelsForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/labels"
+ },
+ listLabelsOnIssue: {
+ method: "GET",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/labels"
+ },
+ listMilestonesForRepo: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["due_on", "completeness"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones"
+ },
+ lock: {
+ method: "PUT",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ lock_reason: {
+ enum: ["off-topic", "too heated", "resolved", "spam"],
+ type: "string"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/lock"
+ },
+ removeAssignees: {
+ method: "DELETE",
+ params: {
+ assignees: {
+ type: "string[]"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/assignees"
+ },
+ removeLabel: {
+ method: "DELETE",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/labels/:name"
+ },
+ removeLabels: {
+ method: "DELETE",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/labels"
+ },
+ replaceLabels: {
+ method: "PUT",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ labels: {
+ type: "string[]"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/labels"
+ },
+ unlock: {
+ method: "DELETE",
+ params: {
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/lock"
+ },
+ update: {
+ method: "PATCH",
+ params: {
+ assignee: {
+ type: "string"
+ },
+ assignees: {
+ type: "string[]"
+ },
+ body: {
+ type: "string"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ labels: {
+ type: "string[]"
+ },
+ milestone: {
+ allowNull: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed"],
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number"
+ },
+ updateComment: {
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments/:comment_id"
+ },
+ updateLabel: {
+ method: "PATCH",
+ params: {
+ color: {
+ type: "string"
+ },
+ current_name: {
+ required: true,
+ type: "string"
+ },
+ description: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/labels/:current_name"
+ },
+ updateMilestone: {
+ method: "PATCH",
+ params: {
+ description: {
+ type: "string"
+ },
+ due_on: {
+ type: "string"
+ },
+ milestone_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "milestone_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed"],
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/milestones/:milestone_number"
+ }
+ },
+ licenses: {
+ get: {
+ method: "GET",
+ params: {
+ license: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/licenses/:license"
+ },
+ getForRepo: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/license"
+ },
+ list: {
+ deprecated: "octokit.licenses.list() has been renamed to octokit.licenses.listCommonlyUsed() (2019-03-05)",
+ method: "GET",
+ params: {},
+ url: "/licenses"
+ },
+ listCommonlyUsed: {
+ method: "GET",
+ params: {},
+ url: "/licenses"
+ }
+ },
+ markdown: {
+ render: {
+ method: "POST",
+ params: {
+ context: {
+ type: "string"
+ },
+ mode: {
+ enum: ["markdown", "gfm"],
+ type: "string"
+ },
+ text: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/markdown"
+ },
+ renderRaw: {
+ headers: {
+ "content-type": "text/plain; charset=utf-8"
+ },
+ method: "POST",
+ params: {
+ data: {
+ mapTo: "data",
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/markdown/raw"
+ }
+ },
+ meta: {
+ get: {
+ method: "GET",
+ params: {},
+ url: "/meta"
+ }
+ },
+ migrations: {
+ cancelImport: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import"
+ },
+ deleteArchiveForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/migrations/:migration_id/archive"
+ },
+ deleteArchiveForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id/archive"
+ },
+ downloadArchiveForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id/archive"
+ },
+ getArchiveForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/migrations/:migration_id/archive"
+ },
+ getArchiveForOrg: {
+ deprecated: "octokit.migrations.getArchiveForOrg() has been renamed to octokit.migrations.downloadArchiveForOrg() (2020-01-27)",
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id/archive"
+ },
+ getCommitAuthors: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import/authors"
+ },
+ getImportProgress: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import"
+ },
+ getLargeFiles: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import/large_files"
+ },
+ getStatusForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/migrations/:migration_id"
+ },
+ getStatusForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id"
+ },
+ listForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/migrations"
+ },
+ listForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/migrations"
+ },
+ listReposForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id/repositories"
+ },
+ listReposForUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "GET",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/:migration_id/repositories"
+ },
+ mapCommitAuthor: {
+ method: "PATCH",
+ params: {
+ author_id: {
+ required: true,
+ type: "integer"
+ },
+ email: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import/authors/:author_id"
+ },
+ setLfsPreference: {
+ method: "PATCH",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ use_lfs: {
+ enum: ["opt_in", "opt_out"],
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import/lfs"
+ },
+ startForAuthenticatedUser: {
+ method: "POST",
+ params: {
+ exclude_attachments: {
+ type: "boolean"
+ },
+ lock_repositories: {
+ type: "boolean"
+ },
+ repositories: {
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/user/migrations"
+ },
+ startForOrg: {
+ method: "POST",
+ params: {
+ exclude_attachments: {
+ type: "boolean"
+ },
+ lock_repositories: {
+ type: "boolean"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ repositories: {
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/orgs/:org/migrations"
+ },
+ startImport: {
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tfvc_project: {
+ type: "string"
+ },
+ vcs: {
+ enum: ["subversion", "git", "mercurial", "tfvc"],
+ type: "string"
+ },
+ vcs_password: {
+ type: "string"
+ },
+ vcs_url: {
+ required: true,
+ type: "string"
+ },
+ vcs_username: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import"
+ },
+ unlockRepoForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ repo_name: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/migrations/:migration_id/repos/:repo_name/lock"
+ },
+ unlockRepoForOrg: {
+ headers: {
+ accept: "application/vnd.github.wyandotte-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ migration_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ repo_name: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/migrations/:migration_id/repos/:repo_name/lock"
+ },
+ updateImport: {
+ method: "PATCH",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ vcs_password: {
+ type: "string"
+ },
+ vcs_username: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/import"
+ }
+ },
+ oauthAuthorizations: {
+ checkAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.checkAuthorization() has been renamed to octokit.apps.checkAuthorization() (2019-11-05)",
+ method: "GET",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ createAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.createAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#create-a-new-authorization",
+ method: "POST",
+ params: {
+ client_id: {
+ type: "string"
+ },
+ client_secret: {
+ type: "string"
+ },
+ fingerprint: {
+ type: "string"
+ },
+ note: {
+ required: true,
+ type: "string"
+ },
+ note_url: {
+ type: "string"
+ },
+ scopes: {
+ type: "string[]"
+ }
+ },
+ url: "/authorizations"
+ },
+ deleteAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.deleteAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-an-authorization",
+ method: "DELETE",
+ params: {
+ authorization_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/authorizations/:authorization_id"
+ },
+ deleteGrant: {
+ deprecated: "octokit.oauthAuthorizations.deleteGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-a-grant",
+ method: "DELETE",
+ params: {
+ grant_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/applications/grants/:grant_id"
+ },
+ getAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.getAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-authorization",
+ method: "GET",
+ params: {
+ authorization_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/authorizations/:authorization_id"
+ },
+ getGrant: {
+ deprecated: "octokit.oauthAuthorizations.getGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-grant",
+ method: "GET",
+ params: {
+ grant_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/applications/grants/:grant_id"
+ },
+ getOrCreateAuthorizationForApp: {
+ deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForApp() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app",
+ method: "PUT",
+ params: {
+ client_id: {
+ required: true,
+ type: "string"
+ },
+ client_secret: {
+ required: true,
+ type: "string"
+ },
+ fingerprint: {
+ type: "string"
+ },
+ note: {
+ type: "string"
+ },
+ note_url: {
+ type: "string"
+ },
+ scopes: {
+ type: "string[]"
+ }
+ },
+ url: "/authorizations/clients/:client_id"
+ },
+ getOrCreateAuthorizationForAppAndFingerprint: {
+ deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app-and-fingerprint",
+ method: "PUT",
+ params: {
+ client_id: {
+ required: true,
+ type: "string"
+ },
+ client_secret: {
+ required: true,
+ type: "string"
+ },
+ fingerprint: {
+ required: true,
+ type: "string"
+ },
+ note: {
+ type: "string"
+ },
+ note_url: {
+ type: "string"
+ },
+ scopes: {
+ type: "string[]"
+ }
+ },
+ url: "/authorizations/clients/:client_id/:fingerprint"
+ },
+ getOrCreateAuthorizationForAppFingerprint: {
+ deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppFingerprint() has been renamed to octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() (2018-12-27)",
+ method: "PUT",
+ params: {
+ client_id: {
+ required: true,
+ type: "string"
+ },
+ client_secret: {
+ required: true,
+ type: "string"
+ },
+ fingerprint: {
+ required: true,
+ type: "string"
+ },
+ note: {
+ type: "string"
+ },
+ note_url: {
+ type: "string"
+ },
+ scopes: {
+ type: "string[]"
+ }
+ },
+ url: "/authorizations/clients/:client_id/:fingerprint"
+ },
+ listAuthorizations: {
+ deprecated: "octokit.oauthAuthorizations.listAuthorizations() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-authorizations",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/authorizations"
+ },
+ listGrants: {
+ deprecated: "octokit.oauthAuthorizations.listGrants() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-grants",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/applications/grants"
+ },
+ resetAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.resetAuthorization() has been renamed to octokit.apps.resetAuthorization() (2019-11-05)",
+ method: "POST",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ revokeAuthorizationForApplication: {
+ deprecated: "octokit.oauthAuthorizations.revokeAuthorizationForApplication() has been renamed to octokit.apps.revokeAuthorizationForApplication() (2019-11-05)",
+ method: "DELETE",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/tokens/:access_token"
+ },
+ revokeGrantForApplication: {
+ deprecated: "octokit.oauthAuthorizations.revokeGrantForApplication() has been renamed to octokit.apps.revokeGrantForApplication() (2019-11-05)",
+ method: "DELETE",
+ params: {
+ access_token: {
+ required: true,
+ type: "string"
+ },
+ client_id: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/applications/:client_id/grants/:access_token"
+ },
+ updateAuthorization: {
+ deprecated: "octokit.oauthAuthorizations.updateAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#update-an-existing-authorization",
+ method: "PATCH",
+ params: {
+ add_scopes: {
+ type: "string[]"
+ },
+ authorization_id: {
+ required: true,
+ type: "integer"
+ },
+ fingerprint: {
+ type: "string"
+ },
+ note: {
+ type: "string"
+ },
+ note_url: {
+ type: "string"
+ },
+ remove_scopes: {
+ type: "string[]"
+ },
+ scopes: {
+ type: "string[]"
+ }
+ },
+ url: "/authorizations/:authorization_id"
+ }
+ },
+ orgs: {
+ addOrUpdateMembership: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ role: {
+ enum: ["admin", "member"],
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/memberships/:username"
+ },
+ blockUser: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/blocks/:username"
+ },
+ checkBlockedUser: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/blocks/:username"
+ },
+ checkMembership: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/members/:username"
+ },
+ checkPublicMembership: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/public_members/:username"
+ },
+ concealMembership: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/public_members/:username"
+ },
+ convertMemberToOutsideCollaborator: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/outside_collaborators/:username"
+ },
+ createHook: {
+ method: "POST",
+ params: {
+ active: {
+ type: "boolean"
+ },
+ config: {
+ required: true,
+ type: "object"
+ },
+ "config.content_type": {
+ type: "string"
+ },
+ "config.insecure_ssl": {
+ type: "string"
+ },
+ "config.secret": {
+ type: "string"
+ },
+ "config.url": {
+ required: true,
+ type: "string"
+ },
+ events: {
+ type: "string[]"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/hooks"
+ },
+ createInvitation: {
+ method: "POST",
+ params: {
+ email: {
+ type: "string"
+ },
+ invitee_id: {
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ role: {
+ enum: ["admin", "direct_member", "billing_manager"],
+ type: "string"
+ },
+ team_ids: {
+ type: "integer[]"
+ }
+ },
+ url: "/orgs/:org/invitations"
+ },
+ deleteHook: {
+ method: "DELETE",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/hooks/:hook_id"
+ },
+ get: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org"
+ },
+ getHook: {
+ method: "GET",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/hooks/:hook_id"
+ },
+ getMembership: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/memberships/:username"
+ },
+ getMembershipForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/memberships/orgs/:org"
+ },
+ list: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "integer"
+ }
+ },
+ url: "/organizations"
+ },
+ listBlockedUsers: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/blocks"
+ },
+ listForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/orgs"
+ },
+ listForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/orgs"
+ },
+ listHooks: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/hooks"
+ },
+ listInstallations: {
+ headers: {
+ accept: "application/vnd.github.machine-man-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/installations"
+ },
+ listInvitationTeams: {
+ method: "GET",
+ params: {
+ invitation_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/invitations/:invitation_id/teams"
+ },
+ listMembers: {
+ method: "GET",
+ params: {
+ filter: {
+ enum: ["2fa_disabled", "all"],
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ role: {
+ enum: ["all", "admin", "member"],
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/members"
+ },
+ listMemberships: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ state: {
+ enum: ["active", "pending"],
+ type: "string"
+ }
+ },
+ url: "/user/memberships/orgs"
+ },
+ listOutsideCollaborators: {
+ method: "GET",
+ params: {
+ filter: {
+ enum: ["2fa_disabled", "all"],
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/outside_collaborators"
+ },
+ listPendingInvitations: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/invitations"
+ },
+ listPublicMembers: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/public_members"
+ },
+ pingHook: {
+ method: "POST",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/hooks/:hook_id/pings"
+ },
+ publicizeMembership: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/public_members/:username"
+ },
+ removeMember: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/members/:username"
+ },
+ removeMembership: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/memberships/:username"
+ },
+ removeOutsideCollaborator: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/outside_collaborators/:username"
+ },
+ unblockUser: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/blocks/:username"
+ },
+ update: {
+ method: "PATCH",
+ params: {
+ billing_email: {
+ type: "string"
+ },
+ company: {
+ type: "string"
+ },
+ default_repository_permission: {
+ enum: ["read", "write", "admin", "none"],
+ type: "string"
+ },
+ description: {
+ type: "string"
+ },
+ email: {
+ type: "string"
+ },
+ has_organization_projects: {
+ type: "boolean"
+ },
+ has_repository_projects: {
+ type: "boolean"
+ },
+ location: {
+ type: "string"
+ },
+ members_allowed_repository_creation_type: {
+ enum: ["all", "private", "none"],
+ type: "string"
+ },
+ members_can_create_internal_repositories: {
+ type: "boolean"
+ },
+ members_can_create_private_repositories: {
+ type: "boolean"
+ },
+ members_can_create_public_repositories: {
+ type: "boolean"
+ },
+ members_can_create_repositories: {
+ type: "boolean"
+ },
+ name: {
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org"
+ },
+ updateHook: {
+ method: "PATCH",
+ params: {
+ active: {
+ type: "boolean"
+ },
+ config: {
+ type: "object"
+ },
+ "config.content_type": {
+ type: "string"
+ },
+ "config.insecure_ssl": {
+ type: "string"
+ },
+ "config.secret": {
+ type: "string"
+ },
+ "config.url": {
+ required: true,
+ type: "string"
+ },
+ events: {
+ type: "string[]"
+ },
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/hooks/:hook_id"
+ },
+ updateMembership: {
+ method: "PATCH",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["active"],
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/memberships/orgs/:org"
+ }
+ },
+ projects: {
+ addCollaborator: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PUT",
+ params: {
+ permission: {
+ enum: ["read", "write", "admin"],
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/projects/:project_id/collaborators/:username"
+ },
+ createCard: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ column_id: {
+ required: true,
+ type: "integer"
+ },
+ content_id: {
+ type: "integer"
+ },
+ content_type: {
+ type: "string"
+ },
+ note: {
+ type: "string"
+ }
+ },
+ url: "/projects/columns/:column_id/cards"
+ },
+ createColumn: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ name: {
+ required: true,
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/:project_id/columns"
+ },
+ createForAuthenticatedUser: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/projects"
+ },
+ createForOrg: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/projects"
+ },
+ createForRepo: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/projects"
+ },
+ delete: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/:project_id"
+ },
+ deleteCard: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ card_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/columns/cards/:card_id"
+ },
+ deleteColumn: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ column_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/columns/:column_id"
+ },
+ get: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/:project_id"
+ },
+ getCard: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ card_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/columns/cards/:card_id"
+ },
+ getColumn: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ column_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/columns/:column_id"
+ },
+ listCards: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ archived_state: {
+ enum: ["all", "archived", "not_archived"],
+ type: "string"
+ },
+ column_id: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/projects/columns/:column_id/cards"
+ },
+ listCollaborators: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ affiliation: {
+ enum: ["outside", "direct", "all"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/:project_id/collaborators"
+ },
+ listColumns: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/projects/:project_id/columns"
+ },
+ listForOrg: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/projects"
+ },
+ listForRepo: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/projects"
+ },
+ listForUser: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/projects"
+ },
+ moveCard: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ card_id: {
+ required: true,
+ type: "integer"
+ },
+ column_id: {
+ type: "integer"
+ },
+ position: {
+ required: true,
+ type: "string",
+ validation: "^(top|bottom|after:\\d+)$"
+ }
+ },
+ url: "/projects/columns/cards/:card_id/moves"
+ },
+ moveColumn: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "POST",
+ params: {
+ column_id: {
+ required: true,
+ type: "integer"
+ },
+ position: {
+ required: true,
+ type: "string",
+ validation: "^(first|last|after:\\d+)$"
+ }
+ },
+ url: "/projects/columns/:column_id/moves"
+ },
+ removeCollaborator: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/projects/:project_id/collaborators/:username"
+ },
+ reviewUserPermissionLevel: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/projects/:project_id/collaborators/:username/permission"
+ },
+ update: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ body: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ },
+ organization_permission: {
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ state: {
+ enum: ["open", "closed"],
+ type: "string"
+ }
+ },
+ url: "/projects/:project_id"
+ },
+ updateCard: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ archived: {
+ type: "boolean"
+ },
+ card_id: {
+ required: true,
+ type: "integer"
+ },
+ note: {
+ type: "string"
+ }
+ },
+ url: "/projects/columns/cards/:card_id"
+ },
+ updateColumn: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PATCH",
+ params: {
+ column_id: {
+ required: true,
+ type: "integer"
+ },
+ name: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/projects/columns/:column_id"
+ }
+ },
+ pulls: {
+ checkIfMerged: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/merge"
+ },
+ create: {
+ method: "POST",
+ params: {
+ base: {
+ required: true,
+ type: "string"
+ },
+ body: {
+ type: "string"
+ },
+ draft: {
+ type: "boolean"
+ },
+ head: {
+ required: true,
+ type: "string"
+ },
+ maintainer_can_modify: {
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls"
+ },
+ createComment: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ commit_id: {
+ required: true,
+ type: "string"
+ },
+ in_reply_to: {
+ deprecated: true,
+ description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.",
+ type: "integer"
+ },
+ line: {
+ type: "integer"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ position: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ side: {
+ enum: ["LEFT", "RIGHT"],
+ type: "string"
+ },
+ start_line: {
+ type: "integer"
+ },
+ start_side: {
+ enum: ["LEFT", "RIGHT", "side"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/comments"
+ },
+ createCommentReply: {
+ deprecated: "octokit.pulls.createCommentReply() has been renamed to octokit.pulls.createComment() (2019-09-09)",
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ commit_id: {
+ required: true,
+ type: "string"
+ },
+ in_reply_to: {
+ deprecated: true,
+ description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.",
+ type: "integer"
+ },
+ line: {
+ type: "integer"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ position: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ side: {
+ enum: ["LEFT", "RIGHT"],
+ type: "string"
+ },
+ start_line: {
+ type: "integer"
+ },
+ start_side: {
+ enum: ["LEFT", "RIGHT", "side"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/comments"
+ },
+ createFromIssue: {
+ deprecated: "octokit.pulls.createFromIssue() is deprecated, see https://developer.github.com/v3/pulls/#create-a-pull-request",
+ method: "POST",
+ params: {
+ base: {
+ required: true,
+ type: "string"
+ },
+ draft: {
+ type: "boolean"
+ },
+ head: {
+ required: true,
+ type: "string"
+ },
+ issue: {
+ required: true,
+ type: "integer"
+ },
+ maintainer_can_modify: {
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls"
+ },
+ createReview: {
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ comments: {
+ type: "object[]"
+ },
+ "comments[].body": {
+ required: true,
+ type: "string"
+ },
+ "comments[].path": {
+ required: true,
+ type: "string"
+ },
+ "comments[].position": {
+ required: true,
+ type: "integer"
+ },
+ commit_id: {
+ type: "string"
+ },
+ event: {
+ enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"],
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews"
+ },
+ createReviewCommentReply: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/comments/:comment_id/replies"
+ },
+ createReviewRequest: {
+ method: "POST",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ reviewers: {
+ type: "string[]"
+ },
+ team_reviewers: {
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers"
+ },
+ deleteComment: {
+ method: "DELETE",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments/:comment_id"
+ },
+ deletePendingReview: {
+ method: "DELETE",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id"
+ },
+ deleteReviewRequest: {
+ method: "DELETE",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ reviewers: {
+ type: "string[]"
+ },
+ team_reviewers: {
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers"
+ },
+ dismissReview: {
+ method: "PUT",
+ params: {
+ message: {
+ required: true,
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/dismissals"
+ },
+ get: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number"
+ },
+ getComment: {
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments/:comment_id"
+ },
+ getCommentsForReview: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/comments"
+ },
+ getReview: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id"
+ },
+ list: {
+ method: "GET",
+ params: {
+ base: {
+ type: "string"
+ },
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ head: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated", "popularity", "long-running"],
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed", "all"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls"
+ },
+ listComments: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/comments"
+ },
+ listCommentsForRepo: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ since: {
+ type: "string"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments"
+ },
+ listCommits: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/commits"
+ },
+ listFiles: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/files"
+ },
+ listReviewRequests: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers"
+ },
+ listReviews: {
+ method: "GET",
+ params: {
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews"
+ },
+ merge: {
+ method: "PUT",
+ params: {
+ commit_message: {
+ type: "string"
+ },
+ commit_title: {
+ type: "string"
+ },
+ merge_method: {
+ enum: ["merge", "squash", "rebase"],
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/merge"
+ },
+ submitReview: {
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ event: {
+ enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"],
+ required: true,
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/events"
+ },
+ update: {
+ method: "PATCH",
+ params: {
+ base: {
+ type: "string"
+ },
+ body: {
+ type: "string"
+ },
+ maintainer_can_modify: {
+ type: "boolean"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["open", "closed"],
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number"
+ },
+ updateBranch: {
+ headers: {
+ accept: "application/vnd.github.lydian-preview+json"
+ },
+ method: "PUT",
+ params: {
+ expected_head_sha: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/update-branch"
+ },
+ updateComment: {
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments/:comment_id"
+ },
+ updateReview: {
+ method: "PUT",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ number: {
+ alias: "pull_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ pull_number: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ review_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id"
+ }
+ },
+ rateLimit: {
+ get: {
+ method: "GET",
+ params: {},
+ url: "/rate_limit"
+ }
+ },
+ reactions: {
+ createForCommitComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments/:comment_id/reactions"
+ },
+ createForIssue: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/reactions"
+ },
+ createForIssueComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions"
+ },
+ createForPullRequestReviewComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions"
+ },
+ createForTeamDiscussion: {
+ deprecated: "octokit.reactions.createForTeamDiscussion() has been renamed to octokit.reactions.createForTeamDiscussionLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/reactions"
+ },
+ createForTeamDiscussionComment: {
+ deprecated: "octokit.reactions.createForTeamDiscussionComment() has been renamed to octokit.reactions.createForTeamDiscussionCommentLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ createForTeamDiscussionCommentInOrg: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ createForTeamDiscussionCommentLegacy: {
+ deprecated: "octokit.reactions.createForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-comment-legacy",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ createForTeamDiscussionInOrg: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions"
+ },
+ createForTeamDiscussionLegacy: {
+ deprecated: "octokit.reactions.createForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-legacy",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "POST",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/reactions"
+ },
+ delete: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ reaction_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/reactions/:reaction_id"
+ },
+ listForCommitComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments/:comment_id/reactions"
+ },
+ listForIssue: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ issue_number: {
+ required: true,
+ type: "integer"
+ },
+ number: {
+ alias: "issue_number",
+ deprecated: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/:issue_number/reactions"
+ },
+ listForIssueComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions"
+ },
+ listForPullRequestReviewComment: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions"
+ },
+ listForTeamDiscussion: {
+ deprecated: "octokit.reactions.listForTeamDiscussion() has been renamed to octokit.reactions.listForTeamDiscussionLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/reactions"
+ },
+ listForTeamDiscussionComment: {
+ deprecated: "octokit.reactions.listForTeamDiscussionComment() has been renamed to octokit.reactions.listForTeamDiscussionCommentLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ listForTeamDiscussionCommentInOrg: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ listForTeamDiscussionCommentLegacy: {
+ deprecated: "octokit.reactions.listForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-comment-legacy",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
+ },
+ listForTeamDiscussionInOrg: {
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions"
+ },
+ listForTeamDiscussionLegacy: {
+ deprecated: "octokit.reactions.listForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-legacy",
+ headers: {
+ accept: "application/vnd.github.squirrel-girl-preview+json"
+ },
+ method: "GET",
+ params: {
+ content: {
+ enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/reactions"
+ }
+ },
+ repos: {
+ acceptInvitation: {
+ method: "PATCH",
+ params: {
+ invitation_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/repository_invitations/:invitation_id"
+ },
+ addCollaborator: {
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/collaborators/:username"
+ },
+ addDeployKey: {
+ method: "POST",
+ params: {
+ key: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ read_only: {
+ type: "boolean"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/keys"
+ },
+ addProtectedBranchAdminEnforcement: {
+ method: "POST",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins"
+ },
+ addProtectedBranchAppRestrictions: {
+ method: "POST",
+ params: {
+ apps: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
+ },
+ addProtectedBranchRequiredSignatures: {
+ headers: {
+ accept: "application/vnd.github.zzzax-preview+json"
+ },
+ method: "POST",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures"
+ },
+ addProtectedBranchRequiredStatusChecksContexts: {
+ method: "POST",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ contexts: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
+ },
+ addProtectedBranchTeamRestrictions: {
+ method: "POST",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ teams: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ addProtectedBranchUserRestrictions: {
+ method: "POST",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ users: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ checkCollaborator: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/collaborators/:username"
+ },
+ checkVulnerabilityAlerts: {
+ headers: {
+ accept: "application/vnd.github.dorian-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/vulnerability-alerts"
+ },
+ compareCommits: {
+ method: "GET",
+ params: {
+ base: {
+ required: true,
+ type: "string"
+ },
+ head: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/compare/:base...:head"
+ },
+ createCommitComment: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ commit_sha: {
+ required: true,
+ type: "string"
+ },
+ line: {
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ type: "string"
+ },
+ position: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ alias: "commit_sha",
+ deprecated: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:commit_sha/comments"
+ },
+ createDeployment: {
+ method: "POST",
+ params: {
+ auto_merge: {
+ type: "boolean"
+ },
+ description: {
+ type: "string"
+ },
+ environment: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ payload: {
+ type: "string"
+ },
+ production_environment: {
+ type: "boolean"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ required_contexts: {
+ type: "string[]"
+ },
+ task: {
+ type: "string"
+ },
+ transient_environment: {
+ type: "boolean"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments"
+ },
+ createDeploymentStatus: {
+ method: "POST",
+ params: {
+ auto_inactive: {
+ type: "boolean"
+ },
+ deployment_id: {
+ required: true,
+ type: "integer"
+ },
+ description: {
+ type: "string"
+ },
+ environment: {
+ enum: ["production", "staging", "qa"],
+ type: "string"
+ },
+ environment_url: {
+ type: "string"
+ },
+ log_url: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["error", "failure", "inactive", "in_progress", "queued", "pending", "success"],
+ required: true,
+ type: "string"
+ },
+ target_url: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments/:deployment_id/statuses"
+ },
+ createDispatchEvent: {
+ method: "POST",
+ params: {
+ client_payload: {
+ type: "object"
+ },
+ event_type: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/dispatches"
+ },
+ createFile: {
+ deprecated: "octokit.repos.createFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)",
+ method: "PUT",
+ params: {
+ author: {
+ type: "object"
+ },
+ "author.email": {
+ required: true,
+ type: "string"
+ },
+ "author.name": {
+ required: true,
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ committer: {
+ type: "object"
+ },
+ "committer.email": {
+ required: true,
+ type: "string"
+ },
+ "committer.name": {
+ required: true,
+ type: "string"
+ },
+ content: {
+ required: true,
+ type: "string"
+ },
+ message: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contents/:path"
+ },
+ createForAuthenticatedUser: {
+ method: "POST",
+ params: {
+ allow_merge_commit: {
+ type: "boolean"
+ },
+ allow_rebase_merge: {
+ type: "boolean"
+ },
+ allow_squash_merge: {
+ type: "boolean"
+ },
+ auto_init: {
+ type: "boolean"
+ },
+ delete_branch_on_merge: {
+ type: "boolean"
+ },
+ description: {
+ type: "string"
+ },
+ gitignore_template: {
+ type: "string"
+ },
+ has_issues: {
+ type: "boolean"
+ },
+ has_projects: {
+ type: "boolean"
+ },
+ has_wiki: {
+ type: "boolean"
+ },
+ homepage: {
+ type: "string"
+ },
+ is_template: {
+ type: "boolean"
+ },
+ license_template: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ team_id: {
+ type: "integer"
+ },
+ visibility: {
+ enum: ["public", "private", "visibility", "internal"],
+ type: "string"
+ }
+ },
+ url: "/user/repos"
+ },
+ createFork: {
+ method: "POST",
+ params: {
+ organization: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/forks"
+ },
+ createHook: {
+ method: "POST",
+ params: {
+ active: {
+ type: "boolean"
+ },
+ config: {
+ required: true,
+ type: "object"
+ },
+ "config.content_type": {
+ type: "string"
+ },
+ "config.insecure_ssl": {
+ type: "string"
+ },
+ "config.secret": {
+ type: "string"
+ },
+ "config.url": {
+ required: true,
+ type: "string"
+ },
+ events: {
+ type: "string[]"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks"
+ },
+ createInOrg: {
+ method: "POST",
+ params: {
+ allow_merge_commit: {
+ type: "boolean"
+ },
+ allow_rebase_merge: {
+ type: "boolean"
+ },
+ allow_squash_merge: {
+ type: "boolean"
+ },
+ auto_init: {
+ type: "boolean"
+ },
+ delete_branch_on_merge: {
+ type: "boolean"
+ },
+ description: {
+ type: "string"
+ },
+ gitignore_template: {
+ type: "string"
+ },
+ has_issues: {
+ type: "boolean"
+ },
+ has_projects: {
+ type: "boolean"
+ },
+ has_wiki: {
+ type: "boolean"
+ },
+ homepage: {
+ type: "string"
+ },
+ is_template: {
+ type: "boolean"
+ },
+ license_template: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ team_id: {
+ type: "integer"
+ },
+ visibility: {
+ enum: ["public", "private", "visibility", "internal"],
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/repos"
+ },
+ createOrUpdateFile: {
+ method: "PUT",
+ params: {
+ author: {
+ type: "object"
+ },
+ "author.email": {
+ required: true,
+ type: "string"
+ },
+ "author.name": {
+ required: true,
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ committer: {
+ type: "object"
+ },
+ "committer.email": {
+ required: true,
+ type: "string"
+ },
+ "committer.name": {
+ required: true,
+ type: "string"
+ },
+ content: {
+ required: true,
+ type: "string"
+ },
+ message: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contents/:path"
+ },
+ createRelease: {
+ method: "POST",
+ params: {
+ body: {
+ type: "string"
+ },
+ draft: {
+ type: "boolean"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ prerelease: {
+ type: "boolean"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tag_name: {
+ required: true,
+ type: "string"
+ },
+ target_commitish: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases"
+ },
+ createStatus: {
+ method: "POST",
+ params: {
+ context: {
+ type: "string"
+ },
+ description: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ required: true,
+ type: "string"
+ },
+ state: {
+ enum: ["error", "failure", "pending", "success"],
+ required: true,
+ type: "string"
+ },
+ target_url: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/statuses/:sha"
+ },
+ createUsingTemplate: {
+ headers: {
+ accept: "application/vnd.github.baptiste-preview+json"
+ },
+ method: "POST",
+ params: {
+ description: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ template_owner: {
+ required: true,
+ type: "string"
+ },
+ template_repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:template_owner/:template_repo/generate"
+ },
+ declineInvitation: {
+ method: "DELETE",
+ params: {
+ invitation_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/repository_invitations/:invitation_id"
+ },
+ delete: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo"
+ },
+ deleteCommitComment: {
+ method: "DELETE",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments/:comment_id"
+ },
+ deleteDownload: {
+ method: "DELETE",
+ params: {
+ download_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/downloads/:download_id"
+ },
+ deleteFile: {
+ method: "DELETE",
+ params: {
+ author: {
+ type: "object"
+ },
+ "author.email": {
+ type: "string"
+ },
+ "author.name": {
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ committer: {
+ type: "object"
+ },
+ "committer.email": {
+ type: "string"
+ },
+ "committer.name": {
+ type: "string"
+ },
+ message: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contents/:path"
+ },
+ deleteHook: {
+ method: "DELETE",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks/:hook_id"
+ },
+ deleteInvitation: {
+ method: "DELETE",
+ params: {
+ invitation_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/invitations/:invitation_id"
+ },
+ deleteRelease: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ release_id: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/:release_id"
+ },
+ deleteReleaseAsset: {
+ method: "DELETE",
+ params: {
+ asset_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/assets/:asset_id"
+ },
+ disableAutomatedSecurityFixes: {
+ headers: {
+ accept: "application/vnd.github.london-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/automated-security-fixes"
+ },
+ disablePagesSite: {
+ headers: {
+ accept: "application/vnd.github.switcheroo-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages"
+ },
+ disableVulnerabilityAlerts: {
+ headers: {
+ accept: "application/vnd.github.dorian-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/vulnerability-alerts"
+ },
+ enableAutomatedSecurityFixes: {
+ headers: {
+ accept: "application/vnd.github.london-preview+json"
+ },
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/automated-security-fixes"
+ },
+ enablePagesSite: {
+ headers: {
+ accept: "application/vnd.github.switcheroo-preview+json"
+ },
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ source: {
+ type: "object"
+ },
+ "source.branch": {
+ enum: ["master", "gh-pages"],
+ type: "string"
+ },
+ "source.path": {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages"
+ },
+ enableVulnerabilityAlerts: {
+ headers: {
+ accept: "application/vnd.github.dorian-preview+json"
+ },
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/vulnerability-alerts"
+ },
+ get: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo"
+ },
+ getAppsWithAccessToProtectedBranch: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
+ },
+ getArchiveLink: {
+ method: "GET",
+ params: {
+ archive_format: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/:archive_format/:ref"
+ },
+ getBranch: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch"
+ },
+ getBranchProtection: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection"
+ },
+ getClones: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ per: {
+ enum: ["day", "week"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/traffic/clones"
+ },
+ getCodeFrequencyStats: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stats/code_frequency"
+ },
+ getCollaboratorPermissionLevel: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/collaborators/:username/permission"
+ },
+ getCombinedStatusForRef: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref/status"
+ },
+ getCommit: {
+ method: "GET",
+ params: {
+ commit_sha: {
+ alias: "ref",
+ deprecated: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ alias: "ref",
+ deprecated: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref"
+ },
+ getCommitActivityStats: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stats/commit_activity"
+ },
+ getCommitComment: {
+ method: "GET",
+ params: {
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments/:comment_id"
+ },
+ getCommitRefSha: {
+ deprecated: "octokit.repos.getCommitRefSha() is deprecated, see https://developer.github.com/v3/repos/commits/#get-a-single-commit",
+ headers: {
+ accept: "application/vnd.github.v3.sha"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref"
+ },
+ getContents: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contents/:path"
+ },
+ getContributorsStats: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stats/contributors"
+ },
+ getDeployKey: {
+ method: "GET",
+ params: {
+ key_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/keys/:key_id"
+ },
+ getDeployment: {
+ method: "GET",
+ params: {
+ deployment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments/:deployment_id"
+ },
+ getDeploymentStatus: {
+ method: "GET",
+ params: {
+ deployment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ status_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments/:deployment_id/statuses/:status_id"
+ },
+ getDownload: {
+ method: "GET",
+ params: {
+ download_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/downloads/:download_id"
+ },
+ getHook: {
+ method: "GET",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks/:hook_id"
+ },
+ getLatestPagesBuild: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages/builds/latest"
+ },
+ getLatestRelease: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/latest"
+ },
+ getPages: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages"
+ },
+ getPagesBuild: {
+ method: "GET",
+ params: {
+ build_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages/builds/:build_id"
+ },
+ getParticipationStats: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stats/participation"
+ },
+ getProtectedBranchAdminEnforcement: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins"
+ },
+ getProtectedBranchPullRequestReviewEnforcement: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews"
+ },
+ getProtectedBranchRequiredSignatures: {
+ headers: {
+ accept: "application/vnd.github.zzzax-preview+json"
+ },
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures"
+ },
+ getProtectedBranchRequiredStatusChecks: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks"
+ },
+ getProtectedBranchRestrictions: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions"
+ },
+ getPunchCardStats: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/stats/punch_card"
+ },
+ getReadme: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ ref: {
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/readme"
+ },
+ getRelease: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ release_id: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/:release_id"
+ },
+ getReleaseAsset: {
+ method: "GET",
+ params: {
+ asset_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/assets/:asset_id"
+ },
+ getReleaseByTag: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tag: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/tags/:tag"
+ },
+ getTeamsWithAccessToProtectedBranch: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ getTopPaths: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/traffic/popular/paths"
+ },
+ getTopReferrers: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/traffic/popular/referrers"
+ },
+ getUsersWithAccessToProtectedBranch: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ getViews: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ per: {
+ enum: ["day", "week"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/traffic/views"
+ },
+ list: {
+ method: "GET",
+ params: {
+ affiliation: {
+ type: "string"
+ },
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated", "pushed", "full_name"],
+ type: "string"
+ },
+ type: {
+ enum: ["all", "owner", "public", "private", "member"],
+ type: "string"
+ },
+ visibility: {
+ enum: ["all", "public", "private"],
+ type: "string"
+ }
+ },
+ url: "/user/repos"
+ },
+ listAppsWithAccessToProtectedBranch: {
+ deprecated: "octokit.repos.listAppsWithAccessToProtectedBranch() has been renamed to octokit.repos.getAppsWithAccessToProtectedBranch() (2019-09-13)",
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
+ },
+ listAssetsForRelease: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ release_id: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/:release_id/assets"
+ },
+ listBranches: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ protected: {
+ type: "boolean"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches"
+ },
+ listBranchesForHeadCommit: {
+ headers: {
+ accept: "application/vnd.github.groot-preview+json"
+ },
+ method: "GET",
+ params: {
+ commit_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:commit_sha/branches-where-head"
+ },
+ listCollaborators: {
+ method: "GET",
+ params: {
+ affiliation: {
+ enum: ["outside", "direct", "all"],
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/collaborators"
+ },
+ listCommentsForCommit: {
+ method: "GET",
+ params: {
+ commit_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ alias: "commit_sha",
+ deprecated: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:commit_sha/comments"
+ },
+ listCommitComments: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments"
+ },
+ listCommits: {
+ method: "GET",
+ params: {
+ author: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ path: {
+ type: "string"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ },
+ since: {
+ type: "string"
+ },
+ until: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits"
+ },
+ listContributors: {
+ method: "GET",
+ params: {
+ anon: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contributors"
+ },
+ listDeployKeys: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/keys"
+ },
+ listDeploymentStatuses: {
+ method: "GET",
+ params: {
+ deployment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments/:deployment_id/statuses"
+ },
+ listDeployments: {
+ method: "GET",
+ params: {
+ environment: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ },
+ task: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/deployments"
+ },
+ listDownloads: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/downloads"
+ },
+ listForOrg: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated", "pushed", "full_name"],
+ type: "string"
+ },
+ type: {
+ enum: ["all", "public", "private", "forks", "sources", "member", "internal"],
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/repos"
+ },
+ listForUser: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated", "pushed", "full_name"],
+ type: "string"
+ },
+ type: {
+ enum: ["all", "owner", "member"],
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/repos"
+ },
+ listForks: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["newest", "oldest", "stargazers"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/forks"
+ },
+ listHooks: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks"
+ },
+ listInvitations: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/invitations"
+ },
+ listInvitationsForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/repository_invitations"
+ },
+ listLanguages: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/languages"
+ },
+ listPagesBuilds: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages/builds"
+ },
+ listProtectedBranchRequiredStatusChecksContexts: {
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
+ },
+ listProtectedBranchTeamRestrictions: {
+ deprecated: "octokit.repos.listProtectedBranchTeamRestrictions() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-09)",
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ listProtectedBranchUserRestrictions: {
+ deprecated: "octokit.repos.listProtectedBranchUserRestrictions() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-09)",
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ listPublic: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "integer"
+ }
+ },
+ url: "/repositories"
+ },
+ listPullRequestsAssociatedWithCommit: {
+ headers: {
+ accept: "application/vnd.github.groot-preview+json"
+ },
+ method: "GET",
+ params: {
+ commit_sha: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:commit_sha/pulls"
+ },
+ listReleases: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases"
+ },
+ listStatusesForRef: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ ref: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/commits/:ref/statuses"
+ },
+ listTags: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/tags"
+ },
+ listTeams: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/teams"
+ },
+ listTeamsWithAccessToProtectedBranch: {
+ deprecated: "octokit.repos.listTeamsWithAccessToProtectedBranch() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-13)",
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ listTopics: {
+ headers: {
+ accept: "application/vnd.github.mercy-preview+json"
+ },
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/topics"
+ },
+ listUsersWithAccessToProtectedBranch: {
+ deprecated: "octokit.repos.listUsersWithAccessToProtectedBranch() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-13)",
+ method: "GET",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ merge: {
+ method: "POST",
+ params: {
+ base: {
+ required: true,
+ type: "string"
+ },
+ commit_message: {
+ type: "string"
+ },
+ head: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/merges"
+ },
+ pingHook: {
+ method: "POST",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks/:hook_id/pings"
+ },
+ removeBranchProtection: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection"
+ },
+ removeCollaborator: {
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/collaborators/:username"
+ },
+ removeDeployKey: {
+ method: "DELETE",
+ params: {
+ key_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/keys/:key_id"
+ },
+ removeProtectedBranchAdminEnforcement: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins"
+ },
+ removeProtectedBranchAppRestrictions: {
+ method: "DELETE",
+ params: {
+ apps: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
+ },
+ removeProtectedBranchPullRequestReviewEnforcement: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews"
+ },
+ removeProtectedBranchRequiredSignatures: {
+ headers: {
+ accept: "application/vnd.github.zzzax-preview+json"
+ },
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures"
+ },
+ removeProtectedBranchRequiredStatusChecks: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks"
+ },
+ removeProtectedBranchRequiredStatusChecksContexts: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ contexts: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
+ },
+ removeProtectedBranchRestrictions: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions"
+ },
+ removeProtectedBranchTeamRestrictions: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ teams: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ removeProtectedBranchUserRestrictions: {
+ method: "DELETE",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ users: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ replaceProtectedBranchAppRestrictions: {
+ method: "PUT",
+ params: {
+ apps: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
+ },
+ replaceProtectedBranchRequiredStatusChecksContexts: {
+ method: "PUT",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ contexts: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
+ },
+ replaceProtectedBranchTeamRestrictions: {
+ method: "PUT",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ teams: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
+ },
+ replaceProtectedBranchUserRestrictions: {
+ method: "PUT",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ users: {
+ mapTo: "data",
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
+ },
+ replaceTopics: {
+ headers: {
+ accept: "application/vnd.github.mercy-preview+json"
+ },
+ method: "PUT",
+ params: {
+ names: {
+ required: true,
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/topics"
+ },
+ requestPageBuild: {
+ method: "POST",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages/builds"
+ },
+ retrieveCommunityProfileMetrics: {
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/community/profile"
+ },
+ testPushHook: {
+ method: "POST",
+ params: {
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks/:hook_id/tests"
+ },
+ transfer: {
+ method: "POST",
+ params: {
+ new_owner: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_ids: {
+ type: "integer[]"
+ }
+ },
+ url: "/repos/:owner/:repo/transfer"
+ },
+ update: {
+ method: "PATCH",
+ params: {
+ allow_merge_commit: {
+ type: "boolean"
+ },
+ allow_rebase_merge: {
+ type: "boolean"
+ },
+ allow_squash_merge: {
+ type: "boolean"
+ },
+ archived: {
+ type: "boolean"
+ },
+ default_branch: {
+ type: "string"
+ },
+ delete_branch_on_merge: {
+ type: "boolean"
+ },
+ description: {
+ type: "string"
+ },
+ has_issues: {
+ type: "boolean"
+ },
+ has_projects: {
+ type: "boolean"
+ },
+ has_wiki: {
+ type: "boolean"
+ },
+ homepage: {
+ type: "string"
+ },
+ is_template: {
+ type: "boolean"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ visibility: {
+ enum: ["public", "private", "visibility", "internal"],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo"
+ },
+ updateBranchProtection: {
+ method: "PUT",
+ params: {
+ allow_deletions: {
+ type: "boolean"
+ },
+ allow_force_pushes: {
+ allowNull: true,
+ type: "boolean"
+ },
+ branch: {
+ required: true,
+ type: "string"
+ },
+ enforce_admins: {
+ allowNull: true,
+ required: true,
+ type: "boolean"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ required_linear_history: {
+ type: "boolean"
+ },
+ required_pull_request_reviews: {
+ allowNull: true,
+ required: true,
+ type: "object"
+ },
+ "required_pull_request_reviews.dismiss_stale_reviews": {
+ type: "boolean"
+ },
+ "required_pull_request_reviews.dismissal_restrictions": {
+ type: "object"
+ },
+ "required_pull_request_reviews.dismissal_restrictions.teams": {
+ type: "string[]"
+ },
+ "required_pull_request_reviews.dismissal_restrictions.users": {
+ type: "string[]"
+ },
+ "required_pull_request_reviews.require_code_owner_reviews": {
+ type: "boolean"
+ },
+ "required_pull_request_reviews.required_approving_review_count": {
+ type: "integer"
+ },
+ required_status_checks: {
+ allowNull: true,
+ required: true,
+ type: "object"
+ },
+ "required_status_checks.contexts": {
+ required: true,
+ type: "string[]"
+ },
+ "required_status_checks.strict": {
+ required: true,
+ type: "boolean"
+ },
+ restrictions: {
+ allowNull: true,
+ required: true,
+ type: "object"
+ },
+ "restrictions.apps": {
+ type: "string[]"
+ },
+ "restrictions.teams": {
+ required: true,
+ type: "string[]"
+ },
+ "restrictions.users": {
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection"
+ },
+ updateCommitComment: {
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/comments/:comment_id"
+ },
+ updateFile: {
+ deprecated: "octokit.repos.updateFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)",
+ method: "PUT",
+ params: {
+ author: {
+ type: "object"
+ },
+ "author.email": {
+ required: true,
+ type: "string"
+ },
+ "author.name": {
+ required: true,
+ type: "string"
+ },
+ branch: {
+ type: "string"
+ },
+ committer: {
+ type: "object"
+ },
+ "committer.email": {
+ required: true,
+ type: "string"
+ },
+ "committer.name": {
+ required: true,
+ type: "string"
+ },
+ content: {
+ required: true,
+ type: "string"
+ },
+ message: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ path: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ sha: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/contents/:path"
+ },
+ updateHook: {
+ method: "PATCH",
+ params: {
+ active: {
+ type: "boolean"
+ },
+ add_events: {
+ type: "string[]"
+ },
+ config: {
+ type: "object"
+ },
+ "config.content_type": {
+ type: "string"
+ },
+ "config.insecure_ssl": {
+ type: "string"
+ },
+ "config.secret": {
+ type: "string"
+ },
+ "config.url": {
+ required: true,
+ type: "string"
+ },
+ events: {
+ type: "string[]"
+ },
+ hook_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ remove_events: {
+ type: "string[]"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/hooks/:hook_id"
+ },
+ updateInformationAboutPagesSite: {
+ method: "PUT",
+ params: {
+ cname: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ source: {
+ enum: ['"gh-pages"', '"master"', '"master /docs"'],
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/pages"
+ },
+ updateInvitation: {
+ method: "PATCH",
+ params: {
+ invitation_id: {
+ required: true,
+ type: "integer"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ permissions: {
+ enum: ["read", "write", "admin"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/invitations/:invitation_id"
+ },
+ updateProtectedBranchPullRequestReviewEnforcement: {
+ method: "PATCH",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ dismiss_stale_reviews: {
+ type: "boolean"
+ },
+ dismissal_restrictions: {
+ type: "object"
+ },
+ "dismissal_restrictions.teams": {
+ type: "string[]"
+ },
+ "dismissal_restrictions.users": {
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ require_code_owner_reviews: {
+ type: "boolean"
+ },
+ required_approving_review_count: {
+ type: "integer"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews"
+ },
+ updateProtectedBranchRequiredStatusChecks: {
+ method: "PATCH",
+ params: {
+ branch: {
+ required: true,
+ type: "string"
+ },
+ contexts: {
+ type: "string[]"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ strict: {
+ type: "boolean"
+ }
+ },
+ url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks"
+ },
+ updateRelease: {
+ method: "PATCH",
+ params: {
+ body: {
+ type: "string"
+ },
+ draft: {
+ type: "boolean"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ prerelease: {
+ type: "boolean"
+ },
+ release_id: {
+ required: true,
+ type: "integer"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ tag_name: {
+ type: "string"
+ },
+ target_commitish: {
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/:release_id"
+ },
+ updateReleaseAsset: {
+ method: "PATCH",
+ params: {
+ asset_id: {
+ required: true,
+ type: "integer"
+ },
+ label: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/repos/:owner/:repo/releases/assets/:asset_id"
+ },
+ uploadReleaseAsset: {
+ method: "POST",
+ params: {
+ data: {
+ mapTo: "data",
+ required: true,
+ type: "string | object"
+ },
+ file: {
+ alias: "data",
+ deprecated: true,
+ type: "string | object"
+ },
+ headers: {
+ required: true,
+ type: "object"
+ },
+ "headers.content-length": {
+ required: true,
+ type: "integer"
+ },
+ "headers.content-type": {
+ required: true,
+ type: "string"
+ },
+ label: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ url: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: ":url"
+ }
+ },
+ search: {
+ code: {
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["indexed"],
+ type: "string"
+ }
+ },
+ url: "/search/code"
+ },
+ commits: {
+ headers: {
+ accept: "application/vnd.github.cloak-preview+json"
+ },
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["author-date", "committer-date"],
+ type: "string"
+ }
+ },
+ url: "/search/commits"
+ },
+ issues: {
+ deprecated: "octokit.search.issues() has been renamed to octokit.search.issuesAndPullRequests() (2018-12-27)",
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/search/issues"
+ },
+ issuesAndPullRequests: {
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/search/issues"
+ },
+ labels: {
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ repository_id: {
+ required: true,
+ type: "integer"
+ },
+ sort: {
+ enum: ["created", "updated"],
+ type: "string"
+ }
+ },
+ url: "/search/labels"
+ },
+ repos: {
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["stars", "forks", "help-wanted-issues", "updated"],
+ type: "string"
+ }
+ },
+ url: "/search/repositories"
+ },
+ topics: {
+ method: "GET",
+ params: {
+ q: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/search/topics"
+ },
+ users: {
+ method: "GET",
+ params: {
+ order: {
+ enum: ["desc", "asc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ q: {
+ required: true,
+ type: "string"
+ },
+ sort: {
+ enum: ["followers", "repositories", "joined"],
+ type: "string"
+ }
+ },
+ url: "/search/users"
+ }
+ },
+ teams: {
+ addMember: {
+ deprecated: "octokit.teams.addMember() has been renamed to octokit.teams.addMemberLegacy() (2020-01-16)",
+ method: "PUT",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ addMemberLegacy: {
+ deprecated: "octokit.teams.addMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-team-member-legacy",
+ method: "PUT",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ addOrUpdateMembership: {
+ deprecated: "octokit.teams.addOrUpdateMembership() has been renamed to octokit.teams.addOrUpdateMembershipLegacy() (2020-01-16)",
+ method: "PUT",
+ params: {
+ role: {
+ enum: ["member", "maintainer"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ addOrUpdateMembershipInOrg: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ role: {
+ enum: ["member", "maintainer"],
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/memberships/:username"
+ },
+ addOrUpdateMembershipLegacy: {
+ deprecated: "octokit.teams.addOrUpdateMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-or-update-team-membership-legacy",
+ method: "PUT",
+ params: {
+ role: {
+ enum: ["member", "maintainer"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ addOrUpdateProject: {
+ deprecated: "octokit.teams.addOrUpdateProject() has been renamed to octokit.teams.addOrUpdateProjectLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PUT",
+ params: {
+ permission: {
+ enum: ["read", "write", "admin"],
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ addOrUpdateProjectInOrg: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ permission: {
+ enum: ["read", "write", "admin"],
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/projects/:project_id"
+ },
+ addOrUpdateProjectLegacy: {
+ deprecated: "octokit.teams.addOrUpdateProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-project-legacy",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "PUT",
+ params: {
+ permission: {
+ enum: ["read", "write", "admin"],
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ addOrUpdateRepo: {
+ deprecated: "octokit.teams.addOrUpdateRepo() has been renamed to octokit.teams.addOrUpdateRepoLegacy() (2020-01-16)",
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ addOrUpdateRepoInOrg: {
+ method: "PUT",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo"
+ },
+ addOrUpdateRepoLegacy: {
+ deprecated: "octokit.teams.addOrUpdateRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-repository-legacy",
+ method: "PUT",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ checkManagesRepo: {
+ deprecated: "octokit.teams.checkManagesRepo() has been renamed to octokit.teams.checkManagesRepoLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ checkManagesRepoInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo"
+ },
+ checkManagesRepoLegacy: {
+ deprecated: "octokit.teams.checkManagesRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#check-if-a-team-manages-a-repository-legacy",
+ method: "GET",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ create: {
+ method: "POST",
+ params: {
+ description: {
+ type: "string"
+ },
+ maintainers: {
+ type: "string[]"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ parent_team_id: {
+ type: "integer"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ privacy: {
+ enum: ["secret", "closed"],
+ type: "string"
+ },
+ repo_names: {
+ type: "string[]"
+ }
+ },
+ url: "/orgs/:org/teams"
+ },
+ createDiscussion: {
+ deprecated: "octokit.teams.createDiscussion() has been renamed to octokit.teams.createDiscussionLegacy() (2020-01-16)",
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/discussions"
+ },
+ createDiscussionComment: {
+ deprecated: "octokit.teams.createDiscussionComment() has been renamed to octokit.teams.createDiscussionCommentLegacy() (2020-01-16)",
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments"
+ },
+ createDiscussionCommentInOrg: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments"
+ },
+ createDiscussionCommentLegacy: {
+ deprecated: "octokit.teams.createDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#create-a-comment-legacy",
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments"
+ },
+ createDiscussionInOrg: {
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions"
+ },
+ createDiscussionLegacy: {
+ deprecated: "octokit.teams.createDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#create-a-discussion-legacy",
+ method: "POST",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ private: {
+ type: "boolean"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ title: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/discussions"
+ },
+ delete: {
+ deprecated: "octokit.teams.delete() has been renamed to octokit.teams.deleteLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ },
+ deleteDiscussion: {
+ deprecated: "octokit.teams.deleteDiscussion() has been renamed to octokit.teams.deleteDiscussionLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ deleteDiscussionComment: {
+ deprecated: "octokit.teams.deleteDiscussionComment() has been renamed to octokit.teams.deleteDiscussionCommentLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ deleteDiscussionCommentInOrg: {
+ method: "DELETE",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number"
+ },
+ deleteDiscussionCommentLegacy: {
+ deprecated: "octokit.teams.deleteDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#delete-a-comment-legacy",
+ method: "DELETE",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ deleteDiscussionInOrg: {
+ method: "DELETE",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number"
+ },
+ deleteDiscussionLegacy: {
+ deprecated: "octokit.teams.deleteDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#delete-a-discussion-legacy",
+ method: "DELETE",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ deleteInOrg: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug"
+ },
+ deleteLegacy: {
+ deprecated: "octokit.teams.deleteLegacy() is deprecated, see https://developer.github.com/v3/teams/#delete-team-legacy",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ },
+ get: {
+ deprecated: "octokit.teams.get() has been renamed to octokit.teams.getLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ },
+ getByName: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug"
+ },
+ getDiscussion: {
+ deprecated: "octokit.teams.getDiscussion() has been renamed to octokit.teams.getDiscussionLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ getDiscussionComment: {
+ deprecated: "octokit.teams.getDiscussionComment() has been renamed to octokit.teams.getDiscussionCommentLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ getDiscussionCommentInOrg: {
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number"
+ },
+ getDiscussionCommentLegacy: {
+ deprecated: "octokit.teams.getDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#get-a-single-comment-legacy",
+ method: "GET",
+ params: {
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ getDiscussionInOrg: {
+ method: "GET",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number"
+ },
+ getDiscussionLegacy: {
+ deprecated: "octokit.teams.getDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#get-a-single-discussion-legacy",
+ method: "GET",
+ params: {
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ getLegacy: {
+ deprecated: "octokit.teams.getLegacy() is deprecated, see https://developer.github.com/v3/teams/#get-team-legacy",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ },
+ getMember: {
+ deprecated: "octokit.teams.getMember() has been renamed to octokit.teams.getMemberLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ getMemberLegacy: {
+ deprecated: "octokit.teams.getMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-member-legacy",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ getMembership: {
+ deprecated: "octokit.teams.getMembership() has been renamed to octokit.teams.getMembershipLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ getMembershipInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/memberships/:username"
+ },
+ getMembershipLegacy: {
+ deprecated: "octokit.teams.getMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-membership-legacy",
+ method: "GET",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ list: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/orgs/:org/teams"
+ },
+ listChild: {
+ deprecated: "octokit.teams.listChild() has been renamed to octokit.teams.listChildLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/teams"
+ },
+ listChildInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/teams"
+ },
+ listChildLegacy: {
+ deprecated: "octokit.teams.listChildLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-child-teams-legacy",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/teams"
+ },
+ listDiscussionComments: {
+ deprecated: "octokit.teams.listDiscussionComments() has been renamed to octokit.teams.listDiscussionCommentsLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments"
+ },
+ listDiscussionCommentsInOrg: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments"
+ },
+ listDiscussionCommentsLegacy: {
+ deprecated: "octokit.teams.listDiscussionCommentsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#list-comments-legacy",
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments"
+ },
+ listDiscussions: {
+ deprecated: "octokit.teams.listDiscussions() has been renamed to octokit.teams.listDiscussionsLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions"
+ },
+ listDiscussionsInOrg: {
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions"
+ },
+ listDiscussionsLegacy: {
+ deprecated: "octokit.teams.listDiscussionsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#list-discussions-legacy",
+ method: "GET",
+ params: {
+ direction: {
+ enum: ["asc", "desc"],
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions"
+ },
+ listForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/teams"
+ },
+ listMembers: {
+ deprecated: "octokit.teams.listMembers() has been renamed to octokit.teams.listMembersLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ role: {
+ enum: ["member", "maintainer", "all"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/members"
+ },
+ listMembersInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ role: {
+ enum: ["member", "maintainer", "all"],
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/members"
+ },
+ listMembersLegacy: {
+ deprecated: "octokit.teams.listMembersLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-team-members-legacy",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ role: {
+ enum: ["member", "maintainer", "all"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/members"
+ },
+ listPendingInvitations: {
+ deprecated: "octokit.teams.listPendingInvitations() has been renamed to octokit.teams.listPendingInvitationsLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/invitations"
+ },
+ listPendingInvitationsInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/invitations"
+ },
+ listPendingInvitationsLegacy: {
+ deprecated: "octokit.teams.listPendingInvitationsLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-pending-team-invitations-legacy",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/invitations"
+ },
+ listProjects: {
+ deprecated: "octokit.teams.listProjects() has been renamed to octokit.teams.listProjectsLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects"
+ },
+ listProjectsInOrg: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/projects"
+ },
+ listProjectsLegacy: {
+ deprecated: "octokit.teams.listProjectsLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-projects-legacy",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects"
+ },
+ listRepos: {
+ deprecated: "octokit.teams.listRepos() has been renamed to octokit.teams.listReposLegacy() (2020-01-16)",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos"
+ },
+ listReposInOrg: {
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/repos"
+ },
+ listReposLegacy: {
+ deprecated: "octokit.teams.listReposLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-repos-legacy",
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos"
+ },
+ removeMember: {
+ deprecated: "octokit.teams.removeMember() has been renamed to octokit.teams.removeMemberLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ removeMemberLegacy: {
+ deprecated: "octokit.teams.removeMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-member-legacy",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/members/:username"
+ },
+ removeMembership: {
+ deprecated: "octokit.teams.removeMembership() has been renamed to octokit.teams.removeMembershipLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ removeMembershipInOrg: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/memberships/:username"
+ },
+ removeMembershipLegacy: {
+ deprecated: "octokit.teams.removeMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-membership-legacy",
+ method: "DELETE",
+ params: {
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/memberships/:username"
+ },
+ removeProject: {
+ deprecated: "octokit.teams.removeProject() has been renamed to octokit.teams.removeProjectLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ removeProjectInOrg: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/projects/:project_id"
+ },
+ removeProjectLegacy: {
+ deprecated: "octokit.teams.removeProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-project-legacy",
+ method: "DELETE",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ removeRepo: {
+ deprecated: "octokit.teams.removeRepo() has been renamed to octokit.teams.removeRepoLegacy() (2020-01-16)",
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ removeRepoInOrg: {
+ method: "DELETE",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo"
+ },
+ removeRepoLegacy: {
+ deprecated: "octokit.teams.removeRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-repository-legacy",
+ method: "DELETE",
+ params: {
+ owner: {
+ required: true,
+ type: "string"
+ },
+ repo: {
+ required: true,
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/repos/:owner/:repo"
+ },
+ reviewProject: {
+ deprecated: "octokit.teams.reviewProject() has been renamed to octokit.teams.reviewProjectLegacy() (2020-01-16)",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ reviewProjectInOrg: {
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ org: {
+ required: true,
+ type: "string"
+ },
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/projects/:project_id"
+ },
+ reviewProjectLegacy: {
+ deprecated: "octokit.teams.reviewProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#review-a-team-project-legacy",
+ headers: {
+ accept: "application/vnd.github.inertia-preview+json"
+ },
+ method: "GET",
+ params: {
+ project_id: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/projects/:project_id"
+ },
+ update: {
+ deprecated: "octokit.teams.update() has been renamed to octokit.teams.updateLegacy() (2020-01-16)",
+ method: "PATCH",
+ params: {
+ description: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ parent_team_id: {
+ type: "integer"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ privacy: {
+ enum: ["secret", "closed"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ },
+ updateDiscussion: {
+ deprecated: "octokit.teams.updateDiscussion() has been renamed to octokit.teams.updateDiscussionLegacy() (2020-01-16)",
+ method: "PATCH",
+ params: {
+ body: {
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ updateDiscussionComment: {
+ deprecated: "octokit.teams.updateDiscussionComment() has been renamed to octokit.teams.updateDiscussionCommentLegacy() (2020-01-16)",
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ updateDiscussionCommentInOrg: {
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number"
+ },
+ updateDiscussionCommentLegacy: {
+ deprecated: "octokit.teams.updateDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#edit-a-comment-legacy",
+ method: "PATCH",
+ params: {
+ body: {
+ required: true,
+ type: "string"
+ },
+ comment_number: {
+ required: true,
+ type: "integer"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
+ },
+ updateDiscussionInOrg: {
+ method: "PATCH",
+ params: {
+ body: {
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number"
+ },
+ updateDiscussionLegacy: {
+ deprecated: "octokit.teams.updateDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#edit-a-discussion-legacy",
+ method: "PATCH",
+ params: {
+ body: {
+ type: "string"
+ },
+ discussion_number: {
+ required: true,
+ type: "integer"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/teams/:team_id/discussions/:discussion_number"
+ },
+ updateInOrg: {
+ method: "PATCH",
+ params: {
+ description: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ org: {
+ required: true,
+ type: "string"
+ },
+ parent_team_id: {
+ type: "integer"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ privacy: {
+ enum: ["secret", "closed"],
+ type: "string"
+ },
+ team_slug: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/orgs/:org/teams/:team_slug"
+ },
+ updateLegacy: {
+ deprecated: "octokit.teams.updateLegacy() is deprecated, see https://developer.github.com/v3/teams/#edit-team-legacy",
+ method: "PATCH",
+ params: {
+ description: {
+ type: "string"
+ },
+ name: {
+ required: true,
+ type: "string"
+ },
+ parent_team_id: {
+ type: "integer"
+ },
+ permission: {
+ enum: ["pull", "push", "admin"],
+ type: "string"
+ },
+ privacy: {
+ enum: ["secret", "closed"],
+ type: "string"
+ },
+ team_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/teams/:team_id"
+ }
+ },
+ users: {
+ addEmails: {
+ method: "POST",
+ params: {
+ emails: {
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/user/emails"
+ },
+ block: {
+ method: "PUT",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/blocks/:username"
+ },
+ checkBlocked: {
+ method: "GET",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/blocks/:username"
+ },
+ checkFollowing: {
+ method: "GET",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/following/:username"
+ },
+ checkFollowingForUser: {
+ method: "GET",
+ params: {
+ target_user: {
+ required: true,
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/following/:target_user"
+ },
+ createGpgKey: {
+ method: "POST",
+ params: {
+ armored_public_key: {
+ type: "string"
+ }
+ },
+ url: "/user/gpg_keys"
+ },
+ createPublicKey: {
+ method: "POST",
+ params: {
+ key: {
+ type: "string"
+ },
+ title: {
+ type: "string"
+ }
+ },
+ url: "/user/keys"
+ },
+ deleteEmails: {
+ method: "DELETE",
+ params: {
+ emails: {
+ required: true,
+ type: "string[]"
+ }
+ },
+ url: "/user/emails"
+ },
+ deleteGpgKey: {
+ method: "DELETE",
+ params: {
+ gpg_key_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/gpg_keys/:gpg_key_id"
+ },
+ deletePublicKey: {
+ method: "DELETE",
+ params: {
+ key_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/keys/:key_id"
+ },
+ follow: {
+ method: "PUT",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/following/:username"
+ },
+ getAuthenticated: {
+ method: "GET",
+ params: {},
+ url: "/user"
+ },
+ getByUsername: {
+ method: "GET",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username"
+ },
+ getContextForUser: {
+ method: "GET",
+ params: {
+ subject_id: {
+ type: "string"
+ },
+ subject_type: {
+ enum: ["organization", "repository", "issue", "pull_request"],
+ type: "string"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/hovercard"
+ },
+ getGpgKey: {
+ method: "GET",
+ params: {
+ gpg_key_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/gpg_keys/:gpg_key_id"
+ },
+ getPublicKey: {
+ method: "GET",
+ params: {
+ key_id: {
+ required: true,
+ type: "integer"
+ }
+ },
+ url: "/user/keys/:key_id"
+ },
+ list: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ since: {
+ type: "string"
+ }
+ },
+ url: "/users"
+ },
+ listBlocked: {
+ method: "GET",
+ params: {},
+ url: "/user/blocks"
+ },
+ listEmails: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/emails"
+ },
+ listFollowersForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/followers"
+ },
+ listFollowersForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/followers"
+ },
+ listFollowingForAuthenticatedUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/following"
+ },
+ listFollowingForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/following"
+ },
+ listGpgKeys: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/gpg_keys"
+ },
+ listGpgKeysForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/gpg_keys"
+ },
+ listPublicEmails: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/public_emails"
+ },
+ listPublicKeys: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ }
+ },
+ url: "/user/keys"
+ },
+ listPublicKeysForUser: {
+ method: "GET",
+ params: {
+ page: {
+ type: "integer"
+ },
+ per_page: {
+ type: "integer"
+ },
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/users/:username/keys"
+ },
+ togglePrimaryEmailVisibility: {
+ method: "PATCH",
+ params: {
+ email: {
+ required: true,
+ type: "string"
+ },
+ visibility: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/email/visibility"
+ },
+ unblock: {
+ method: "DELETE",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/blocks/:username"
+ },
+ unfollow: {
+ method: "DELETE",
+ params: {
+ username: {
+ required: true,
+ type: "string"
+ }
+ },
+ url: "/user/following/:username"
+ },
+ updateAuthenticated: {
+ method: "PATCH",
+ params: {
+ bio: {
+ type: "string"
+ },
+ blog: {
+ type: "string"
+ },
+ company: {
+ type: "string"
+ },
+ email: {
+ type: "string"
+ },
+ hireable: {
+ type: "boolean"
+ },
+ location: {
+ type: "string"
+ },
+ name: {
+ type: "string"
+ }
+ },
+ url: "/user"
+ }
+ }
+};
+
+const VERSION = "2.4.0";
+
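+// registerEndpoints wires the route definitions above onto the octokit instance: for each
+// scope/method it creates `octokit.<scope>.<method>()` with the route's request defaults
+// (method, url, headers) and parameter validation, plus a deprecation warning wrapper where defined.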
+function registerEndpoints(octokit, routes) {
+ Object.keys(routes).forEach(namespaceName => {
+ if (!octokit[namespaceName]) {
+ octokit[namespaceName] = {};
+ }
+
+ Object.keys(routes[namespaceName]).forEach(apiName => {
+ const apiOptions = routes[namespaceName][apiName];
+ const endpointDefaults = ["method", "url", "headers"].reduce((map, key) => {
+ if (typeof apiOptions[key] !== "undefined") {
+ map[key] = apiOptions[key];
+ }
+
+ return map;
+ }, {});
+ endpointDefaults.request = {
+ validate: apiOptions.params
+ };
+ let request = octokit.request.defaults(endpointDefaults); // patch request & endpoint methods to support deprecated parameters.
+ // Not the most elegant solution, but we don’t want to move deprecation
+ // logic into octokit/endpoint.js as it’s out of scope
+
+ const hasDeprecatedParam = Object.keys(apiOptions.params || {}).find(key => apiOptions.params[key].deprecated);
+
+ if (hasDeprecatedParam) {
+ const patch = patchForDeprecation.bind(null, octokit, apiOptions);
+ request = patch(octokit.request.defaults(endpointDefaults), `.${namespaceName}.${apiName}()`);
+ request.endpoint = patch(request.endpoint, `.${namespaceName}.${apiName}.endpoint()`);
+ request.endpoint.merge = patch(request.endpoint.merge, `.${namespaceName}.${apiName}.endpoint.merge()`);
+ }
+
+ if (apiOptions.deprecated) {
+ octokit[namespaceName][apiName] = Object.assign(function deprecatedEndpointMethod() {
+ octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] ${apiOptions.deprecated}`));
+ octokit[namespaceName][apiName] = request;
+ return request.apply(null, arguments);
+ }, request);
+ return;
+ }
+
+ octokit[namespaceName][apiName] = request;
+ });
+ });
+}
+
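+// patchForDeprecation wraps a request (or endpoint) method so that calls passing a deprecated
+// parameter name emit a warning and are transparently remapped to the parameter's alias before
+// the underlying method is invoked.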
+function patchForDeprecation(octokit, apiOptions, method, methodName) {
+ const patchedMethod = options => {
+ options = Object.assign({}, options);
+ Object.keys(options).forEach(key => {
+ if (apiOptions.params[key] && apiOptions.params[key].deprecated) {
+ const aliasKey = apiOptions.params[key].alias;
+ octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] "${key}" parameter is deprecated for "${methodName}". Use "${aliasKey}" instead`));
+
+ if (!(aliasKey in options)) {
+ options[aliasKey] = options[key];
+ }
+
+ delete options[key];
+ }
+ });
+ return method(options);
+ };
+
+ Object.keys(method).forEach(key => {
+ patchedMethod[key] = method[key];
+ });
+ return patchedMethod;
+}
+
+/**
+ * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary
+ * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is
+ * done, we will remove the registerEndpoints methods and return the methods
+ * directly as with the other plugins. At that point we will also remove the
+ * legacy workarounds and deprecations.
+ *
+ * See the plan at
+ * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1
+ */
+
+function restEndpointMethods(octokit) {
+ // @ts-ignore
+ octokit.registerEndpoints = registerEndpoints.bind(null, octokit);
+ registerEndpoints(octokit, endpointsByScope); // Aliasing scopes for backward compatibility
+ // See https://github.com/octokit/rest.js/pull/1134
+
+ [["gitdata", "git"], ["authorization", "oauthAuthorizations"], ["pullRequests", "pulls"]].forEach(([deprecatedScope, scope]) => {
+ Object.defineProperty(octokit, deprecatedScope, {
+ get() {
+ octokit.log.warn( // @ts-ignore
+ new deprecation.Deprecation(`[@octokit/plugin-rest-endpoint-methods] "octokit.${deprecatedScope}.*" methods are deprecated, use "octokit.${scope}.*" instead`)); // @ts-ignore
+
+ return octokit[scope];
+ }
+
+ });
+ });
+ return {};
+}
+restEndpointMethods.VERSION = VERSION;
+
+exports.restEndpointMethods = restEndpointMethods;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 562:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var osName = _interopDefault(__webpack_require__(2));
+
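+// getUserAgent builds a user agent string such as "Node.js/12.16.1 (Linux 4.15; x64)";
+// on Windows, where os-name may shell out to `wmic` and fail, it falls back gracefully.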
+function getUserAgent() {
+ try {
+ return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
+ } catch (error) {
+ if (/wmic os get Caption/.test(error.message)) {
+ return "Windows ";
+ }
+
+ return "";
+ }
+}
+
+exports.getUserAgent = getUserAgent;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 563:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = getPreviousPage
+
+const getPage = __webpack_require__(265)
+
+function getPreviousPage (octokit, link, headers) {
+ return getPage(octokit, link, 'prev', headers)
+}
+
+
+/***/ }),
+
+/***/ 568:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const path = __webpack_require__(622);
+const niceTry = __webpack_require__(948);
+const resolveCommand = __webpack_require__(489);
+const escape = __webpack_require__(462);
+const readShebang = __webpack_require__(389);
+const semver = __webpack_require__(48);
+
+const isWin = process.platform === 'win32';
+const isExecutableRegExp = /\.(?:com|exe)$/i;
+const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i;
+
+// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0
+const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false;
+
+function detectShebang(parsed) {
+ parsed.file = resolveCommand(parsed);
+
+ const shebang = parsed.file && readShebang(parsed.file);
+
+ if (shebang) {
+ parsed.args.unshift(parsed.file);
+ parsed.command = shebang;
+
+ return resolveCommand(parsed);
+ }
+
+ return parsed.file;
+}
+
+function parseNonShell(parsed) {
+ if (!isWin) {
+ return parsed;
+ }
+
+ // Detect & add support for shebangs
+ const commandFile = detectShebang(parsed);
+
+ // We don't need a shell if the command filename is an executable
+ const needsShell = !isExecutableRegExp.test(commandFile);
+
+ // If a shell is required, use cmd.exe and take care of escaping everything correctly
+    // Note that `forceShell` is a hidden option used only in tests
+ if (parsed.options.forceShell || needsShell) {
+ // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
+        // The cmd-shim simply executes the package bin file with NodeJS, proxying any arguments
+ // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
+ // we need to double escape them
+ const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);
+
+ // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
+ // This is necessary otherwise it will always fail with ENOENT in those cases
+ parsed.command = path.normalize(parsed.command);
+
+ // Escape command & arguments
+ parsed.command = escape.command(parsed.command);
+ parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));
+
+ const shellCommand = [parsed.command].concat(parsed.args).join(' ');
+
+ parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
+ parsed.command = process.env.comspec || 'cmd.exe';
+ parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
+ }
+
+ return parsed;
+}
+
+function parseShell(parsed) {
+ // If node supports the shell option, there's no need to mimic its behavior
+ if (supportsShellOption) {
+ return parsed;
+ }
+
+ // Mimic node shell option
+ // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335
+ const shellCommand = [parsed.command].concat(parsed.args).join(' ');
+
+ if (isWin) {
+ parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe';
+ parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
+ parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
+ } else {
+ if (typeof parsed.options.shell === 'string') {
+ parsed.command = parsed.options.shell;
+ } else if (process.platform === 'android') {
+ parsed.command = '/system/bin/sh';
+ } else {
+ parsed.command = '/bin/sh';
+ }
+
+ parsed.args = ['-c', shellCommand];
+ }
+
+ return parsed;
+}
+
+function parse(command, args, options) {
+ // Normalize arguments, similar to nodejs
+ if (args && !Array.isArray(args)) {
+ options = args;
+ args = null;
+ }
+
+ args = args ? args.slice(0) : []; // Clone array to avoid changing the original
+ options = Object.assign({}, options); // Clone object to avoid changing the original
+
+ // Build our parsed object
+ const parsed = {
+ command,
+ args,
+ options,
+ file: undefined,
+ original: {
+ command,
+ args,
+ },
+ };
+
+ // Delegate further parsing to shell or non-shell
+ return options.shell ? parseShell(parsed) : parseNonShell(parsed);
+}
+
+module.exports = parse;
+
+
+/***/ }),
+
+/***/ 570:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const exec_1 = __webpack_require__(986);
+const io = __importStar(__webpack_require__(1));
+const fs_1 = __webpack_require__(747);
+const path = __importStar(__webpack_require__(622));
+const constants_1 = __webpack_require__(573);
+function isGnuTar() {
+ return __awaiter(this, void 0, void 0, function* () {
+ core.debug("Checking tar --version");
+ let versionOutput = "";
+ yield exec_1.exec("tar --version", [], {
+ ignoreReturnCode: true,
+ silent: true,
+ listeners: {
+ stdout: (data) => (versionOutput += data.toString()),
+ stderr: (data) => (versionOutput += data.toString())
+ }
+ });
+ core.debug(versionOutput.trim());
+ return versionOutput.toUpperCase().includes("GNU TAR");
+ });
+}
+exports.isGnuTar = isGnuTar;
+function getTarPath(args) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Explicitly use BSD Tar on Windows
+ const IS_WINDOWS = process.platform === "win32";
+ if (IS_WINDOWS) {
+ const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
+ if (fs_1.existsSync(systemTar)) {
+ return systemTar;
+ }
+            else if (yield isGnuTar()) {
+ args.push("--force-local");
+ }
+ }
+ return yield io.which("tar", true);
+ });
+}
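+// execTar invokes the resolved tar binary with the given arguments; the binary path is
+// quoted so that locations containing spaces still execute correctly.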
+function execTar(args, cwd) {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ yield exec_1.exec(`"${yield getTarPath(args)}"`, args, { cwd: cwd });
+ }
+ catch (error) {
+ throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+ }
+ });
+}
+function getWorkingDirectory() {
+ var _a;
+ return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd();
+}
+function extractTar(archivePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Create directory to extract tar into
+ const workingDirectory = getWorkingDirectory();
+ yield io.mkdirP(workingDirectory);
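+        // Backslashes in the archive and workspace paths are rewritten to forward slashes
+        // below so the same tar invocation also works with BSD tar on Windows.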
+ const args = [
+ "-xz",
+ "-f",
+ archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+ "-P",
+ "-C",
+ workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
+ ];
+ yield execTar(args);
+ });
+}
+exports.extractTar = extractTar;
+function createTar(archiveFolder, sourceDirectories) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Write source directories to manifest.txt to avoid command length limits
+ const manifestFilename = "manifest.txt";
+ fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n"));
+ const workingDirectory = getWorkingDirectory();
+ const args = [
+ "-cz",
+ "-f",
+ constants_1.CacheFilename.replace(new RegExp("\\" + path.sep, "g"), "/"),
+ "-P",
+ "-C",
+ workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
+ "--files-from",
+ manifestFilename
+ ];
+ yield execTar(args, archiveFolder);
+ });
+}
+exports.createTar = createTar;
+
+
+/***/ }),
+
+/***/ 573:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+var Inputs;
+(function (Inputs) {
+ Inputs["Key"] = "key";
+ Inputs["Path"] = "path";
+ Inputs["RestoreKeys"] = "restore-keys";
+})(Inputs = exports.Inputs || (exports.Inputs = {}));
+var Outputs;
+(function (Outputs) {
+ Outputs["CacheHit"] = "cache-hit";
+})(Outputs = exports.Outputs || (exports.Outputs = {}));
+var State;
+(function (State) {
+ State["CacheKey"] = "CACHE_KEY";
+ State["CacheResult"] = "CACHE_RESULT";
+})(State = exports.State || (exports.State = {}));
+var Events;
+(function (Events) {
+ Events["Key"] = "GITHUB_EVENT_NAME";
+ Events["Push"] = "push";
+ Events["PullRequest"] = "pull_request";
+})(Events = exports.Events || (exports.Events = {}));
+exports.CacheFilename = "cache.tgz";
+// Socket timeout in milliseconds during download. If no traffic is received
+// over the socket during this period, the socket is destroyed and the download
+// is aborted.
+exports.SocketTimeout = 5000;
+
+
+/***/ }),
+
+/***/ 577:
+/***/ (function(module) {
+
+module.exports = getPageLinks
+
+function getPageLinks (link) {
+ link = link.link || link.headers.link || ''
+
+ const links = {}
+
+ // link format:
+  // '<url>; rel="next", <url>; rel="last"'
+ link.replace(/<([^>]*)>;\s*rel="([\w]*)"/g, (m, uri, type) => {
+ links[type] = uri
+ })
+
+ return links
+}
+
+
+/***/ }),
+
+/***/ 597:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const pathHelper = __webpack_require__(972);
+const internal_match_kind_1 = __webpack_require__(327);
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Given an array of patterns, returns an array of paths to search.
+ * Duplicates and paths under other included paths are filtered out.
+ */
+function getSearchPaths(patterns) {
+ // Ignore negate patterns
+ patterns = patterns.filter(x => !x.negate);
+ // Create a map of all search paths
+ const searchPathMap = {};
+ for (const pattern of patterns) {
+ const key = IS_WINDOWS
+ ? pattern.searchPath.toUpperCase()
+ : pattern.searchPath;
+ searchPathMap[key] = 'candidate';
+ }
+ const result = [];
+ for (const pattern of patterns) {
+ // Check if already included
+ const key = IS_WINDOWS
+ ? pattern.searchPath.toUpperCase()
+ : pattern.searchPath;
+ if (searchPathMap[key] === 'included') {
+ continue;
+ }
+ // Check for an ancestor search path
+ let foundAncestor = false;
+ let tempKey = key;
+ let parent = pathHelper.dirname(tempKey);
+ while (parent !== tempKey) {
+ if (searchPathMap[parent]) {
+ foundAncestor = true;
+ break;
+ }
+ tempKey = parent;
+ parent = pathHelper.dirname(tempKey);
+ }
+ // Include the search pattern in the result
+ if (!foundAncestor) {
+ result.push(pattern.searchPath);
+ searchPathMap[key] = 'included';
+ }
+ }
+ return result;
+}
+exports.getSearchPaths = getSearchPaths;
+/**
+ * Matches the patterns against the path
+ */
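+// Match kinds are bit flags: non-negated patterns OR their match result in, negated patterns clear it.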
+function match(patterns, itemPath) {
+ let result = internal_match_kind_1.MatchKind.None;
+ for (const pattern of patterns) {
+ if (pattern.negate) {
+ result &= ~pattern.match(itemPath);
+ }
+ else {
+ result |= pattern.match(itemPath);
+ }
+ }
+ return result;
+}
+exports.match = match;
+/**
+ * Checks whether to descend further into the directory
+ */
+function partialMatch(patterns, itemPath) {
+ return patterns.some(x => !x.negate && x.partialMatch(itemPath));
+}
+exports.partialMatch = partialMatch;
+//# sourceMappingURL=internal-pattern-helper.js.map
+
+/***/ }),
+
+/***/ 601:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __webpack_require__(470);
+/**
+ * Returns a copy with defaults filled in.
+ */
+function getOptions(copy) {
+ const result = {
+ followSymbolicLinks: true,
+ implicitDescendants: true,
+ omitBrokenSymbolicLinks: true
+ };
+ if (copy) {
+ if (typeof copy.followSymbolicLinks === 'boolean') {
+ result.followSymbolicLinks = copy.followSymbolicLinks;
+ core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
+ }
+ if (typeof copy.implicitDescendants === 'boolean') {
+ result.implicitDescendants = copy.implicitDescendants;
+ core.debug(`implicitDescendants '${result.implicitDescendants}'`);
+ }
+ if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
+ result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
+ core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
+ }
+ }
+ return result;
+}
+exports.getOptions = getOptions;
+//# sourceMappingURL=internal-glob-options-helper.js.map
+
+/***/ }),
+
+/***/ 605:
+/***/ (function(module) {
+
+module.exports = require("http");
+
+/***/ }),
+
+/***/ 614:
+/***/ (function(module) {
+
+module.exports = require("events");
+
+/***/ }),
+
+/***/ 621:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const path = __webpack_require__(622);
+const pathKey = __webpack_require__(39);
+
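+// Builds a PATH-like string that prepends every ancestor `node_modules/.bin` directory
+// (walking up from `opts.cwd`) and the directory of the running node binary, so locally
+// installed executables take precedence over globally installed ones.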
+module.exports = opts => {
+ opts = Object.assign({
+ cwd: process.cwd(),
+ path: process.env[pathKey()]
+ }, opts);
+
+ let prev;
+ let pth = path.resolve(opts.cwd);
+ const ret = [];
+
+ while (prev !== pth) {
+ ret.push(path.join(pth, 'node_modules/.bin'));
+ prev = pth;
+ pth = path.resolve(pth, '..');
+ }
+
+ // ensure the running `node` binary is used
+ ret.push(path.dirname(process.execPath));
+
+ return ret.concat(opts.path).join(path.delimiter);
+};
+
+module.exports.env = opts => {
+ opts = Object.assign({
+ env: process.env
+ }, opts);
+
+ const env = Object.assign({}, opts.env);
+ const path = pathKey({env});
+
+ opts.path = env[path];
+ env[path] = module.exports(opts);
+
+ return env;
+};
+
+
+/***/ }),
+
+/***/ 622:
+/***/ (function(module) {
+
+module.exports = require("path");
+
+/***/ }),
+
+/***/ 631:
+/***/ (function(module) {
+
+module.exports = require("net");
+
+/***/ }),
+
+/***/ 634:
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const tc = __importStar(__webpack_require__(533));
+const path = __importStar(__webpack_require__(622));
+const semver = __importStar(__webpack_require__(864));
+const httpm = __importStar(__webpack_require__(539));
+const sys = __importStar(__webpack_require__(646));
+const core_1 = __webpack_require__(470);
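+// downloadGo picks a Go release matching versionSpec from golang.org/dl, downloads the
+// archive, extracts it (zip on Windows, tar.gz elsewhere) and caches the extracted `go`
+// directory in the tool cache, returning the cached path.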
+function downloadGo(versionSpec, stable) {
+ return __awaiter(this, void 0, void 0, function* () {
+ let toolPath;
+ try {
+ let match = yield findMatch(versionSpec, stable);
+ if (match) {
+ // download
+ core_1.debug(`match ${match.version}`);
+ let downloadUrl = `https://storage.googleapis.com/golang/${match.files[0].filename}`;
+ console.log(`Downloading from ${downloadUrl}`);
+ let downloadPath = yield tc.downloadTool(downloadUrl);
+ core_1.debug(`downloaded to ${downloadPath}`);
+ // extract
+ console.log('Extracting ...');
+ let extPath = sys.getPlatform() == 'windows'
+ ? yield tc.extractZip(downloadPath)
+ : yield tc.extractTar(downloadPath);
+ core_1.debug(`extracted to ${extPath}`);
+                    // the downloaded archive extracts with a top-level 'go' directory
+ const toolRoot = path.join(extPath, 'go');
+ toolPath = yield tc.cacheDir(toolRoot, 'go', makeSemver(match.version));
+ }
+ }
+ catch (error) {
+ throw new Error(`Failed to download version ${versionSpec}: ${error}`);
+ }
+ return toolPath;
+ });
+}
+exports.downloadGo = downloadGo;
+function findMatch(versionSpec, stable) {
+ return __awaiter(this, void 0, void 0, function* () {
+ let archFilter = sys.getArch();
+ let platFilter = sys.getPlatform();
+ let result;
+ let match;
+ const dlUrl = 'https://golang.org/dl/?mode=json&include=all';
+ let candidates = yield module.exports.getVersions(dlUrl);
+ if (!candidates) {
+ throw new Error(`golang download url did not return results`);
+ }
+ let goFile;
+ for (let i = 0; i < candidates.length; i++) {
+ let candidate = candidates[i];
+ let version = makeSemver(candidate.version);
+            // 1.13.0 is advertised as 1.13, which would prevent matching exactly 1.13.0,
+            // since the semver spec 1.13 matches the latest 1.13.x; pad two-part versions with .0
+ let parts = version.split('.');
+ if (parts.length == 2) {
+ version = version + '.0';
+ }
+ core_1.debug(`check ${version} satisfies ${versionSpec}`);
+ if (semver.satisfies(version, versionSpec) &&
+ (!stable || candidate.stable === stable)) {
+ goFile = candidate.files.find(file => {
+ core_1.debug(`${file.arch}===${archFilter} && ${file.os}===${platFilter}`);
+ return file.arch === archFilter && file.os === platFilter;
+ });
+ if (goFile) {
+ core_1.debug(`matched ${candidate.version}`);
+ match = candidate;
+ break;
+ }
+ }
+ }
+ if (match && goFile) {
+ // clone since we're mutating the file list to be only the file that matches
+ result = Object.assign({}, match);
+ result.files = [goFile];
+ }
+ return result;
+ });
+}
+exports.findMatch = findMatch;
+function getVersions(dlUrl) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // this returns versions descending so latest is first
+ let http = new httpm.HttpClient('setup-go');
+ return (yield http.getJson(dlUrl)).result;
+ });
+}
+exports.getVersions = getVersions;
+//
+// Convert the go version syntax into semver for semver matching
+// 1.13.1 => 1.13.1
+// 1.13 => 1.13.0
+// 1.10beta1 => 1.10.0-beta1, 1.10rc1 => 1.10.0-rc1
+// 1.8.5beta1 => 1.8.5-beta1, 1.8.5rc1 => 1.8.5-rc1
+function makeSemver(version) {
+ version = version.replace('go', '');
+ version = version.replace('beta', '-beta').replace('rc', '-rc');
+ let parts = version.split('-');
+ let verPart = parts[0];
+ let prereleasePart = parts.length > 1 ? `-${parts[1]}` : '';
+ let verParts = verPart.split('.');
+ if (verParts.length == 2) {
+ verPart += '.0';
+ }
+ return `${verPart}${prereleasePart}`;
+}
+exports.makeSemver = makeSemver;
+
+
+/***/ }),
+
+/***/ 644:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+exports.alphasort = alphasort
+exports.alphasorti = alphasorti
+exports.setopts = setopts
+exports.ownProp = ownProp
+exports.makeAbs = makeAbs
+exports.finish = finish
+exports.mark = mark
+exports.isIgnored = isIgnored
+exports.childrenIgnored = childrenIgnored
+
+function ownProp (obj, field) {
+ return Object.prototype.hasOwnProperty.call(obj, field)
+}
+
+var path = __webpack_require__(622)
+var minimatch = __webpack_require__(93)
+var isAbsolute = __webpack_require__(681)
+var Minimatch = minimatch.Minimatch
+
+function alphasorti (a, b) {
+ return a.toLowerCase().localeCompare(b.toLowerCase())
+}
+
+function alphasort (a, b) {
+ return a.localeCompare(b)
+}
+
+function setupIgnores (self, options) {
+ self.ignore = options.ignore || []
+
+ if (!Array.isArray(self.ignore))
+ self.ignore = [self.ignore]
+
+ if (self.ignore.length) {
+ self.ignore = self.ignore.map(ignoreMap)
+ }
+}
+
+// ignore patterns are always in dot:true mode.
+function ignoreMap (pattern) {
+ var gmatcher = null
+ if (pattern.slice(-3) === '/**') {
+ var gpattern = pattern.replace(/(\/\*\*)+$/, '')
+ gmatcher = new Minimatch(gpattern, { dot: true })
+ }
+
+ return {
+ matcher: new Minimatch(pattern, { dot: true }),
+ gmatcher: gmatcher
+ }
+}
+
+function setopts (self, pattern, options) {
+ if (!options)
+ options = {}
+
+ // base-matching: just use globstar for that.
+ if (options.matchBase && -1 === pattern.indexOf("/")) {
+ if (options.noglobstar) {
+ throw new Error("base matching requires globstar")
+ }
+ pattern = "**/" + pattern
+ }
+
+ self.silent = !!options.silent
+ self.pattern = pattern
+ self.strict = options.strict !== false
+ self.realpath = !!options.realpath
+ self.realpathCache = options.realpathCache || Object.create(null)
+ self.follow = !!options.follow
+ self.dot = !!options.dot
+ self.mark = !!options.mark
+ self.nodir = !!options.nodir
+ if (self.nodir)
+ self.mark = true
+ self.sync = !!options.sync
+ self.nounique = !!options.nounique
+ self.nonull = !!options.nonull
+ self.nosort = !!options.nosort
+ self.nocase = !!options.nocase
+ self.stat = !!options.stat
+ self.noprocess = !!options.noprocess
+ self.absolute = !!options.absolute
+
+ self.maxLength = options.maxLength || Infinity
+ self.cache = options.cache || Object.create(null)
+ self.statCache = options.statCache || Object.create(null)
+ self.symlinks = options.symlinks || Object.create(null)
+
+ setupIgnores(self, options)
+
+ self.changedCwd = false
+ var cwd = process.cwd()
+ if (!ownProp(options, "cwd"))
+ self.cwd = cwd
+ else {
+ self.cwd = path.resolve(options.cwd)
+ self.changedCwd = self.cwd !== cwd
+ }
+
+ self.root = options.root || path.resolve(self.cwd, "/")
+ self.root = path.resolve(self.root)
+ if (process.platform === "win32")
+ self.root = self.root.replace(/\\/g, "/")
+
+ // TODO: is an absolute `cwd` supposed to be resolved against `root`?
+ // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
+ self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
+ if (process.platform === "win32")
+ self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
+ self.nomount = !!options.nomount
+
+ // disable comments and negation in Minimatch.
+ // Note that they are not supported in Glob itself anyway.
+ options.nonegate = true
+ options.nocomment = true
+
+ self.minimatch = new Minimatch(pattern, options)
+ self.options = self.minimatch.options
+}
+
+function finish (self) {
+ var nou = self.nounique
+ var all = nou ? [] : Object.create(null)
+
+ for (var i = 0, l = self.matches.length; i < l; i ++) {
+ var matches = self.matches[i]
+ if (!matches || Object.keys(matches).length === 0) {
+ if (self.nonull) {
+ // do like the shell, and spit out the literal glob
+ var literal = self.minimatch.globSet[i]
+ if (nou)
+ all.push(literal)
+ else
+ all[literal] = true
+ }
+ } else {
+ // had matches
+ var m = Object.keys(matches)
+ if (nou)
+ all.push.apply(all, m)
+ else
+ m.forEach(function (m) {
+ all[m] = true
+ })
+ }
+ }
+
+ if (!nou)
+ all = Object.keys(all)
+
+ if (!self.nosort)
+ all = all.sort(self.nocase ? alphasorti : alphasort)
+
+ // at *some* point we statted all of these
+ if (self.mark) {
+ for (var i = 0; i < all.length; i++) {
+ all[i] = self._mark(all[i])
+ }
+ if (self.nodir) {
+ all = all.filter(function (e) {
+ var notDir = !(/\/$/.test(e))
+ var c = self.cache[e] || self.cache[makeAbs(self, e)]
+ if (notDir && c)
+ notDir = c !== 'DIR' && !Array.isArray(c)
+ return notDir
+ })
+ }
+ }
+
+ if (self.ignore.length)
+ all = all.filter(function(m) {
+ return !isIgnored(self, m)
+ })
+
+ self.found = all
+}
+
+function mark (self, p) {
+ var abs = makeAbs(self, p)
+ var c = self.cache[abs]
+ var m = p
+ if (c) {
+ var isDir = c === 'DIR' || Array.isArray(c)
+ var slash = p.slice(-1) === '/'
+
+ if (isDir && !slash)
+ m += '/'
+ else if (!isDir && slash)
+ m = m.slice(0, -1)
+
+ if (m !== p) {
+ var mabs = makeAbs(self, m)
+ self.statCache[mabs] = self.statCache[abs]
+ self.cache[mabs] = self.cache[abs]
+ }
+ }
+
+ return m
+}
+
+// lotta situps...
+function makeAbs (self, f) {
+ var abs = f
+ if (f.charAt(0) === '/') {
+ abs = path.join(self.root, f)
+ } else if (isAbsolute(f) || f === '') {
+ abs = f
+ } else if (self.changedCwd) {
+ abs = path.resolve(self.cwd, f)
+ } else {
+ abs = path.resolve(f)
+ }
+
+ if (process.platform === 'win32')
+ abs = abs.replace(/\\/g, '/')
+
+ return abs
+}
+
+
+// Returns true if the path matches an ignore pattern. When a pattern ends with the
+// globstar '**' (e.g. 'node_modules/**'), the parent directory itself ('node_modules')
+// is also treated as ignored along with its contents.
+function isIgnored (self, path) {
+ if (!self.ignore.length)
+ return false
+
+ return self.ignore.some(function(item) {
+ return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
+ })
+}
+
+function childrenIgnored (self, path) {
+ if (!self.ignore.length)
+ return false
+
+ return self.ignore.some(function(item) {
+ return !!(item.gmatcher && item.gmatcher.match(path))
+ })
+}
+
+
+/***/ }),
+
+/***/ 646:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+let os = __webpack_require__(87);
+function getPlatform() {
+ // darwin and linux match already
+ // freebsd not supported yet but future proofed.
+ // 'aix', 'darwin', 'freebsd', 'linux', 'openbsd', 'sunos', and 'win32'
+ let plat = os.platform();
+ // wants 'darwin', 'freebsd', 'linux', 'windows'
+ if (plat === 'win32') {
+ plat = 'windows';
+ }
+ return plat;
+}
+exports.getPlatform = getPlatform;
+function getArch() {
+ // 'arm', 'arm64', 'ia32', 'mips', 'mipsel', 'ppc', 'ppc64', 's390', 's390x', 'x32', and 'x64'.
+ let arch = os.arch();
+    // wants amd64, 386, arm64, armv6l, ppc64le, s390x
+ // currently not supported by runner but future proofed mapping
+ switch (arch) {
+ case 'x64':
+ arch = 'amd64';
+ break;
+ // case 'ppc':
+ // arch = 'ppc64';
+ // break;
+ case 'x32':
+ arch = '386';
+ break;
+ }
+ return arch;
+}
+exports.getArch = getArch;
+
+
+/***/ }),
+
+/***/ 649:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = getLastPage
+
+const getPage = __webpack_require__(265)
+
+function getLastPage (octokit, link, headers) {
+ return getPage(octokit, link, 'last', headers)
+}
+
+
+/***/ }),
+
+/***/ 654:
+/***/ (function(module) {
+
+// This is not the set of all possible signals.
+//
+// It IS, however, the set of all signals that trigger
+// an exit on either Linux or BSD systems. Linux is a
+// superset of the signal names supported on BSD, and
+// the unknown signals just fail to register, so we can
+// catch that easily enough.
+//
+// Don't bother with SIGKILL. It's uncatchable, which
+// means that we can't fire any callbacks anyway.
+//
+// If a user does happen to register a handler on a non-
+// fatal signal like SIGWINCH or something, and then
+// exit, it'll end up firing `process.emit('exit')`, so
+// the handler will be fired anyway.
+//
+// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised
+// artificially, inherently leave the process in a
+// state from which it is not safe to try and enter JS
+// listeners.
+module.exports = [
+ 'SIGABRT',
+ 'SIGALRM',
+ 'SIGHUP',
+ 'SIGINT',
+ 'SIGTERM'
+]
+
+if (process.platform !== 'win32') {
+ module.exports.push(
+ 'SIGVTALRM',
+ 'SIGXCPU',
+ 'SIGXFSZ',
+ 'SIGUSR2',
+ 'SIGTRAP',
+ 'SIGSYS',
+ 'SIGQUIT',
+ 'SIGIOT'
+ // should detect profiler and enable/disable accordingly.
+ // see #21
+ // 'SIGPROF'
+ )
+}
+
+if (process.platform === 'linux') {
+ module.exports.push(
+ 'SIGIO',
+ 'SIGPOLL',
+ 'SIGPWR',
+ 'SIGSTKFLT',
+ 'SIGUNUSED'
+ )
+}
+
+
+/***/ }),
+
+/***/ 655:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const tc = __importStar(__webpack_require__(533));
+const path_1 = __importDefault(__webpack_require__(622));
+const main_1 = __webpack_require__(514);
+// installLint returns the path to the installed golangci-lint binary.
+function installLint(versionConfig) {
+ return __awaiter(this, void 0, void 0, function* () {
+ core.info(`Installing golangci-lint ${versionConfig.TargetVersion}...`);
+ const startedAt = Date.now();
+ core.info(`Downloading ${versionConfig.AssetURL} ...`);
+ const tarGzPath = yield tc.downloadTool(versionConfig.AssetURL);
+ const extractedDir = yield tc.extractTar(tarGzPath, process.env.HOME);
+ const urlParts = versionConfig.AssetURL.split(`/`);
+ const dirName = urlParts[urlParts.length - 1].replace(/\.tar\.gz$/, ``);
+ const lintPath = path_1.default.join(extractedDir, dirName, `golangci-lint`);
+ core.info(`Installed golangci-lint into ${lintPath} in ${Date.now() - startedAt}ms`);
+ return lintPath;
+ });
+}
+exports.installLint = installLint;
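+// installGo delegates to the bundled setup-go action: it fakes the action's `go-version`
+// input through the INPUT_GO-VERSION environment variable ("1" matches any Go 1.x release)
+// and then calls its run() entrypoint.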
+function installGo() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const startedAt = Date.now();
+ process.env[`INPUT_GO-VERSION`] = `1`;
+ yield main_1.run();
+ core.info(`Installed Go in ${Date.now() - startedAt}ms`);
+ });
+}
+exports.installGo = installGo;
+
+
+/***/ }),
+
+/***/ 669:
+/***/ (function(module) {
+
+module.exports = require("util");
+
+/***/ }),
+
+/***/ 672:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var _a;
+Object.defineProperty(exports, "__esModule", { value: true });
+const assert_1 = __webpack_require__(357);
+const fs = __webpack_require__(747);
+const path = __webpack_require__(622);
+_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
+exports.IS_WINDOWS = process.platform === 'win32';
+function exists(fsPath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ yield exports.stat(fsPath);
+ }
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ return false;
+ }
+ throw err;
+ }
+ return true;
+ });
+}
+exports.exists = exists;
+function isDirectory(fsPath, useStat = false) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
+ return stats.isDirectory();
+ });
+}
+exports.isDirectory = isDirectory;
+/**
+ * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
+ * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
+ */
+function isRooted(p) {
+ p = normalizeSeparators(p);
+ if (!p) {
+ throw new Error('isRooted() parameter "p" cannot be empty');
+ }
+ if (exports.IS_WINDOWS) {
+ return (p.startsWith('\\') || // e.g. \ or \hello or \\hello
+ /^[A-Z]:/i.test(p)); // e.g. C: or C:\hello
+ }
+ return p.startsWith('/');
+}
+exports.isRooted = isRooted;
+/**
+ * Recursively create a directory at `fsPath`.
+ *
+ * This implementation is optimistic, meaning it attempts to create the full
+ * path first, and backs up the path stack from there.
+ *
+ * @param fsPath The path to create
+ * @param maxDepth The maximum recursion depth
+ * @param depth The current recursion depth
+ */
+function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
+ return __awaiter(this, void 0, void 0, function* () {
+ assert_1.ok(fsPath, 'a path argument must be provided');
+ fsPath = path.resolve(fsPath);
+ if (depth >= maxDepth)
+ return exports.mkdir(fsPath);
+ try {
+ yield exports.mkdir(fsPath);
+ return;
+ }
+ catch (err) {
+ switch (err.code) {
+ case 'ENOENT': {
+ yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
+ yield exports.mkdir(fsPath);
+ return;
+ }
+ default: {
+ let stats;
+ try {
+ stats = yield exports.stat(fsPath);
+ }
+ catch (err2) {
+ throw err;
+ }
+ if (!stats.isDirectory())
+ throw err;
+ }
+ }
+ }
+ });
+}
+exports.mkdirP = mkdirP;
+/**
+ * Best effort attempt to determine whether a file exists and is executable.
+ * @param filePath file path to check
+ * @param extensions additional file extensions to try
+ * @return if file exists and is executable, returns the file path. otherwise empty string.
+ */
+function tryGetExecutablePath(filePath, extensions) {
+ return __awaiter(this, void 0, void 0, function* () {
+ let stats = undefined;
+ try {
+ // test file exists
+ stats = yield exports.stat(filePath);
+ }
+ catch (err) {
+ if (err.code !== 'ENOENT') {
+ // eslint-disable-next-line no-console
+ console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
+ }
+ }
+ if (stats && stats.isFile()) {
+ if (exports.IS_WINDOWS) {
+ // on Windows, test for valid extension
+ const upperExt = path.extname(filePath).toUpperCase();
+ if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
+ return filePath;
+ }
+ }
+ else {
+ if (isUnixExecutable(stats)) {
+ return filePath;
+ }
+ }
+ }
+ // try each extension
+ const originalFilePath = filePath;
+ for (const extension of extensions) {
+ filePath = originalFilePath + extension;
+ stats = undefined;
+ try {
+ stats = yield exports.stat(filePath);
+ }
+ catch (err) {
+ if (err.code !== 'ENOENT') {
+ // eslint-disable-next-line no-console
+ console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
+ }
+ }
+ if (stats && stats.isFile()) {
+ if (exports.IS_WINDOWS) {
+ // preserve the case of the actual file (since an extension was appended)
+ try {
+ const directory = path.dirname(filePath);
+ const upperName = path.basename(filePath).toUpperCase();
+ for (const actualName of yield exports.readdir(directory)) {
+ if (upperName === actualName.toUpperCase()) {
+ filePath = path.join(directory, actualName);
+ break;
+ }
+ }
+ }
+ catch (err) {
+ // eslint-disable-next-line no-console
+ console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
+ }
+ return filePath;
+ }
+ else {
+ if (isUnixExecutable(stats)) {
+ return filePath;
+ }
+ }
+ }
+ }
+ return '';
+ });
+}
+exports.tryGetExecutablePath = tryGetExecutablePath;
+function normalizeSeparators(p) {
+ p = p || '';
+ if (exports.IS_WINDOWS) {
+ // convert slashes on Windows
+ p = p.replace(/\//g, '\\');
+ // remove redundant slashes
+ return p.replace(/\\\\+/g, '\\');
+ }
+ // remove redundant slashes
+ return p.replace(/\/\/+/g, '/');
+}
+// on Mac/Linux, test the execute bit
+// R W X R W X R W X
+// 256 128 64 32 16 8 4 2 1
+function isUnixExecutable(stats) {
+ return ((stats.mode & 1) > 0 ||
+ ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
+ ((stats.mode & 64) > 0 && stats.uid === process.getuid()));
+}
+//# sourceMappingURL=io-util.js.map
+
+/***/ }),
+
+/***/ 674:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var wrappy = __webpack_require__(11)
+var reqs = Object.create(null)
+var once = __webpack_require__(969)
+
+module.exports = wrappy(inflight)
+
+function inflight (key, cb) {
+ if (reqs[key]) {
+ reqs[key].push(cb)
+ return null
+ } else {
+ reqs[key] = [cb]
+ return makeres(key)
+ }
+}
+
+function makeres (key) {
+ return once(function RES () {
+ var cbs = reqs[key]
+ var len = cbs.length
+ var args = slice(arguments)
+
+ // XXX It's somewhat ambiguous whether a new callback added in this
+ // pass should be queued for later execution if something in the
+ // list of callbacks throws, or if it should just be discarded.
+ // However, it's such an edge case that it hardly matters, and either
+ // choice is likely as surprising as the other.
+ // As it happens, we do go ahead and schedule it for later execution.
+ try {
+ for (var i = 0; i < len; i++) {
+ cbs[i].apply(null, args)
+ }
+ } finally {
+ if (cbs.length > len) {
+ // added more in the interim.
+ // de-zalgo, just in case, but don't call again.
+ cbs.splice(0, len)
+ process.nextTick(function () {
+ RES.apply(null, args)
+ })
+ } else {
+ delete reqs[key]
+ }
+ }
+ })
+}
+
+function slice (args) {
+ var length = args.length
+ var array = []
+
+ for (var i = 0; i < length; i++) array[i] = args[i]
+ return array
+}
+
+
+/***/ }),
+
+/***/ 675:
+/***/ (function(module) {
+
+module.exports = function btoa(str) {
+ return new Buffer(str).toString('base64')
+}
+
+
+/***/ }),
+
+/***/ 681:
+/***/ (function(module) {
+
+"use strict";
+
+
+function posix(path) {
+ return path.charAt(0) === '/';
+}
+
+function win32(path) {
+ // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
+ var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
+ var result = splitDeviceRe.exec(path);
+ var device = result[1] || '';
+ var isUnc = Boolean(device && device.charAt(1) !== ':');
+
+ // UNC paths are always absolute
+ return Boolean(result[2] || isUnc);
+}
+
+module.exports = process.platform === 'win32' ? win32 : posix;
+module.exports.posix = posix;
+module.exports.win32 = win32;
+
+
+/***/ }),
+
+/***/ 686:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const os = __importStar(__webpack_require__(87));
+const events = __importStar(__webpack_require__(614));
+const child = __importStar(__webpack_require__(129));
+const path = __importStar(__webpack_require__(622));
+const io = __importStar(__webpack_require__(1));
+const ioUtil = __importStar(__webpack_require__(672));
+/* eslint-disable @typescript-eslint/unbound-method */
+const IS_WINDOWS = process.platform === 'win32';
+/*
+ * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
+ */
+class ToolRunner extends events.EventEmitter {
+ constructor(toolPath, args, options) {
+ super();
+ if (!toolPath) {
+ throw new Error("Parameter 'toolPath' cannot be null or empty.");
+ }
+ this.toolPath = toolPath;
+ this.args = args || [];
+ this.options = options || {};
+ }
+ _debug(message) {
+ if (this.options.listeners && this.options.listeners.debug) {
+ this.options.listeners.debug(message);
+ }
+ }
+ _getCommandString(options, noPrefix) {
+ const toolPath = this._getSpawnFileName();
+ const args = this._getSpawnArgs(options);
+ let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
+ if (IS_WINDOWS) {
+ // Windows + cmd file
+ if (this._isCmdFile()) {
+ cmd += toolPath;
+ for (const a of args) {
+ cmd += ` ${a}`;
+ }
+ }
+ // Windows + verbatim
+ else if (options.windowsVerbatimArguments) {
+ cmd += `"${toolPath}"`;
+ for (const a of args) {
+ cmd += ` ${a}`;
+ }
+ }
+ // Windows (regular)
+ else {
+ cmd += this._windowsQuoteCmdArg(toolPath);
+ for (const a of args) {
+ cmd += ` ${this._windowsQuoteCmdArg(a)}`;
+ }
+ }
+ }
+ else {
+ // OSX/Linux - this can likely be improved with some form of quoting.
+ // creating processes on Unix is fundamentally different from Windows.
+ // on Unix, execvp() takes an arg array.
+ cmd += toolPath;
+ for (const a of args) {
+ cmd += ` ${a}`;
+ }
+ }
+ return cmd;
+ }
+ _processLineBuffer(data, strBuffer, onLine) {
+ try {
+ let s = strBuffer + data.toString();
+ let n = s.indexOf(os.EOL);
+ while (n > -1) {
+ const line = s.substring(0, n);
+ onLine(line);
+ // the rest of the string ...
+ s = s.substring(n + os.EOL.length);
+ n = s.indexOf(os.EOL);
+ }
+ strBuffer = s;
+ }
+ catch (err) {
+ // streaming lines to console is best effort. Don't fail a build.
+ this._debug(`error processing line. Failed with error ${err}`);
+ }
+ }
+ _getSpawnFileName() {
+ if (IS_WINDOWS) {
+ if (this._isCmdFile()) {
+ return process.env['COMSPEC'] || 'cmd.exe';
+ }
+ }
+ return this.toolPath;
+ }
+ _getSpawnArgs(options) {
+ if (IS_WINDOWS) {
+ if (this._isCmdFile()) {
+ let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
+ for (const a of this.args) {
+ argline += ' ';
+ argline += options.windowsVerbatimArguments
+ ? a
+ : this._windowsQuoteCmdArg(a);
+ }
+ argline += '"';
+ return [argline];
+ }
+ }
+ return this.args;
+ }
+ _endsWith(str, end) {
+ return str.endsWith(end);
+ }
+ _isCmdFile() {
+ const upperToolPath = this.toolPath.toUpperCase();
+ return (this._endsWith(upperToolPath, '.CMD') ||
+ this._endsWith(upperToolPath, '.BAT'));
+ }
+ _windowsQuoteCmdArg(arg) {
+ // for .exe, apply the normal quoting rules that libuv applies
+ if (!this._isCmdFile()) {
+ return this._uvQuoteCmdArg(arg);
+ }
+ // otherwise apply quoting rules specific to the cmd.exe command line parser.
+ // the libuv rules are generic and are not designed specifically for cmd.exe
+ // command line parser.
+ //
+ // for a detailed description of the cmd.exe command line parser, refer to
+ // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
+ // need quotes for empty arg
+ if (!arg) {
+ return '""';
+ }
+ // determine whether the arg needs to be quoted
+ const cmdSpecialChars = [
+ ' ',
+ '\t',
+ '&',
+ '(',
+ ')',
+ '[',
+ ']',
+ '{',
+ '}',
+ '^',
+ '=',
+ ';',
+ '!',
+ "'",
+ '+',
+ ',',
+ '`',
+ '~',
+ '|',
+ '<',
+ '>',
+ '"'
+ ];
+ let needsQuotes = false;
+ for (const char of arg) {
+ if (cmdSpecialChars.some(x => x === char)) {
+ needsQuotes = true;
+ break;
+ }
+ }
+ // short-circuit if quotes not needed
+ if (!needsQuotes) {
+ return arg;
+ }
+ // the following quoting rules are very similar to the rules that libuv applies by default.
+ //
+ // 1) wrap the string in quotes
+ //
+ // 2) double-up quotes - i.e. " => ""
+ //
+ // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
+ // doesn't work well with a cmd.exe command line.
+ //
+ // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
+ // for example, the command line:
+ // foo.exe "myarg:""my val"""
+ // is parsed by a .NET console app into an arg array:
+ // [ "myarg:\"my val\"" ]
+ // which is the same end result when applying libuv quoting rules. although the actual
+ // command line from libuv quoting rules would look like:
+ // foo.exe "myarg:\"my val\""
+ //
+ // 3) double-up slashes that precede a quote,
+ // e.g. hello \world => "hello \world"
+ // hello\"world => "hello\\""world"
+ // hello\\"world => "hello\\\\""world"
+ // hello world\ => "hello world\\"
+ //
+ // technically this is not required for a cmd.exe command line, or the batch argument parser.
+ // the reasons for including this as a .cmd quoting rule are:
+ //
+ // a) this is optimized for the scenario where the argument is passed from the .cmd file to an
+ // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
+ //
+ // b) it's what we've been doing previously (by deferring to node default behavior) and we
+ // haven't heard any complaints about that aspect.
+ //
+ // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
+ // escaped when used on the command line directly - even though within a .cmd file % can be escaped
+ // by using %%.
+ //
+ // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
+ // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
+ //
+ // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
+ // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
+ // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
+ // to an external program.
+ //
+ // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
+ // % can be escaped within a .cmd file.
+ let reverse = '"';
+ let quoteHit = true;
+ for (let i = arg.length; i > 0; i--) {
+ // walk the string in reverse
+ reverse += arg[i - 1];
+ if (quoteHit && arg[i - 1] === '\\') {
+ reverse += '\\'; // double the slash
+ }
+ else if (arg[i - 1] === '"') {
+ quoteHit = true;
+ reverse += '"'; // double the quote
+ }
+ else {
+ quoteHit = false;
+ }
+ }
+ reverse += '"';
+ return reverse
+ .split('')
+ .reverse()
+ .join('');
+ }
+ _uvQuoteCmdArg(arg) {
+ // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
+ // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
+ // is used.
+ //
+ // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
+ // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
+ // pasting copyright notice from Node within this function:
+ //
+ // Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+ //
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
+ // of this software and associated documentation files (the "Software"), to
+ // deal in the Software without restriction, including without limitation the
+ // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ // sell copies of the Software, and to permit persons to whom the Software is
+ // furnished to do so, subject to the following conditions:
+ //
+ // The above copyright notice and this permission notice shall be included in
+ // all copies or substantial portions of the Software.
+ //
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ // IN THE SOFTWARE.
+ if (!arg) {
+ // Need double quotation for empty argument
+ return '""';
+ }
+ if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
+ // No quotation needed
+ return arg;
+ }
+ if (!arg.includes('"') && !arg.includes('\\')) {
+ // No embedded double quotes or backslashes, so I can just wrap
+ // quote marks around the whole thing.
+ return `"${arg}"`;
+ }
+ // Expected input/output:
+ // input : hello"world
+ // output: "hello\"world"
+ // input : hello""world
+ // output: "hello\"\"world"
+ // input : hello\world
+ // output: hello\world
+ // input : hello\\world
+ // output: hello\\world
+ // input : hello\"world
+ // output: "hello\\\"world"
+ // input : hello\\"world
+ // output: "hello\\\\\"world"
+ // input : hello world\
+ // output: "hello world\\" - note the comment in libuv actually reads "hello world\"
+ // but it appears the comment is wrong, it should be "hello world\\"
+ let reverse = '"';
+ let quoteHit = true;
+ for (let i = arg.length; i > 0; i--) {
+ // walk the string in reverse
+ reverse += arg[i - 1];
+ if (quoteHit && arg[i - 1] === '\\') {
+ reverse += '\\';
+ }
+ else if (arg[i - 1] === '"') {
+ quoteHit = true;
+ reverse += '\\';
+ }
+ else {
+ quoteHit = false;
+ }
+ }
+ reverse += '"';
+ return reverse
+ .split('')
+ .reverse()
+ .join('');
+ }
+ _cloneExecOptions(options) {
+ options = options || {};
+ const result = {
+ cwd: options.cwd || process.cwd(),
+ env: options.env || process.env,
+ silent: options.silent || false,
+ windowsVerbatimArguments: options.windowsVerbatimArguments || false,
+ failOnStdErr: options.failOnStdErr || false,
+ ignoreReturnCode: options.ignoreReturnCode || false,
+ delay: options.delay || 10000
+ };
+ result.outStream = options.outStream || process.stdout;
+ result.errStream = options.errStream || process.stderr;
+ return result;
+ }
+ _getSpawnOptions(options, toolPath) {
+ options = options || {};
+ const result = {};
+ result.cwd = options.cwd;
+ result.env = options.env;
+ result['windowsVerbatimArguments'] =
+ options.windowsVerbatimArguments || this._isCmdFile();
+ if (options.windowsVerbatimArguments) {
+ result.argv0 = `"${toolPath}"`;
+ }
+ return result;
+ }
+ /**
+ * Exec a tool.
+ * Output will be streamed to the live console.
+ * Returns promise with return code
+ *
+ * @param tool path to tool to exec
+ * @param options optional exec options. See ExecOptions
+ * @returns number
+ */
+ exec() {
+ return __awaiter(this, void 0, void 0, function* () {
+ // root the tool path if it is unrooted and contains relative pathing
+ if (!ioUtil.isRooted(this.toolPath) &&
+ (this.toolPath.includes('/') ||
+ (IS_WINDOWS && this.toolPath.includes('\\')))) {
+ // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
+ this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
+ }
+ // if the tool is only a file name, then resolve it from the PATH
+ // otherwise verify it exists (add extension on Windows if necessary)
+ this.toolPath = yield io.which(this.toolPath, true);
+ return new Promise((resolve, reject) => {
+ this._debug(`exec tool: ${this.toolPath}`);
+ this._debug('arguments:');
+ for (const arg of this.args) {
+ this._debug(` ${arg}`);
+ }
+ const optionsNonNull = this._cloneExecOptions(this.options);
+ if (!optionsNonNull.silent && optionsNonNull.outStream) {
+ optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
+ }
+ const state = new ExecState(optionsNonNull, this.toolPath);
+ state.on('debug', (message) => {
+ this._debug(message);
+ });
+ const fileName = this._getSpawnFileName();
+ const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
+ const stdbuffer = '';
+ if (cp.stdout) {
+ cp.stdout.on('data', (data) => {
+ if (this.options.listeners && this.options.listeners.stdout) {
+ this.options.listeners.stdout(data);
+ }
+ if (!optionsNonNull.silent && optionsNonNull.outStream) {
+ optionsNonNull.outStream.write(data);
+ }
+ this._processLineBuffer(data, stdbuffer, (line) => {
+ if (this.options.listeners && this.options.listeners.stdline) {
+ this.options.listeners.stdline(line);
+ }
+ });
+ });
+ }
+ const errbuffer = '';
+ if (cp.stderr) {
+ cp.stderr.on('data', (data) => {
+ state.processStderr = true;
+ if (this.options.listeners && this.options.listeners.stderr) {
+ this.options.listeners.stderr(data);
+ }
+ if (!optionsNonNull.silent &&
+ optionsNonNull.errStream &&
+ optionsNonNull.outStream) {
+ const s = optionsNonNull.failOnStdErr
+ ? optionsNonNull.errStream
+ : optionsNonNull.outStream;
+ s.write(data);
+ }
+ this._processLineBuffer(data, errbuffer, (line) => {
+ if (this.options.listeners && this.options.listeners.errline) {
+ this.options.listeners.errline(line);
+ }
+ });
+ });
+ }
+ cp.on('error', (err) => {
+ state.processError = err.message;
+ state.processExited = true;
+ state.processClosed = true;
+ state.CheckComplete();
+ });
+ cp.on('exit', (code) => {
+ state.processExitCode = code;
+ state.processExited = true;
+ this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
+ state.CheckComplete();
+ });
+ cp.on('close', (code) => {
+ state.processExitCode = code;
+ state.processExited = true;
+ state.processClosed = true;
+ this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
+ state.CheckComplete();
+ });
+ state.on('done', (error, exitCode) => {
+ if (stdbuffer.length > 0) {
+ this.emit('stdline', stdbuffer);
+ }
+ if (errbuffer.length > 0) {
+ this.emit('errline', errbuffer);
+ }
+ cp.removeAllListeners();
+ if (error) {
+ reject(error);
+ }
+ else {
+ resolve(exitCode);
+ }
+ });
+ if (this.options.input) {
+ if (!cp.stdin) {
+ throw new Error('child process missing stdin');
+ }
+ cp.stdin.end(this.options.input);
+ }
+ });
+ });
+ }
+}
+exports.ToolRunner = ToolRunner;
+/**
+ * Convert an arg string to an array of args. Handles escaping
+ *
+ * @param argString string of arguments
+ * @returns string[] array of arguments
+ */
+function argStringToArray(argString) {
+ const args = [];
+ let inQuotes = false;
+ let escaped = false;
+ let arg = '';
+ function append(c) {
+ // we only escape double quotes.
+ if (escaped && c !== '"') {
+ arg += '\\';
+ }
+ arg += c;
+ escaped = false;
+ }
+ for (let i = 0; i < argString.length; i++) {
+ const c = argString.charAt(i);
+ if (c === '"') {
+ if (!escaped) {
+ inQuotes = !inQuotes;
+ }
+ else {
+ append(c);
+ }
+ continue;
+ }
+ if (c === '\\' && escaped) {
+ append(c);
+ continue;
+ }
+ if (c === '\\' && inQuotes) {
+ escaped = true;
+ continue;
+ }
+ if (c === ' ' && !inQuotes) {
+ if (arg.length > 0) {
+ args.push(arg);
+ arg = '';
+ }
+ continue;
+ }
+ append(c);
+ }
+ if (arg.length > 0) {
+ args.push(arg.trim());
+ }
+ return args;
+}
+exports.argStringToArray = argStringToArray;
+class ExecState extends events.EventEmitter {
+ constructor(options, toolPath) {
+ super();
+ this.processClosed = false; // tracks whether the process has exited and stdio is closed
+ this.processError = '';
+ this.processExitCode = 0;
+ this.processExited = false; // tracks whether the process has exited
+ this.processStderr = false; // tracks whether stderr was written to
+ this.delay = 10000; // 10 seconds
+ this.done = false;
+ this.timeout = null;
+ if (!toolPath) {
+ throw new Error('toolPath must not be empty');
+ }
+ this.options = options;
+ this.toolPath = toolPath;
+ if (options.delay) {
+ this.delay = options.delay;
+ }
+ }
+ CheckComplete() {
+ if (this.done) {
+ return;
+ }
+ if (this.processClosed) {
+ this._setResult();
+ }
+ else if (this.processExited) {
+ this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
+ }
+ }
+ _debug(message) {
+ this.emit('debug', message);
+ }
+ _setResult() {
+ // determine whether there is an error
+ let error;
+ if (this.processExited) {
+ if (this.processError) {
+ error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
+ }
+ else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
+ error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
+ }
+ else if (this.processStderr && this.options.failOnStdErr) {
+ error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
+ }
+ }
+ // clear the timeout
+ if (this.timeout) {
+ clearTimeout(this.timeout);
+ this.timeout = null;
+ }
+ this.done = true;
+ this.emit('done', error, this.processExitCode);
+ }
+ static HandleTimeout(state) {
+ if (state.done) {
+ return;
+ }
+ if (!state.processClosed && state.processExited) {
+ const message = `The STDIO streams did not close within ${state.delay /
+ 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
+ state._debug(message);
+ }
+ state._setResult();
+ }
+}
+//# sourceMappingURL=toolrunner.js.map
+
+/***/ }),
+
+/***/ 689:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+try {
+ var util = __webpack_require__(669);
+ /* istanbul ignore next */
+ if (typeof util.inherits !== 'function') throw '';
+ module.exports = util.inherits;
+} catch (e) {
+ /* istanbul ignore next */
+ module.exports = __webpack_require__(315);
+}
+
+
+/***/ }),
+
+/***/ 692:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+class Deprecation extends Error {
+ constructor(message) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = 'Deprecation';
+ }
+
+}
+
+exports.Deprecation = Deprecation;
+
+
+/***/ }),
+
+/***/ 696:
+/***/ (function(module) {
+
+"use strict";
+
+
+/*!
+ * isobject
+ *
+ * Copyright (c) 2014-2017, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+function isObject(val) {
+ return val != null && typeof val === 'object' && Array.isArray(val) === false;
+}
+
+/*!
+ * is-plain-object
+ *
+ * Copyright (c) 2014-2017, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+function isObjectObject(o) {
+ return isObject(o) === true
+ && Object.prototype.toString.call(o) === '[object Object]';
+}
+
+function isPlainObject(o) {
+ var ctor,prot;
+
+ if (isObjectObject(o) === false) return false;
+
+ // If has modified constructor
+ ctor = o.constructor;
+ if (typeof ctor !== 'function') return false;
+
+ // If has modified prototype
+ prot = ctor.prototype;
+ if (isObjectObject(prot) === false) return false;
+
+ // If constructor does not have an Object-specific method
+ if (prot.hasOwnProperty('isPrototypeOf') === false) {
+ return false;
+ }
+
+ // Most likely a plain Object
+ return true;
+}
+
+module.exports = isPlainObject;
+
+
+/***/ }),
+
+/***/ 697:
+/***/ (function(module) {
+
+"use strict";
+
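+// Promise "finally" helper: runs onFinally after the promise settles, then
+// passes through the original resolution value or re-throws the original error.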
+module.exports = (promise, onFinally) => {
+ onFinally = onFinally || (() => {});
+
+ return promise.then(
+ val => new Promise(resolve => {
+ resolve(onFinally());
+ }).then(() => val),
+ err => new Promise(resolve => {
+ resolve(onFinally());
+ }).then(() => {
+ throw err;
+ })
+ );
+};
+
+
+/***/ }),
+
+/***/ 722:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const restore_1 = __importDefault(__webpack_require__(206));
+const save_1 = __importDefault(__webpack_require__(781));
+const crypto = __importStar(__webpack_require__(417));
+const fs = __importStar(__webpack_require__(747));
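+// checksumFile streams the file at `path` through the given hash algorithm and
+// resolves with its hex digest.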
+function checksumFile(hashName, path) {
+ return new Promise((resolve, reject) => {
+ const hash = crypto.createHash(hashName);
+ const stream = fs.createReadStream(path);
+ stream.on("error", err => reject(err));
+ stream.on("data", chunk => hash.update(chunk));
+ stream.on("end", () => resolve(hash.digest("hex")));
+ });
+}
+const pathExists = (path) => __awaiter(void 0, void 0, void 0, function* () { return !!(yield fs.promises.stat(path).catch(() => false)); });
+const getLintCacheDir = () => `${process.env.HOME}/.cache/golangci-lint`;
+const getCacheDirs = () => {
+ // Non-existent dirs are ok here: it still works.
+ return [getLintCacheDir(), `${process.env.HOME}/.cache/go-build`, `${process.env.HOME}/go/pkg`];
+};
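+// getIntervalKey returns a number that changes once every `invalidationIntervalDays`
+// days, so cache keys (and therefore caches) roll over periodically.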
+const getIntervalKey = (invalidationIntervalDays) => {
+ const now = new Date();
+ const secondsSinceEpoch = now.getTime() / 1000;
+ const intervalNumber = Math.floor(secondsSinceEpoch / (invalidationIntervalDays * 86400));
+ return intervalNumber.toString();
+};
+function buildCacheKeys() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const keys = [];
+ let cacheKey = `golangci-lint.cache-`;
+ keys.push(cacheKey);
+ // Periodically invalidate the cache because new code is being added.
+ // TODO: configure it via inputs.
+ cacheKey += `${getIntervalKey(7)}-`;
+ keys.push(cacheKey);
+ if (yield pathExists(`go.mod`)) {
+ // Add checksum to key to invalidate a cache when dependencies change.
+ cacheKey += yield checksumFile(`sha1`, `go.mod`);
+ }
+ else {
+ cacheKey += `nogomod`;
+ }
+ keys.push(cacheKey);
+ return keys;
+ });
+}
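+// restoreCache drives the bundled cache restore step by passing the computed keys and
+// directories through the same INPUT_* environment variables the cache action reads.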
+function restoreCache() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const startedAt = Date.now();
+ const keys = yield buildCacheKeys();
+ const primaryKey = keys.pop();
+ const restoreKeys = keys.reverse();
+ core.info(`Primary analysis cache key is '${primaryKey}', restore keys are '${restoreKeys.join(` | `)}'`);
+ process.env[`INPUT_RESTORE-KEYS`] = restoreKeys.join(`\n`);
+ process.env.INPUT_KEY = primaryKey;
+ process.env.INPUT_PATH = getCacheDirs().join(`\n`);
+ // Tell golangci-lint to use our cache directory.
+ process.env.GOLANGCI_LINT_CACHE = getLintCacheDir();
+ yield restore_1.default();
+ core.info(`Restored cache for golangci-lint from key '${primaryKey}' in ${Date.now() - startedAt}ms`);
+ });
+}
+exports.restoreCache = restoreCache;
+function saveCache() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const startedAt = Date.now();
+ const cacheDirs = getCacheDirs();
+ process.env.INPUT_PATH = cacheDirs.join(`\n`);
+ yield save_1.default();
+ core.info(`Saved cache for golangci-lint from paths '${cacheDirs.join(`, `)}' in ${Date.now() - startedAt}ms`);
+ });
+}
+exports.saveCache = saveCache;
+
+
+/***/ }),
+
+/***/ 728:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+class SearchState {
+ constructor(path, level) {
+ this.path = path;
+ this.level = level;
+ }
+}
+exports.SearchState = SearchState;
+//# sourceMappingURL=internal-search-state.js.map
+
+/***/ }),
+
+/***/ 741:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const { requestLog } = __webpack_require__(916);
+const {
+ restEndpointMethods
+} = __webpack_require__(559);
+
+const Core = __webpack_require__(37);
+
+const CORE_PLUGINS = [
+ __webpack_require__(890),
+ __webpack_require__(953), // deprecated: remove in v17
+ requestLog,
+ __webpack_require__(786),
+ restEndpointMethods,
+ __webpack_require__(341),
+
+ __webpack_require__(850) // deprecated: remove in v17
+];
+
+const OctokitRest = Core.plugin(CORE_PLUGINS);
+
+function DeprecatedOctokit(options) {
+ const warn =
+ options && options.log && options.log.warn
+ ? options.log.warn
+ : console.warn;
+ warn(
+ '[@octokit/rest] `const Octokit = require("@octokit/rest")` is deprecated. Use `const { Octokit } = require("@octokit/rest")` instead'
+ );
+ return new OctokitRest(options);
+}
+
+const Octokit = Object.assign(DeprecatedOctokit, {
+ Octokit: OctokitRest
+});
+
+Object.keys(OctokitRest).forEach(key => {
+ /* istanbul ignore else */
+ if (OctokitRest.hasOwnProperty(key)) {
+ Octokit[key] = OctokitRest[key];
+ }
+});
+
+module.exports = Octokit;
+
+
+/***/ }),
+
+/***/ 742:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var fs = __webpack_require__(747)
+var core
+if (process.platform === 'win32' || global.TESTING_WINDOWS) {
+ core = __webpack_require__(818)
+} else {
+ core = __webpack_require__(197)
+}
+
+module.exports = isexe
+isexe.sync = sync
+
+function isexe (path, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = {}
+ }
+
+ if (!cb) {
+ if (typeof Promise !== 'function') {
+ throw new TypeError('callback not provided')
+ }
+
+ return new Promise(function (resolve, reject) {
+ isexe(path, options || {}, function (er, is) {
+ if (er) {
+ reject(er)
+ } else {
+ resolve(is)
+ }
+ })
+ })
+ }
+
+ core(path, options || {}, function (er, is) {
+ // ignore EACCES because that just means we aren't allowed to run it
+ if (er) {
+ if (er.code === 'EACCES' || options && options.ignoreErrors) {
+ er = null
+ is = false
+ }
+ }
+ cb(er, is)
+ })
+}
+
+function sync (path, options) {
+ // my kingdom for a filtered catch
+ try {
+ return core.sync(path, options || {})
+ } catch (er) {
+ if (options && options.ignoreErrors || er.code === 'EACCES') {
+ return false
+ } else {
+ throw er
+ }
+ }
+}
+
+
+/***/ }),
+
+/***/ 747:
+/***/ (function(module) {
+
+module.exports = require("fs");
+
+/***/ }),
+
+/***/ 753:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var endpoint = __webpack_require__(385);
+var universalUserAgent = __webpack_require__(211);
+var isPlainObject = _interopDefault(__webpack_require__(696));
+var nodeFetch = _interopDefault(__webpack_require__(454));
+var requestError = __webpack_require__(463);
+
+const VERSION = "5.4.2";
+
+function getBufferResponse(response) {
+ return response.arrayBuffer();
+}
+
+function fetchWrapper(requestOptions) {
+ if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
+ requestOptions.body = JSON.stringify(requestOptions.body);
+ }
+
+ let headers = {};
+ let status;
+ let url;
+ const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
+ return fetch(requestOptions.url, Object.assign({
+ method: requestOptions.method,
+ body: requestOptions.body,
+ headers: requestOptions.headers,
+ redirect: requestOptions.redirect
+ }, requestOptions.request)).then(response => {
+ url = response.url;
+ status = response.status;
+
+ for (const keyAndValue of response.headers) {
+ headers[keyAndValue[0]] = keyAndValue[1];
+ }
+
+ if (status === 204 || status === 205) {
+ return;
+ } // GitHub API returns 200 for HEAD requests
+
+
+ if (requestOptions.method === "HEAD") {
+ if (status < 400) {
+ return;
+ }
+
+ throw new requestError.RequestError(response.statusText, status, {
+ headers,
+ request: requestOptions
+ });
+ }
+
+ if (status === 304) {
+ throw new requestError.RequestError("Not modified", status, {
+ headers,
+ request: requestOptions
+ });
+ }
+
+ if (status >= 400) {
+ return response.text().then(message => {
+ const error = new requestError.RequestError(message, status, {
+ headers,
+ request: requestOptions
+ });
+
+ try {
+ let responseBody = JSON.parse(error.message);
+ Object.assign(error, responseBody);
+ let errors = responseBody.errors; // Assumption: `errors` is always an array
+
+ error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
+ } catch (e) {// ignore, see octokit/rest.js#684
+ }
+
+ throw error;
+ });
+ }
+
+ const contentType = response.headers.get("content-type");
+
+ if (/application\/json/.test(contentType)) {
+ return response.json();
+ }
+
+ if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
+ return response.text();
+ }
+
+ return getBufferResponse(response);
+ }).then(data => {
+ return {
+ status,
+ url,
+ headers,
+ data
+ };
+ }).catch(error => {
+ if (error instanceof requestError.RequestError) {
+ throw error;
+ }
+
+ throw new requestError.RequestError(error.message, 500, {
+ headers,
+ request: requestOptions
+ });
+ });
+}
+
+function withDefaults(oldEndpoint, newDefaults) {
+ const endpoint = oldEndpoint.defaults(newDefaults);
+
+ const newApi = function (route, parameters) {
+ const endpointOptions = endpoint.merge(route, parameters);
+
+ if (!endpointOptions.request || !endpointOptions.request.hook) {
+ return fetchWrapper(endpoint.parse(endpointOptions));
+ }
+
+ const request = (route, parameters) => {
+ return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
+ };
+
+ Object.assign(request, {
+ endpoint,
+ defaults: withDefaults.bind(null, endpoint)
+ });
+ return endpointOptions.request.hook(request, endpointOptions);
+ };
+
+ return Object.assign(newApi, {
+ endpoint,
+ defaults: withDefaults.bind(null, endpoint)
+ });
+}
+
+const request = withDefaults(endpoint.endpoint, {
+ headers: {
+ "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
+ }
+});
+
+exports.request = request;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 760:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var m = o[Symbol.asyncIterator], i;
+ return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+ function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+ function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
+var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var g = generator.apply(thisArg, _arguments || []), i, q = [];
+ return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
+ function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
+ function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+ function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+ function fulfill(value) { resume("next", value); }
+ function reject(value) { resume("throw", value); }
+ function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __webpack_require__(470);
+const fs = __webpack_require__(747);
+const globOptionsHelper = __webpack_require__(601);
+const path = __webpack_require__(622);
+const patternHelper = __webpack_require__(597);
+const internal_match_kind_1 = __webpack_require__(327);
+const internal_pattern_1 = __webpack_require__(923);
+const internal_search_state_1 = __webpack_require__(728);
+const IS_WINDOWS = process.platform === 'win32';
+class DefaultGlobber {
+ constructor(options) {
+ this.patterns = [];
+ this.searchPaths = [];
+ this.options = globOptionsHelper.getOptions(options);
+ }
+ getSearchPaths() {
+ // Return a copy
+ return this.searchPaths.slice();
+ }
+ glob() {
+ var e_1, _a;
+ return __awaiter(this, void 0, void 0, function* () {
+ const result = [];
+ try {
+ for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
+ const itemPath = _c.value;
+ result.push(itemPath);
+ }
+ }
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
+ finally {
+ try {
+ if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
+ }
+ finally { if (e_1) throw e_1.error; }
+ }
+ return result;
+ });
+ }
+ globGenerator() {
+ return __asyncGenerator(this, arguments, function* globGenerator_1() {
+ // Fill in default options
+ const options = globOptionsHelper.getOptions(this.options);
+ // Implicit descendants?
+ const patterns = [];
+ for (const pattern of this.patterns) {
+ patterns.push(pattern);
+ if (options.implicitDescendants &&
+ (pattern.trailingSeparator ||
+ pattern.segments[pattern.segments.length - 1] !== '**')) {
+ patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
+ }
+ }
+ // Push the search paths
+ const stack = [];
+ for (const searchPath of patternHelper.getSearchPaths(patterns)) {
+ core.debug(`Search path '${searchPath}'`);
+ // Exists?
+ try {
+ // Intentionally using lstat. Detection for broken symlink
+ // will be performed later (if following symlinks).
+ yield __await(fs.promises.lstat(searchPath));
+ }
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ continue;
+ }
+ throw err;
+ }
+ stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
+ }
+ // Search
+ const traversalChain = []; // used to detect cycles
+ while (stack.length) {
+ // Pop
+ const item = stack.pop();
+ // Match?
+ const match = patternHelper.match(patterns, item.path);
+ const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
+ if (!match && !partialMatch) {
+ continue;
+ }
+ // Stat
+ const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain));
+ // Broken symlink, or symlink cycle detected, or no longer exists
+ if (!stats) {
+ continue;
+ }
+ // Directory
+ if (stats.isDirectory()) {
+ // Matched
+ if (match & internal_match_kind_1.MatchKind.Directory) {
+ yield yield __await(item.path);
+ }
+ // Descend?
+ else if (!partialMatch) {
+ continue;
+ }
+ // Push the child items in reverse
+ const childLevel = item.level + 1;
+ const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
+ stack.push(...childItems.reverse());
+ }
+ // File
+ else if (match & internal_match_kind_1.MatchKind.File) {
+ yield yield __await(item.path);
+ }
+ }
+ });
+ }
+ /**
+ * Constructs a DefaultGlobber
+ */
+ static create(patterns, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const result = new DefaultGlobber(options);
+ if (IS_WINDOWS) {
+ patterns = patterns.replace(/\r\n/g, '\n');
+ patterns = patterns.replace(/\r/g, '\n');
+ }
+ const lines = patterns.split('\n').map(x => x.trim());
+ for (const line of lines) {
+ // Empty or comment
+ if (!line || line.startsWith('#')) {
+ continue;
+ }
+ // Pattern
+ else {
+ result.patterns.push(new internal_pattern_1.Pattern(line));
+ }
+ }
+ result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
+ return result;
+ });
+ }
+ static stat(item, options, traversalChain) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Note:
+ // `stat` returns info about the target of a symlink (or symlink chain)
+ // `lstat` returns info about a symlink itself
+ let stats;
+ if (options.followSymbolicLinks) {
+ try {
+ // Use `stat` (following symlinks)
+ stats = yield fs.promises.stat(item.path);
+ }
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ if (options.omitBrokenSymbolicLinks) {
+ core.debug(`Broken symlink '${item.path}'`);
+ return undefined;
+ }
+ throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
+ }
+ throw err;
+ }
+ }
+ else {
+ // Use `lstat` (not following symlinks)
+ stats = yield fs.promises.lstat(item.path);
+ }
+ // Note, isDirectory() returns false for the lstat of a symlink
+ if (stats.isDirectory() && options.followSymbolicLinks) {
+ // Get the realpath
+ const realPath = yield fs.promises.realpath(item.path);
+ // Fixup the traversal chain to match the item level
+ while (traversalChain.length >= item.level) {
+ traversalChain.pop();
+ }
+ // Test for a cycle
+ if (traversalChain.some((x) => x === realPath)) {
+ core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
+ return undefined;
+ }
+ // Update the traversal chain
+ traversalChain.push(realPath);
+ }
+ return stats;
+ });
+ }
+}
+exports.DefaultGlobber = DefaultGlobber;
+//# sourceMappingURL=internal-globber.js.map
+
+/***/ }),
+
+/***/ 761:
+/***/ (function(module) {
+
+module.exports = require("zlib");
+
+/***/ }),
+
+/***/ 763:
+/***/ (function(module) {
+
+module.exports = removeHook
+
+function removeHook (state, name, method) {
+ if (!state.registry[name]) {
+ return
+ }
+
+ var index = state.registry[name]
+ .map(function (registered) { return registered.orig })
+ .indexOf(method)
+
+ if (index === -1) {
+ return
+ }
+
+ state.registry[name].splice(index, 1)
+}
+
+
+/***/ }),
+
+/***/ 768:
+/***/ (function(module) {
+
+"use strict";
+
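+// Strips a single trailing newline (\n, \r or \r\n) from a string or Buffer.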
+module.exports = function (x) {
+ var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt();
+ var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt();
+
+ if (x[x.length - 1] === lf) {
+ x = x.slice(0, x.length - 1);
+ }
+
+ if (x[x.length - 1] === cr) {
+ x = x.slice(0, x.length - 1);
+ }
+
+ return x;
+};
+
+
+/***/ }),
+
+/***/ 777:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = getFirstPage
+
+const getPage = __webpack_require__(265)
+
+function getFirstPage (octokit, link, headers) {
+ return getPage(octokit, link, 'first', headers)
+}
+
+
+/***/ }),
+
+/***/ 781:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const path = __importStar(__webpack_require__(622));
+const cacheHttpClient = __importStar(__webpack_require__(342));
+const constants_1 = __webpack_require__(573);
+const tar_1 = __webpack_require__(570);
+const utils = __importStar(__webpack_require__(867));
+function run() {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ if (!utils.isValidEvent()) {
+ utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils
+ .getSupportedEvents()
+ .join(", ")} events are supported at this time.`);
+ return;
+ }
+ const state = utils.getCacheState();
+ // Inputs are re-evaluated before the post action, so we want the original key used for restore
+ const primaryKey = core.getState(constants_1.State.CacheKey);
+ if (!primaryKey) {
+ utils.logWarning(`Error retrieving key from state.`);
+ return;
+ }
+ if (utils.isExactKeyMatch(primaryKey, state)) {
+ core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
+ return;
+ }
+ core.debug("Reserving Cache");
+ const cacheId = yield cacheHttpClient.reserveCache(primaryKey);
+ if (cacheId == -1) {
+ core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
+ return;
+ }
+ core.debug(`Cache ID: ${cacheId}`);
+ const cachePaths = yield utils.resolvePaths(core
+ .getInput(constants_1.Inputs.Path, { required: true })
+ .split("\n")
+ .filter(x => x !== ""));
+ core.debug("Cache Paths:");
+ core.debug(`${JSON.stringify(cachePaths)}`);
+ const archiveFolder = yield utils.createTempDirectory();
+ const archivePath = path.join(archiveFolder, constants_1.CacheFilename);
+ core.debug(`Archive Path: ${archivePath}`);
+ yield tar_1.createTar(archiveFolder, cachePaths);
+ const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
+ const archiveFileSize = utils.getArchiveFileSize(archivePath);
+ core.debug(`File Size: ${archiveFileSize}`);
+ if (archiveFileSize > fileSizeLimit) {
+ utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
+ return;
+ }
+ core.debug(`Saving Cache (ID: ${cacheId})`);
+ yield cacheHttpClient.saveCache(cacheId, archivePath);
+ }
+ catch (error) {
+ utils.logWarning(error.message);
+ }
+ });
+}
+exports.default = run;
+
+
+/***/ }),
+
+/***/ 786:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = paginatePlugin;
+
+const { paginateRest } = __webpack_require__(322);
+
+function paginatePlugin(octokit) {
+ Object.assign(octokit, paginateRest(octokit));
+}
+
+
+/***/ }),
+
+/***/ 790:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticate;
+
+const { Deprecation } = __webpack_require__(692);
+const once = __webpack_require__(969);
+
+const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation));
+
+function authenticate(state, options) {
+ deprecateAuthenticate(
+ state.octokit.log,
+ new Deprecation(
+ '[@octokit/rest] octokit.authenticate() is deprecated. Use "auth" constructor option instead.'
+ )
+ );
+
+ if (!options) {
+ state.auth = false;
+ return;
+ }
+
+ switch (options.type) {
+ case "basic":
+ if (!options.username || !options.password) {
+ throw new Error(
+ "Basic authentication requires both a username and password to be set"
+ );
+ }
+ break;
+
+ case "oauth":
+ if (!options.token && !(options.key && options.secret)) {
+ throw new Error(
+ "OAuth2 authentication requires a token or key & secret to be set"
+ );
+ }
+ break;
+
+ case "token":
+ case "app":
+ if (!options.token) {
+ throw new Error("Token authentication requires a token to be set");
+ }
+ break;
+
+ default:
+ throw new Error(
+ "Invalid authentication type, must be 'basic', 'oauth', 'token' or 'app'"
+ );
+ }
+
+ state.auth = options;
+}
+
+
+/***/ }),
+
+/***/ 796:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var osName = _interopDefault(__webpack_require__(2));
+
+function getUserAgent() {
+ try {
+ return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
+ } catch (error) {
+ if (/wmic os get Caption/.test(error.message)) {
+ return "Windows <version undetectable>";
+ }
+
+ throw error;
+ }
+}
+
+exports.getUserAgent = getUserAgent;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 813:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
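+// auth infers the token type from its shape: three dot-separated parts is a JWT
+// ("app"), a "v1."-style prefix marks an "installation" token, anything else "oauth".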
+async function auth(token) {
+ const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth";
+ return {
+ type: "token",
+ token: token,
+ tokenType
+ };
+}
+
+/**
+ * Prefix token for usage in the Authorization header
+ *
+ * @param token OAuth token or JSON Web Token
+ */
+function withAuthorizationPrefix(token) {
+ if (token.split(/\./).length === 3) {
+ return `bearer ${token}`;
+ }
+
+ return `token ${token}`;
+}
+
+async function hook(token, request, route, parameters) {
+ const endpoint = request.endpoint.merge(route, parameters);
+ endpoint.headers.authorization = withAuthorizationPrefix(token);
+ return request(endpoint);
+}
+
+const createTokenAuth = function createTokenAuth(token) {
+ if (!token) {
+ throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
+ }
+
+ if (typeof token !== "string") {
+ throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
+ }
+
+ token = token.replace(/^(token|bearer) +/i, "");
+ return Object.assign(auth.bind(null, token), {
+ hook: hook.bind(null, token)
+ });
+};
+
+exports.createTokenAuth = createTokenAuth;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 814:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = which
+which.sync = whichSync
+
+var isWindows = process.platform === 'win32' ||
+ process.env.OSTYPE === 'cygwin' ||
+ process.env.OSTYPE === 'msys'
+
+var path = __webpack_require__(622)
+var COLON = isWindows ? ';' : ':'
+var isexe = __webpack_require__(742)
+
+function getNotFoundError (cmd) {
+ var er = new Error('not found: ' + cmd)
+ er.code = 'ENOENT'
+
+ return er
+}
+
+function getPathInfo (cmd, opt) {
+ var colon = opt.colon || COLON
+ var pathEnv = opt.path || process.env.PATH || ''
+ var pathExt = ['']
+
+ pathEnv = pathEnv.split(colon)
+
+ var pathExtExe = ''
+ if (isWindows) {
+ pathEnv.unshift(process.cwd())
+ pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM')
+ pathExt = pathExtExe.split(colon)
+
+
+ // Always test the cmd itself first. isexe will check to make sure
+ // it's found in the pathExt set.
+ if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
+ pathExt.unshift('')
+ }
+
+ // If it has a slash, then we don't bother searching the pathenv.
+ // just check the file itself, and that's it.
+ if (cmd.match(/\//) || isWindows && cmd.match(/\\/))
+ pathEnv = ['']
+
+ return {
+ env: pathEnv,
+ ext: pathExt,
+ extExe: pathExtExe
+ }
+}
+
+function which (cmd, opt, cb) {
+ if (typeof opt === 'function') {
+ cb = opt
+ opt = {}
+ }
+
+ var info = getPathInfo(cmd, opt)
+ var pathEnv = info.env
+ var pathExt = info.ext
+ var pathExtExe = info.extExe
+ var found = []
+
+ ;(function F (i, l) {
+ if (i === l) {
+ if (opt.all && found.length)
+ return cb(null, found)
+ else
+ return cb(getNotFoundError(cmd))
+ }
+
+ var pathPart = pathEnv[i]
+ if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
+ pathPart = pathPart.slice(1, -1)
+
+ var p = path.join(pathPart, cmd)
+ if (!pathPart && (/^\.[\\\/]/).test(cmd)) {
+ p = cmd.slice(0, 2) + p
+ }
+ ;(function E (ii, ll) {
+ if (ii === ll) return F(i + 1, l)
+ var ext = pathExt[ii]
+ isexe(p + ext, { pathExt: pathExtExe }, function (er, is) {
+ if (!er && is) {
+ if (opt.all)
+ found.push(p + ext)
+ else
+ return cb(null, p + ext)
+ }
+ return E(ii + 1, ll)
+ })
+ })(0, pathExt.length)
+ })(0, pathEnv.length)
+}
+
+function whichSync (cmd, opt) {
+ opt = opt || {}
+
+ var info = getPathInfo(cmd, opt)
+ var pathEnv = info.env
+ var pathExt = info.ext
+ var pathExtExe = info.extExe
+ var found = []
+
+ for (var i = 0, l = pathEnv.length; i < l; i ++) {
+ var pathPart = pathEnv[i]
+ if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
+ pathPart = pathPart.slice(1, -1)
+
+ var p = path.join(pathPart, cmd)
+ if (!pathPart && /^\.[\\\/]/.test(cmd)) {
+ p = cmd.slice(0, 2) + p
+ }
+ for (var j = 0, ll = pathExt.length; j < ll; j ++) {
+ var cur = p + pathExt[j]
+ var is
+ try {
+ is = isexe.sync(cur, { pathExt: pathExtExe })
+ if (is) {
+ if (opt.all)
+ found.push(cur)
+ else
+ return cur
+ }
+ } catch (ex) {}
+ }
+ }
+
+ if (opt.all && found.length)
+ return found
+
+ if (opt.nothrow)
+ return null
+
+ throw getNotFoundError(cmd)
+}
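+
+// Illustrative usage sketch for the `which`/`whichSync` helpers above (not part
+// of the vendored `which` source); the command name "node" is only an example.
+function exampleWhichUsage() {
+  // Asynchronous lookup: resolves the first matching executable on the PATH,
+  // or calls back with an ENOENT error when nothing is found.
+  which('node', function (er, resolvedPath) {
+    if (er) return console.error(er.message) // "not found: node"
+    console.log(resolvedPath)                // e.g. "/usr/bin/node"
+  })
+  // Synchronous variant; `nothrow` returns null instead of throwing.
+  return whichSync('node', { nothrow: true })
+}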
+
+
+/***/ }),
+
+/***/ 816:
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = /^#!.*/;
+
+
+/***/ }),
+
+/***/ 818:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = isexe
+isexe.sync = sync
+
+var fs = __webpack_require__(747)
+
+function checkPathExt (path, options) {
+ var pathext = options.pathExt !== undefined ?
+ options.pathExt : process.env.PATHEXT
+
+ if (!pathext) {
+ return true
+ }
+
+ pathext = pathext.split(';')
+ if (pathext.indexOf('') !== -1) {
+ return true
+ }
+ for (var i = 0; i < pathext.length; i++) {
+ var p = pathext[i].toLowerCase()
+ if (p && path.substr(-p.length).toLowerCase() === p) {
+ return true
+ }
+ }
+ return false
+}
+
+function checkStat (stat, path, options) {
+ if (!stat.isSymbolicLink() && !stat.isFile()) {
+ return false
+ }
+ return checkPathExt(path, options)
+}
+
+function isexe (path, options, cb) {
+ fs.stat(path, function (er, stat) {
+ cb(er, er ? false : checkStat(stat, path, options))
+ })
+}
+
+function sync (path, options) {
+ return checkStat(fs.statSync(path), path, options)
+}
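+
+// Illustrative usage sketch for the isexe helpers above (not part of the
+// vendored isexe source); the path and pathExt list are placeholders. This is
+// the Windows flavour: a path counts as executable when it is a regular file
+// (or symlink) whose extension matches PATHEXT or an explicit pathExt override.
+function exampleIsexeUsage() {
+  isexe('C:\\tools\\task.cmd', { pathExt: '.EXE;.CMD;.BAT' }, function (er, ok) {
+    if (!er) console.log(ok) // true when the extension matches the list
+  })
+  return isexe.sync('C:\\tools\\task.cmd', { pathExt: '.EXE;.CMD;.BAT' })
+}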
+
+
+/***/ }),
+
+/***/ 826:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var rng = __webpack_require__(139);
+var bytesToUuid = __webpack_require__(105);
+
+function v4(options, buf, offset) {
+ var i = buf && offset || 0;
+
+ if (typeof(options) == 'string') {
+ buf = options === 'binary' ? new Array(16) : null;
+ options = null;
+ }
+ options = options || {};
+
+ var rnds = options.random || (options.rng || rng)();
+
+ // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
+ rnds[6] = (rnds[6] & 0x0f) | 0x40;
+ rnds[8] = (rnds[8] & 0x3f) | 0x80;
+
+ // Copy bytes to buffer, if provided
+ if (buf) {
+ for (var ii = 0; ii < 16; ++ii) {
+ buf[i + ii] = rnds[ii];
+ }
+ }
+
+ return buf || bytesToUuid(rnds);
+}
+
+module.exports = v4;
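+
+// Illustrative usage sketch for the v4 generator above (not part of the
+// vendored uuid source); the values in the comments are examples only.
+function exampleUuidV4Usage() {
+  var id = v4();                      // e.g. "110ec58a-a0f2-4ac4-8393-c866d813b8d1"
+  var buf = v4(null, new Array(16));  // fill a provided 16-byte array instead
+  return { id: id, buf: buf };
+}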
+
+
+/***/ }),
+
+/***/ 835:
+/***/ (function(module) {
+
+module.exports = require("url");
+
+/***/ }),
+
+/***/ 850:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = paginationMethodsPlugin
+
+function paginationMethodsPlugin (octokit) {
+ octokit.getFirstPage = __webpack_require__(777).bind(null, octokit)
+ octokit.getLastPage = __webpack_require__(649).bind(null, octokit)
+ octokit.getNextPage = __webpack_require__(550).bind(null, octokit)
+ octokit.getPreviousPage = __webpack_require__(563).bind(null, octokit)
+ octokit.hasFirstPage = __webpack_require__(536)
+ octokit.hasLastPage = __webpack_require__(336)
+ octokit.hasNextPage = __webpack_require__(929)
+ octokit.hasPreviousPage = __webpack_require__(558)
+}
+
+
+/***/ }),
+
+/***/ 854:
+/***/ (function(module) {
+
+/**
+ * lodash (Custom Build)
+ * Build: `lodash modularize exports="npm" -o ./`
+ * Copyright jQuery Foundation and other contributors
+ * Released under MIT license
+ * Based on Underscore.js 1.8.3
+ * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+ */
+
+/** Used as the `TypeError` message for "Functions" methods. */
+var FUNC_ERROR_TEXT = 'Expected a function';
+
+/** Used to stand-in for `undefined` hash values. */
+var HASH_UNDEFINED = '__lodash_hash_undefined__';
+
+/** Used as references for various `Number` constants. */
+var INFINITY = 1 / 0;
+
+/** `Object#toString` result references. */
+var funcTag = '[object Function]',
+ genTag = '[object GeneratorFunction]',
+ symbolTag = '[object Symbol]';
+
+/** Used to match property names within property paths. */
+var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,
+ reIsPlainProp = /^\w*$/,
+ reLeadingDot = /^\./,
+ rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g;
+
+/**
+ * Used to match `RegExp`
+ * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
+ */
+var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
+
+/** Used to match backslashes in property paths. */
+var reEscapeChar = /\\(\\)?/g;
+
+/** Used to detect host constructors (Safari). */
+var reIsHostCtor = /^\[object .+?Constructor\]$/;
+
+/** Detect free variable `global` from Node.js. */
+var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
+
+/** Detect free variable `self`. */
+var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
+
+/** Used as a reference to the global object. */
+var root = freeGlobal || freeSelf || Function('return this')();
+
+/**
+ * Gets the value at `key` of `object`.
+ *
+ * @private
+ * @param {Object} [object] The object to query.
+ * @param {string} key The key of the property to get.
+ * @returns {*} Returns the property value.
+ */
+function getValue(object, key) {
+ return object == null ? undefined : object[key];
+}
+
+/**
+ * Checks if `value` is a host object in IE < 9.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a host object, else `false`.
+ */
+function isHostObject(value) {
+ // Many host objects are `Object` objects that can coerce to strings
+ // despite having improperly defined `toString` methods.
+ var result = false;
+ if (value != null && typeof value.toString != 'function') {
+ try {
+ result = !!(value + '');
+ } catch (e) {}
+ }
+ return result;
+}
+
+/** Used for built-in method references. */
+var arrayProto = Array.prototype,
+ funcProto = Function.prototype,
+ objectProto = Object.prototype;
+
+/** Used to detect overreaching core-js shims. */
+var coreJsData = root['__core-js_shared__'];
+
+/** Used to detect methods masquerading as native. */
+var maskSrcKey = (function() {
+ var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
+ return uid ? ('Symbol(src)_1.' + uid) : '';
+}());
+
+/** Used to resolve the decompiled source of functions. */
+var funcToString = funcProto.toString;
+
+/** Used to check objects for own properties. */
+var hasOwnProperty = objectProto.hasOwnProperty;
+
+/**
+ * Used to resolve the
+ * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
+ * of values.
+ */
+var objectToString = objectProto.toString;
+
+/** Used to detect if a method is native. */
+var reIsNative = RegExp('^' +
+ funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
+ .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
+);
+
+/** Built-in value references. */
+var Symbol = root.Symbol,
+ splice = arrayProto.splice;
+
+/* Built-in method references that are verified to be native. */
+var Map = getNative(root, 'Map'),
+ nativeCreate = getNative(Object, 'create');
+
+/** Used to convert symbols to primitives and strings. */
+var symbolProto = Symbol ? Symbol.prototype : undefined,
+ symbolToString = symbolProto ? symbolProto.toString : undefined;
+
+/**
+ * Creates a hash object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function Hash(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the hash.
+ *
+ * @private
+ * @name clear
+ * @memberOf Hash
+ */
+function hashClear() {
+ this.__data__ = nativeCreate ? nativeCreate(null) : {};
+}
+
+/**
+ * Removes `key` and its value from the hash.
+ *
+ * @private
+ * @name delete
+ * @memberOf Hash
+ * @param {Object} hash The hash to modify.
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function hashDelete(key) {
+ return this.has(key) && delete this.__data__[key];
+}
+
+/**
+ * Gets the hash value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf Hash
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function hashGet(key) {
+ var data = this.__data__;
+ if (nativeCreate) {
+ var result = data[key];
+ return result === HASH_UNDEFINED ? undefined : result;
+ }
+ return hasOwnProperty.call(data, key) ? data[key] : undefined;
+}
+
+/**
+ * Checks if a hash value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf Hash
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function hashHas(key) {
+ var data = this.__data__;
+ return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key);
+}
+
+/**
+ * Sets the hash `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf Hash
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the hash instance.
+ */
+function hashSet(key, value) {
+ var data = this.__data__;
+ data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
+ return this;
+}
+
+// Add methods to `Hash`.
+Hash.prototype.clear = hashClear;
+Hash.prototype['delete'] = hashDelete;
+Hash.prototype.get = hashGet;
+Hash.prototype.has = hashHas;
+Hash.prototype.set = hashSet;
+
+/**
+ * Creates an list cache object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function ListCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the list cache.
+ *
+ * @private
+ * @name clear
+ * @memberOf ListCache
+ */
+function listCacheClear() {
+ this.__data__ = [];
+}
+
+/**
+ * Removes `key` and its value from the list cache.
+ *
+ * @private
+ * @name delete
+ * @memberOf ListCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function listCacheDelete(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ return false;
+ }
+ var lastIndex = data.length - 1;
+ if (index == lastIndex) {
+ data.pop();
+ } else {
+ splice.call(data, index, 1);
+ }
+ return true;
+}
+
+/**
+ * Gets the list cache value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf ListCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function listCacheGet(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ return index < 0 ? undefined : data[index][1];
+}
+
+/**
+ * Checks if a list cache value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf ListCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function listCacheHas(key) {
+ return assocIndexOf(this.__data__, key) > -1;
+}
+
+/**
+ * Sets the list cache `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf ListCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the list cache instance.
+ */
+function listCacheSet(key, value) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ data.push([key, value]);
+ } else {
+ data[index][1] = value;
+ }
+ return this;
+}
+
+// Add methods to `ListCache`.
+ListCache.prototype.clear = listCacheClear;
+ListCache.prototype['delete'] = listCacheDelete;
+ListCache.prototype.get = listCacheGet;
+ListCache.prototype.has = listCacheHas;
+ListCache.prototype.set = listCacheSet;
+
+/**
+ * Creates a map cache object to store key-value pairs.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function MapCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the map.
+ *
+ * @private
+ * @name clear
+ * @memberOf MapCache
+ */
+function mapCacheClear() {
+ this.__data__ = {
+ 'hash': new Hash,
+ 'map': new (Map || ListCache),
+ 'string': new Hash
+ };
+}
+
+/**
+ * Removes `key` and its value from the map.
+ *
+ * @private
+ * @name delete
+ * @memberOf MapCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function mapCacheDelete(key) {
+ return getMapData(this, key)['delete'](key);
+}
+
+/**
+ * Gets the map value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf MapCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function mapCacheGet(key) {
+ return getMapData(this, key).get(key);
+}
+
+/**
+ * Checks if a map value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf MapCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function mapCacheHas(key) {
+ return getMapData(this, key).has(key);
+}
+
+/**
+ * Sets the map `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf MapCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the map cache instance.
+ */
+function mapCacheSet(key, value) {
+ getMapData(this, key).set(key, value);
+ return this;
+}
+
+// Add methods to `MapCache`.
+MapCache.prototype.clear = mapCacheClear;
+MapCache.prototype['delete'] = mapCacheDelete;
+MapCache.prototype.get = mapCacheGet;
+MapCache.prototype.has = mapCacheHas;
+MapCache.prototype.set = mapCacheSet;
+
+/**
+ * Gets the index at which the `key` is found in `array` of key-value pairs.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {*} key The key to search for.
+ * @returns {number} Returns the index of the matched value, else `-1`.
+ */
+function assocIndexOf(array, key) {
+ var length = array.length;
+ while (length--) {
+ if (eq(array[length][0], key)) {
+ return length;
+ }
+ }
+ return -1;
+}
+
+/**
+ * The base implementation of `_.get` without support for default values.
+ *
+ * @private
+ * @param {Object} object The object to query.
+ * @param {Array|string} path The path of the property to get.
+ * @returns {*} Returns the resolved value.
+ */
+function baseGet(object, path) {
+ path = isKey(path, object) ? [path] : castPath(path);
+
+ var index = 0,
+ length = path.length;
+
+ while (object != null && index < length) {
+ object = object[toKey(path[index++])];
+ }
+ return (index && index == length) ? object : undefined;
+}
+
+/**
+ * The base implementation of `_.isNative` without bad shim checks.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a native function,
+ * else `false`.
+ */
+function baseIsNative(value) {
+ if (!isObject(value) || isMasked(value)) {
+ return false;
+ }
+ var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor;
+ return pattern.test(toSource(value));
+}
+
+/**
+ * The base implementation of `_.toString` which doesn't convert nullish
+ * values to empty strings.
+ *
+ * @private
+ * @param {*} value The value to process.
+ * @returns {string} Returns the string.
+ */
+function baseToString(value) {
+ // Exit early for strings to avoid a performance hit in some environments.
+ if (typeof value == 'string') {
+ return value;
+ }
+ if (isSymbol(value)) {
+ return symbolToString ? symbolToString.call(value) : '';
+ }
+ var result = (value + '');
+ return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
+}
+
+/**
+ * Casts `value` to a path array if it's not one.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {Array} Returns the cast property path array.
+ */
+function castPath(value) {
+ return isArray(value) ? value : stringToPath(value);
+}
+
+/**
+ * Gets the data for `map`.
+ *
+ * @private
+ * @param {Object} map The map to query.
+ * @param {string} key The reference key.
+ * @returns {*} Returns the map data.
+ */
+function getMapData(map, key) {
+ var data = map.__data__;
+ return isKeyable(key)
+ ? data[typeof key == 'string' ? 'string' : 'hash']
+ : data.map;
+}
+
+/**
+ * Gets the native function at `key` of `object`.
+ *
+ * @private
+ * @param {Object} object The object to query.
+ * @param {string} key The key of the method to get.
+ * @returns {*} Returns the function if it's native, else `undefined`.
+ */
+function getNative(object, key) {
+ var value = getValue(object, key);
+ return baseIsNative(value) ? value : undefined;
+}
+
+/**
+ * Checks if `value` is a property name and not a property path.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @param {Object} [object] The object to query keys on.
+ * @returns {boolean} Returns `true` if `value` is a property name, else `false`.
+ */
+function isKey(value, object) {
+ if (isArray(value)) {
+ return false;
+ }
+ var type = typeof value;
+ if (type == 'number' || type == 'symbol' || type == 'boolean' ||
+ value == null || isSymbol(value)) {
+ return true;
+ }
+ return reIsPlainProp.test(value) || !reIsDeepProp.test(value) ||
+ (object != null && value in Object(object));
+}
+
+/**
+ * Checks if `value` is suitable for use as unique object key.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is suitable, else `false`.
+ */
+function isKeyable(value) {
+ var type = typeof value;
+ return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
+ ? (value !== '__proto__')
+ : (value === null);
+}
+
+/**
+ * Checks if `func` has its source masked.
+ *
+ * @private
+ * @param {Function} func The function to check.
+ * @returns {boolean} Returns `true` if `func` is masked, else `false`.
+ */
+function isMasked(func) {
+ return !!maskSrcKey && (maskSrcKey in func);
+}
+
+/**
+ * Converts `string` to a property path array.
+ *
+ * @private
+ * @param {string} string The string to convert.
+ * @returns {Array} Returns the property path array.
+ */
+var stringToPath = memoize(function(string) {
+ string = toString(string);
+
+ var result = [];
+ if (reLeadingDot.test(string)) {
+ result.push('');
+ }
+ string.replace(rePropName, function(match, number, quote, string) {
+ result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match));
+ });
+ return result;
+});
+
+/**
+ * Converts `value` to a string key if it's not a string or symbol.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {string|symbol} Returns the key.
+ */
+function toKey(value) {
+ if (typeof value == 'string' || isSymbol(value)) {
+ return value;
+ }
+ var result = (value + '');
+ return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
+}
+
+/**
+ * Converts `func` to its source code.
+ *
+ * @private
+ * @param {Function} func The function to process.
+ * @returns {string} Returns the source code.
+ */
+function toSource(func) {
+ if (func != null) {
+ try {
+ return funcToString.call(func);
+ } catch (e) {}
+ try {
+ return (func + '');
+ } catch (e) {}
+ }
+ return '';
+}
+
+/**
+ * Creates a function that memoizes the result of `func`. If `resolver` is
+ * provided, it determines the cache key for storing the result based on the
+ * arguments provided to the memoized function. By default, the first argument
+ * provided to the memoized function is used as the map cache key. The `func`
+ * is invoked with the `this` binding of the memoized function.
+ *
+ * **Note:** The cache is exposed as the `cache` property on the memoized
+ * function. Its creation may be customized by replacing the `_.memoize.Cache`
+ * constructor with one whose instances implement the
+ * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object)
+ * method interface of `delete`, `get`, `has`, and `set`.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Function
+ * @param {Function} func The function to have its output memoized.
+ * @param {Function} [resolver] The function to resolve the cache key.
+ * @returns {Function} Returns the new memoized function.
+ * @example
+ *
+ * var object = { 'a': 1, 'b': 2 };
+ * var other = { 'c': 3, 'd': 4 };
+ *
+ * var values = _.memoize(_.values);
+ * values(object);
+ * // => [1, 2]
+ *
+ * values(other);
+ * // => [3, 4]
+ *
+ * object.a = 2;
+ * values(object);
+ * // => [1, 2]
+ *
+ * // Modify the result cache.
+ * values.cache.set(object, ['a', 'b']);
+ * values(object);
+ * // => ['a', 'b']
+ *
+ * // Replace `_.memoize.Cache`.
+ * _.memoize.Cache = WeakMap;
+ */
+function memoize(func, resolver) {
+ if (typeof func != 'function' || (resolver && typeof resolver != 'function')) {
+ throw new TypeError(FUNC_ERROR_TEXT);
+ }
+ var memoized = function() {
+ var args = arguments,
+ key = resolver ? resolver.apply(this, args) : args[0],
+ cache = memoized.cache;
+
+ if (cache.has(key)) {
+ return cache.get(key);
+ }
+ var result = func.apply(this, args);
+ memoized.cache = cache.set(key, result);
+ return result;
+ };
+ memoized.cache = new (memoize.Cache || MapCache);
+ return memoized;
+}
+
+// Assign cache to `_.memoize`.
+memoize.Cache = MapCache;
+
+/**
+ * Performs a
+ * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * comparison between two values to determine if they are equivalent.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to compare.
+ * @param {*} other The other value to compare.
+ * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
+ * @example
+ *
+ * var object = { 'a': 1 };
+ * var other = { 'a': 1 };
+ *
+ * _.eq(object, object);
+ * // => true
+ *
+ * _.eq(object, other);
+ * // => false
+ *
+ * _.eq('a', 'a');
+ * // => true
+ *
+ * _.eq('a', Object('a'));
+ * // => false
+ *
+ * _.eq(NaN, NaN);
+ * // => true
+ */
+function eq(value, other) {
+ return value === other || (value !== value && other !== other);
+}
+
+/**
+ * Checks if `value` is classified as an `Array` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an array, else `false`.
+ * @example
+ *
+ * _.isArray([1, 2, 3]);
+ * // => true
+ *
+ * _.isArray(document.body.children);
+ * // => false
+ *
+ * _.isArray('abc');
+ * // => false
+ *
+ * _.isArray(_.noop);
+ * // => false
+ */
+var isArray = Array.isArray;
+
+/**
+ * Checks if `value` is classified as a `Function` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a function, else `false`.
+ * @example
+ *
+ * _.isFunction(_);
+ * // => true
+ *
+ * _.isFunction(/abc/);
+ * // => false
+ */
+function isFunction(value) {
+ // The use of `Object#toString` avoids issues with the `typeof` operator
+ // in Safari 8-9 which returns 'object' for typed array and other constructors.
+ var tag = isObject(value) ? objectToString.call(value) : '';
+ return tag == funcTag || tag == genTag;
+}
+
+/**
+ * Checks if `value` is the
+ * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
+ * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an object, else `false`.
+ * @example
+ *
+ * _.isObject({});
+ * // => true
+ *
+ * _.isObject([1, 2, 3]);
+ * // => true
+ *
+ * _.isObject(_.noop);
+ * // => true
+ *
+ * _.isObject(null);
+ * // => false
+ */
+function isObject(value) {
+ var type = typeof value;
+ return !!value && (type == 'object' || type == 'function');
+}
+
+/**
+ * Checks if `value` is object-like. A value is object-like if it's not `null`
+ * and has a `typeof` result of "object".
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is object-like, else `false`.
+ * @example
+ *
+ * _.isObjectLike({});
+ * // => true
+ *
+ * _.isObjectLike([1, 2, 3]);
+ * // => true
+ *
+ * _.isObjectLike(_.noop);
+ * // => false
+ *
+ * _.isObjectLike(null);
+ * // => false
+ */
+function isObjectLike(value) {
+ return !!value && typeof value == 'object';
+}
+
+/**
+ * Checks if `value` is classified as a `Symbol` primitive or object.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a symbol, else `false`.
+ * @example
+ *
+ * _.isSymbol(Symbol.iterator);
+ * // => true
+ *
+ * _.isSymbol('abc');
+ * // => false
+ */
+function isSymbol(value) {
+ return typeof value == 'symbol' ||
+ (isObjectLike(value) && objectToString.call(value) == symbolTag);
+}
+
+/**
+ * Converts `value` to a string. An empty string is returned for `null`
+ * and `undefined` values. The sign of `-0` is preserved.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to process.
+ * @returns {string} Returns the string.
+ * @example
+ *
+ * _.toString(null);
+ * // => ''
+ *
+ * _.toString(-0);
+ * // => '-0'
+ *
+ * _.toString([1, 2, 3]);
+ * // => '1,2,3'
+ */
+function toString(value) {
+ return value == null ? '' : baseToString(value);
+}
+
+/**
+ * Gets the value at `path` of `object`. If the resolved value is
+ * `undefined`, the `defaultValue` is returned in its place.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.7.0
+ * @category Object
+ * @param {Object} object The object to query.
+ * @param {Array|string} path The path of the property to get.
+ * @param {*} [defaultValue] The value returned for `undefined` resolved values.
+ * @returns {*} Returns the resolved value.
+ * @example
+ *
+ * var object = { 'a': [{ 'b': { 'c': 3 } }] };
+ *
+ * _.get(object, 'a[0].b.c');
+ * // => 3
+ *
+ * _.get(object, ['a', '0', 'b', 'c']);
+ * // => 3
+ *
+ * _.get(object, 'a.b.c', 'default');
+ * // => 'default'
+ */
+function get(object, path, defaultValue) {
+ var result = object == null ? undefined : baseGet(object, path);
+ return result === undefined ? defaultValue : result;
+}
+
+module.exports = get;
+
+
+/***/ }),
+
+/***/ 856:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = __webpack_require__(141);
+
+
+/***/ }),
+
+/***/ 861:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = registerPlugin;
+
+const factory = __webpack_require__(513);
+
+function registerPlugin(plugins, pluginFunction) {
+ return factory(
+ plugins.includes(pluginFunction) ? plugins : plugins.concat(pluginFunction)
+ );
+}
+
+
+/***/ }),
+
+/***/ 864:
/***/ (function(module, exports) {
exports = module.exports = SemVer
@@ -4252,3396 +33123,31 @@ function coerce (version, options) {
/***/ }),
-/***/ 281:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const internal_globber_1 = __webpack_require__(297);
-/**
- * Constructs a globber
- *
- * @param patterns Patterns separated by newlines
- * @param options Glob options
- */
-function create(patterns, options) {
- return __awaiter(this, void 0, void 0, function* () {
- return yield internal_globber_1.DefaultGlobber.create(patterns, options);
- });
-}
-exports.create = create;
-//# sourceMappingURL=glob.js.map
-
-/***/ }),
-
-/***/ 297:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __asyncValues = (this && this.__asyncValues) || function (o) {
- if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
- var m = o[Symbol.asyncIterator], i;
- return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
- function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
- function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
-};
-var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
-var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
- if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
- var g = generator.apply(thisArg, _arguments || []), i, q = [];
- return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
- function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
- function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
- function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
- function fulfill(value) { resume("next", value); }
- function reject(value) { resume("throw", value); }
- function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __webpack_require__(470);
-const fs = __webpack_require__(747);
-const globOptionsHelper = __webpack_require__(601);
-const path = __webpack_require__(622);
-const patternHelper = __webpack_require__(597);
-const internal_match_kind_1 = __webpack_require__(327);
-const internal_pattern_1 = __webpack_require__(923);
-const internal_search_state_1 = __webpack_require__(728);
-const IS_WINDOWS = process.platform === 'win32';
-class DefaultGlobber {
- constructor(options) {
- this.patterns = [];
- this.searchPaths = [];
- this.options = globOptionsHelper.getOptions(options);
- }
- getSearchPaths() {
- // Return a copy
- return this.searchPaths.slice();
- }
- glob() {
- var e_1, _a;
- return __awaiter(this, void 0, void 0, function* () {
- const result = [];
- try {
- for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
- const itemPath = _c.value;
- result.push(itemPath);
- }
- }
- catch (e_1_1) { e_1 = { error: e_1_1 }; }
- finally {
- try {
- if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
- }
- finally { if (e_1) throw e_1.error; }
- }
- return result;
- });
- }
- globGenerator() {
- return __asyncGenerator(this, arguments, function* globGenerator_1() {
- // Fill in defaults options
- const options = globOptionsHelper.getOptions(this.options);
- // Implicit descendants?
- const patterns = [];
- for (const pattern of this.patterns) {
- patterns.push(pattern);
- if (options.implicitDescendants &&
- (pattern.trailingSeparator ||
- pattern.segments[pattern.segments.length - 1] !== '**')) {
- patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
- }
- }
- // Push the search paths
- const stack = [];
- for (const searchPath of patternHelper.getSearchPaths(patterns)) {
- core.debug(`Search path '${searchPath}'`);
- // Exists?
- try {
- // Intentionally using lstat. Detection for broken symlink
- // will be performed later (if following symlinks).
- yield __await(fs.promises.lstat(searchPath));
- }
- catch (err) {
- if (err.code === 'ENOENT') {
- continue;
- }
- throw err;
- }
- stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
- }
- // Search
- const traversalChain = []; // used to detect cycles
- while (stack.length) {
- // Pop
- const item = stack.pop();
- // Match?
- const match = patternHelper.match(patterns, item.path);
- const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
- if (!match && !partialMatch) {
- continue;
- }
- // Stat
- const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)
- // Broken symlink, or symlink cycle detected, or no longer exists
- );
- // Broken symlink, or symlink cycle detected, or no longer exists
- if (!stats) {
- continue;
- }
- // Directory
- if (stats.isDirectory()) {
- // Matched
- if (match & internal_match_kind_1.MatchKind.Directory) {
- yield yield __await(item.path);
- }
- // Descend?
- else if (!partialMatch) {
- continue;
- }
- // Push the child items in reverse
- const childLevel = item.level + 1;
- const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
- stack.push(...childItems.reverse());
- }
- // File
- else if (match & internal_match_kind_1.MatchKind.File) {
- yield yield __await(item.path);
- }
- }
- });
- }
- /**
- * Constructs a DefaultGlobber
- */
- static create(patterns, options) {
- return __awaiter(this, void 0, void 0, function* () {
- const result = new DefaultGlobber(options);
- if (IS_WINDOWS) {
- patterns = patterns.replace(/\r\n/g, '\n');
- patterns = patterns.replace(/\r/g, '\n');
- }
- const lines = patterns.split('\n').map(x => x.trim());
- for (const line of lines) {
- // Empty or comment
- if (!line || line.startsWith('#')) {
- continue;
- }
- // Pattern
- else {
- result.patterns.push(new internal_pattern_1.Pattern(line));
- }
- }
- result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
- return result;
- });
- }
- static stat(item, options, traversalChain) {
- return __awaiter(this, void 0, void 0, function* () {
- // Note:
- // `stat` returns info about the target of a symlink (or symlink chain)
- // `lstat` returns info about a symlink itself
- let stats;
- if (options.followSymbolicLinks) {
- try {
- // Use `stat` (following symlinks)
- stats = yield fs.promises.stat(item.path);
- }
- catch (err) {
- if (err.code === 'ENOENT') {
- if (options.omitBrokenSymbolicLinks) {
- core.debug(`Broken symlink '${item.path}'`);
- return undefined;
- }
- throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
- }
- throw err;
- }
- }
- else {
- // Use `lstat` (not following symlinks)
- stats = yield fs.promises.lstat(item.path);
- }
- // Note, isDirectory() returns false for the lstat of a symlink
- if (stats.isDirectory() && options.followSymbolicLinks) {
- // Get the realpath
- const realPath = yield fs.promises.realpath(item.path);
- // Fixup the traversal chain to match the item level
- while (traversalChain.length >= item.level) {
- traversalChain.pop();
- }
- // Test for a cycle
- if (traversalChain.some((x) => x === realPath)) {
- core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
- return undefined;
- }
- // Update the traversal chain
- traversalChain.push(realPath);
- }
- return stats;
- });
- }
-}
-exports.DefaultGlobber = DefaultGlobber;
-//# sourceMappingURL=internal-globber.js.map
-
-/***/ }),
-
-/***/ 306:
+/***/ 866:
/***/ (function(module, __unusedexports, __webpack_require__) {
-var concatMap = __webpack_require__(896);
-var balanced = __webpack_require__(621);
-
-module.exports = expandTop;
-
-var escSlash = '\0SLASH'+Math.random()+'\0';
-var escOpen = '\0OPEN'+Math.random()+'\0';
-var escClose = '\0CLOSE'+Math.random()+'\0';
-var escComma = '\0COMMA'+Math.random()+'\0';
-var escPeriod = '\0PERIOD'+Math.random()+'\0';
-
-function numeric(str) {
- return parseInt(str, 10) == str
- ? parseInt(str, 10)
- : str.charCodeAt(0);
-}
-
-function escapeBraces(str) {
- return str.split('\\\\').join(escSlash)
- .split('\\{').join(escOpen)
- .split('\\}').join(escClose)
- .split('\\,').join(escComma)
- .split('\\.').join(escPeriod);
-}
-
-function unescapeBraces(str) {
- return str.split(escSlash).join('\\')
- .split(escOpen).join('{')
- .split(escClose).join('}')
- .split(escComma).join(',')
- .split(escPeriod).join('.');
-}
-
-
-// Basically just str.split(","), but handling cases
-// where we have nested braced sections, which should be
-// treated as individual members, like {a,{b,c},d}
-function parseCommaParts(str) {
- if (!str)
- return [''];
-
- var parts = [];
- var m = balanced('{', '}', str);
-
- if (!m)
- return str.split(',');
-
- var pre = m.pre;
- var body = m.body;
- var post = m.post;
- var p = pre.split(',');
-
- p[p.length-1] += '{' + body + '}';
- var postParts = parseCommaParts(post);
- if (post.length) {
- p[p.length-1] += postParts.shift();
- p.push.apply(p, postParts);
- }
-
- parts.push.apply(parts, p);
-
- return parts;
-}
-
-function expandTop(str) {
- if (!str)
- return [];
-
- // I don't know why Bash 4.3 does this, but it does.
- // Anything starting with {} will have the first two bytes preserved
- // but *only* at the top level, so {},a}b will not expand to anything,
- // but a{},b}c will be expanded to [a}c,abc].
- // One could argue that this is a bug in Bash, but since the goal of
- // this module is to match Bash's rules, we escape a leading {}
- if (str.substr(0, 2) === '{}') {
- str = '\\{\\}' + str.substr(2);
- }
-
- return expand(escapeBraces(str), true).map(unescapeBraces);
-}
-
-function identity(e) {
- return e;
-}
-
-function embrace(str) {
- return '{' + str + '}';
-}
-function isPadded(el) {
- return /^-?0\d/.test(el);
-}
-
-function lte(i, y) {
- return i <= y;
-}
-function gte(i, y) {
- return i >= y;
-}
-
-function expand(str, isTop) {
- var expansions = [];
-
- var m = balanced('{', '}', str);
- if (!m || /\$$/.test(m.pre)) return [str];
-
- var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
- var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
- var isSequence = isNumericSequence || isAlphaSequence;
- var isOptions = m.body.indexOf(',') >= 0;
- if (!isSequence && !isOptions) {
- // {a},b}
- if (m.post.match(/,.*\}/)) {
- str = m.pre + '{' + m.body + escClose + m.post;
- return expand(str);
- }
- return [str];
- }
-
- var n;
- if (isSequence) {
- n = m.body.split(/\.\./);
- } else {
- n = parseCommaParts(m.body);
- if (n.length === 1) {
- // x{{a,b}}y ==> x{a}y x{b}y
- n = expand(n[0], false).map(embrace);
- if (n.length === 1) {
- var post = m.post.length
- ? expand(m.post, false)
- : [''];
- return post.map(function(p) {
- return m.pre + n[0] + p;
- });
- }
- }
- }
-
- // at this point, n is the parts, and we know it's not a comma set
- // with a single entry.
-
- // no need to expand pre, since it is guaranteed to be free of brace-sets
- var pre = m.pre;
- var post = m.post.length
- ? expand(m.post, false)
- : [''];
-
- var N;
-
- if (isSequence) {
- var x = numeric(n[0]);
- var y = numeric(n[1]);
- var width = Math.max(n[0].length, n[1].length)
- var incr = n.length == 3
- ? Math.abs(numeric(n[2]))
- : 1;
- var test = lte;
- var reverse = y < x;
- if (reverse) {
- incr *= -1;
- test = gte;
- }
- var pad = n.some(isPadded);
-
- N = [];
-
- for (var i = x; test(i, y); i += incr) {
- var c;
- if (isAlphaSequence) {
- c = String.fromCharCode(i);
- if (c === '\\')
- c = '';
- } else {
- c = String(i);
- if (pad) {
- var need = width - c.length;
- if (need > 0) {
- var z = new Array(need + 1).join('0');
- if (i < 0)
- c = '-' + z + c.slice(1);
- else
- c = z + c;
- }
- }
- }
- N.push(c);
- }
- } else {
- N = concatMap(n, function(el) { return expand(el, false) });
- }
-
- for (var j = 0; j < N.length; j++) {
- for (var k = 0; k < post.length; k++) {
- var expansion = pre + N[j] + post[k];
- if (!isTop || isSequence || expansion)
- expansions.push(expansion);
- }
- }
-
- return expansions;
-}
-
-
-
-/***/ }),
-
-/***/ 327:
-/***/ (function(__unusedmodule, exports) {
-
"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-/**
- * Indicates whether a pattern matches a path
- */
-var MatchKind;
-(function (MatchKind) {
- /** Not matched */
- MatchKind[MatchKind["None"] = 0] = "None";
- /** Matched if the path is a directory */
- MatchKind[MatchKind["Directory"] = 1] = "Directory";
- /** Matched if the path is a regular file */
- MatchKind[MatchKind["File"] = 2] = "File";
- /** Matched */
- MatchKind[MatchKind["All"] = 3] = "All";
-})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
-//# sourceMappingURL=internal-match-kind.js.map
+var shebangRegex = __webpack_require__(816);
-/***/ }),
+module.exports = function (str) {
+ var match = str.match(shebangRegex);
-/***/ 342:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+ if (!match) {
+ return null;
+ }
-"use strict";
+ var arr = match[0].replace(/#! ?/, '').split(' ');
+ var bin = arr[0].split('/').pop();
+ var arg = arr[1];
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
+ return (bin === 'env' ?
+ arg :
+ bin + (arg ? ' ' + arg : '')
+ );
};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const http_client_1 = __webpack_require__(539);
-const auth_1 = __webpack_require__(226);
-const crypto = __importStar(__webpack_require__(417));
-const fs = __importStar(__webpack_require__(747));
-const stream = __importStar(__webpack_require__(794));
-const util = __importStar(__webpack_require__(669));
-const constants_1 = __webpack_require__(929);
-const utils = __importStar(__webpack_require__(867));
-const versionSalt = "1.0";
-function isSuccessStatusCode(statusCode) {
- if (!statusCode) {
- return false;
- }
- return statusCode >= 200 && statusCode < 300;
-}
-function isRetryableStatusCode(statusCode) {
- if (!statusCode) {
- return false;
- }
- const retryableStatusCodes = [
- http_client_1.HttpCodes.BadGateway,
- http_client_1.HttpCodes.ServiceUnavailable,
- http_client_1.HttpCodes.GatewayTimeout
- ];
- return retryableStatusCodes.includes(statusCode);
-}
-function getCacheApiUrl(resource) {
- // Ideally we just use ACTIONS_CACHE_URL
- const baseUrl = (process.env["ACTIONS_CACHE_URL"] ||
- process.env["ACTIONS_RUNTIME_URL"] ||
- "").replace("pipelines", "artifactcache");
- if (!baseUrl) {
- throw new Error("Cache Service Url not found, unable to restore cache.");
- }
- const url = `${baseUrl}_apis/artifactcache/${resource}`;
- core.debug(`Resource Url: ${url}`);
- return url;
-}
-function createAcceptHeader(type, apiVersion) {
- return `${type};api-version=${apiVersion}`;
-}
-function getRequestOptions() {
- const requestOptions = {
- headers: {
- Accept: createAcceptHeader("application/json", "6.0-preview.1")
- }
- };
- return requestOptions;
-}
-function createHttpClient() {
- const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
- const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
- return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions());
-}
-function getCacheVersion() {
- // Add salt to cache version to support breaking changes in cache entry
- const components = [
- core.getInput(constants_1.Inputs.Path, { required: true }),
- versionSalt
- ];
- return crypto
- .createHash("sha256")
- .update(components.join("|"))
- .digest("hex");
-}
-exports.getCacheVersion = getCacheVersion;
-function getCacheEntry(keys) {
- return __awaiter(this, void 0, void 0, function* () {
- const httpClient = createHttpClient();
- const version = getCacheVersion();
- const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`;
- const response = yield httpClient.getJson(getCacheApiUrl(resource));
- if (response.statusCode === 204) {
- return null;
- }
- if (!isSuccessStatusCode(response.statusCode)) {
- throw new Error(`Cache service responded with ${response.statusCode}`);
- }
- const cacheResult = response.result;
- const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
- if (!cacheDownloadUrl) {
- throw new Error("Cache not found.");
- }
- core.setSecret(cacheDownloadUrl);
- core.debug(`Cache Result:`);
- core.debug(JSON.stringify(cacheResult));
- return cacheResult;
- });
-}
-exports.getCacheEntry = getCacheEntry;
-function pipeResponseToStream(response, output) {
- return __awaiter(this, void 0, void 0, function* () {
- const pipeline = util.promisify(stream.pipeline);
- yield pipeline(response.message, output);
- });
-}
-function downloadCache(archiveLocation, archivePath) {
- return __awaiter(this, void 0, void 0, function* () {
- const stream = fs.createWriteStream(archivePath);
- const httpClient = new http_client_1.HttpClient("actions/cache");
- const downloadResponse = yield httpClient.get(archiveLocation);
- // Abort download if no traffic received over the socket.
- downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
- downloadResponse.message.destroy();
- core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
- });
- yield pipeResponseToStream(downloadResponse, stream);
- // Validate download size.
- const contentLengthHeader = downloadResponse.message.headers["content-length"];
- if (contentLengthHeader) {
- const expectedLength = parseInt(contentLengthHeader);
- const actualLength = utils.getArchiveFileSize(archivePath);
- if (actualLength != expectedLength) {
- throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
- }
- }
- else {
- core.debug("Unable to validate download, no Content-Length header");
- }
- });
-}
-exports.downloadCache = downloadCache;
-// Reserve Cache
-function reserveCache(key) {
- var _a, _b;
- return __awaiter(this, void 0, void 0, function* () {
- const httpClient = createHttpClient();
- const version = getCacheVersion();
- const reserveCacheRequest = {
- key,
- version
- };
- const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest);
- return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1;
- });
-}
-exports.reserveCache = reserveCache;
-function getContentRange(start, end) {
- // Format: `bytes start-end/filesize
- // start and end are inclusive
- // filesize can be *
- // For a 200 byte chunk starting at byte 0:
- // Content-Range: bytes 0-199/*
- return `bytes ${start}-${end}/*`;
-}
-function uploadChunk(httpClient, resourceUrl, data, start, end) {
- return __awaiter(this, void 0, void 0, function* () {
- core.debug(`Uploading chunk of size ${end -
- start +
- 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
- const additionalHeaders = {
- "Content-Type": "application/octet-stream",
- "Content-Range": getContentRange(start, end)
- };
- const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
- return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders);
- });
- const response = yield uploadChunkRequest();
- if (isSuccessStatusCode(response.message.statusCode)) {
- return;
- }
- if (isRetryableStatusCode(response.message.statusCode)) {
- core.debug(`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`);
- const retryResponse = yield uploadChunkRequest();
- if (isSuccessStatusCode(retryResponse.message.statusCode)) {
- return;
- }
- }
- throw new Error(`Cache service responded with ${response.message.statusCode} during chunk upload.`);
- });
-}
-function parseEnvNumber(key) {
- const value = Number(process.env[key]);
- if (Number.isNaN(value) || value < 0) {
- return undefined;
- }
- return value;
-}
-function uploadFile(httpClient, cacheId, archivePath) {
- var _a, _b;
- return __awaiter(this, void 0, void 0, function* () {
- // Upload Chunks
- const fileSize = fs.statSync(archivePath).size;
- const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
- const fd = fs.openSync(archivePath, "r");
- const concurrency = (_a = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY")) !== null && _a !== void 0 ? _a : 4; // # of HTTP requests in parallel
- const MAX_CHUNK_SIZE = (_b = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE")) !== null && _b !== void 0 ? _b : 32 * 1024 * 1024; // 32 MB Chunks
- core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
- const parallelUploads = [...new Array(concurrency).keys()];
- core.debug("Awaiting all uploads");
- let offset = 0;
- try {
- yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
- while (offset < fileSize) {
- const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
- const start = offset;
- const end = offset + chunkSize - 1;
- offset += MAX_CHUNK_SIZE;
- const chunk = fs.createReadStream(archivePath, {
- fd,
- start,
- end,
- autoClose: false
- });
- yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
- }
- })));
- }
- finally {
- fs.closeSync(fd);
- }
- return;
- });
-}
-function commitCache(httpClient, cacheId, filesize) {
- return __awaiter(this, void 0, void 0, function* () {
- const commitCacheRequest = { size: filesize };
- return yield httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
- });
-}
-function saveCache(cacheId, archivePath) {
- return __awaiter(this, void 0, void 0, function* () {
- const httpClient = createHttpClient();
- core.debug("Upload cache");
- yield uploadFile(httpClient, cacheId, archivePath);
- // Commit Cache
- core.debug("Commiting cache");
- const cacheSize = utils.getArchiveFileSize(archivePath);
- const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
- if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
- throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
- }
- core.info("Cache saved successfully");
- });
-}
-exports.saveCache = saveCache;
-/***/ }),
-
-/***/ 357:
-/***/ (function(module) {
-
-module.exports = require("assert");
-
-/***/ }),
-
-/***/ 383:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-const assert = __webpack_require__(357);
-const path = __webpack_require__(622);
-const pathHelper = __webpack_require__(972);
-const IS_WINDOWS = process.platform === 'win32';
-/**
- * Helper class for parsing paths into segments
- */
-class Path {
- /**
- * Constructs a Path
- * @param itemPath Path or array of segments
- */
- constructor(itemPath) {
- this.segments = [];
- // String
- if (typeof itemPath === 'string') {
- assert(itemPath, `Parameter 'itemPath' must not be empty`);
- // Normalize slashes and trim unnecessary trailing slash
- itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
- // Not rooted
- if (!pathHelper.hasRoot(itemPath)) {
- this.segments = itemPath.split(path.sep);
- }
- // Rooted
- else {
- // Add all segments, while not at the root
- let remaining = itemPath;
- let dir = pathHelper.dirname(remaining);
- while (dir !== remaining) {
- // Add the segment
- const basename = path.basename(remaining);
- this.segments.unshift(basename);
- // Truncate the last segment
- remaining = dir;
- dir = pathHelper.dirname(remaining);
- }
- // Remainder is the root
- this.segments.unshift(remaining);
- }
- }
- // Array
- else {
- // Must not be empty
- assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
- // Each segment
- for (let i = 0; i < itemPath.length; i++) {
- let segment = itemPath[i];
- // Must not be empty
- assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
- // Normalize slashes
- segment = pathHelper.normalizeSeparators(itemPath[i]);
- // Root segment
- if (i === 0 && pathHelper.hasRoot(segment)) {
- segment = pathHelper.safeTrimTrailingSeparator(segment);
- assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
- this.segments.push(segment);
- }
- // All other segments
- else {
- // Must not contain slash
- assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
- this.segments.push(segment);
- }
- }
- }
- }
- /**
- * Converts the path to it's string representation
- */
- toString() {
- // First segment
- let result = this.segments[0];
- // All others
- let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
- for (let i = 1; i < this.segments.length; i++) {
- if (skipSlash) {
- skipSlash = false;
- }
- else {
- result += path.sep;
- }
- result += this.segments[i];
- }
- return result;
- }
-}
-exports.Path = Path;
-//# sourceMappingURL=internal-path.js.map
-
-/***/ }),
-
-/***/ 413:
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-module.exports = __webpack_require__(141);
-
-
-/***/ }),
-
-/***/ 417:
-/***/ (function(module) {
-
-module.exports = require("crypto");
-
-/***/ }),
-
-/***/ 431:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const os = __importStar(__webpack_require__(87));
-/**
- * Commands
- *
- * Command Format:
- * ::name key=value,key=value::message
- *
- * Examples:
- * ::warning::This is the message
- * ::set-env name=MY_VAR::some value
- */
-function issueCommand(command, properties, message) {
- const cmd = new Command(command, properties, message);
- process.stdout.write(cmd.toString() + os.EOL);
-}
-exports.issueCommand = issueCommand;
-function issue(name, message = '') {
- issueCommand(name, {}, message);
-}
-exports.issue = issue;
-const CMD_STRING = '::';
-class Command {
- constructor(command, properties, message) {
- if (!command) {
- command = 'missing.command';
- }
- this.command = command;
- this.properties = properties;
- this.message = message;
- }
- toString() {
- let cmdStr = CMD_STRING + this.command;
- if (this.properties && Object.keys(this.properties).length > 0) {
- cmdStr += ' ';
- let first = true;
- for (const key in this.properties) {
- if (this.properties.hasOwnProperty(key)) {
- const val = this.properties[key];
- if (val) {
- if (first) {
- first = false;
- }
- else {
- cmdStr += ',';
- }
- cmdStr += `${key}=${escapeProperty(val)}`;
- }
- }
- }
- }
- cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
- return cmdStr;
- }
-}
-/**
- * Sanitizes an input into a string so it can be passed into issueCommand safely
- * @param input input to sanitize into a string
- */
-function toCommandValue(input) {
- if (input === null || input === undefined) {
- return '';
- }
- else if (typeof input === 'string' || input instanceof String) {
- return input;
- }
- return JSON.stringify(input);
-}
-exports.toCommandValue = toCommandValue;
-function escapeData(s) {
- return toCommandValue(s)
- .replace(/%/g, '%25')
- .replace(/\r/g, '%0D')
- .replace(/\n/g, '%0A');
-}
-function escapeProperty(s) {
- return toCommandValue(s)
- .replace(/%/g, '%25')
- .replace(/\r/g, '%0D')
- .replace(/\n/g, '%0A')
- .replace(/:/g, '%3A')
- .replace(/,/g, '%2C');
-}
-//# sourceMappingURL=command.js.map
-
-/***/ }),
-
-/***/ 470:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const command_1 = __webpack_require__(431);
-const os = __importStar(__webpack_require__(87));
-const path = __importStar(__webpack_require__(622));
-/**
- * The code to exit an action
- */
-var ExitCode;
-(function (ExitCode) {
- /**
- * A code indicating that the action was successful
- */
- ExitCode[ExitCode["Success"] = 0] = "Success";
- /**
- * A code indicating that the action was a failure
- */
- ExitCode[ExitCode["Failure"] = 1] = "Failure";
-})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
-//-----------------------------------------------------------------------
-// Variables
-//-----------------------------------------------------------------------
-/**
- * Sets env variable for this action and future actions in the job
- * @param name the name of the variable to set
- * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function exportVariable(name, val) {
- const convertedVal = command_1.toCommandValue(val);
- process.env[name] = convertedVal;
- command_1.issueCommand('set-env', { name }, convertedVal);
-}
-exports.exportVariable = exportVariable;
-/**
- * Registers a secret which will get masked from logs
- * @param secret value of the secret
- */
-function setSecret(secret) {
- command_1.issueCommand('add-mask', {}, secret);
-}
-exports.setSecret = setSecret;
-/**
- * Prepends inputPath to the PATH (for this action and future actions)
- * @param inputPath
- */
-function addPath(inputPath) {
- command_1.issueCommand('add-path', {}, inputPath);
- process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
-}
-exports.addPath = addPath;
-/**
- * Gets the value of an input. The value is also trimmed.
- *
- * @param name name of the input to get
- * @param options optional. See InputOptions.
- * @returns string
- */
-function getInput(name, options) {
- const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
- if (options && options.required && !val) {
- throw new Error(`Input required and not supplied: ${name}`);
- }
- return val.trim();
-}
-exports.getInput = getInput;
-/**
- * Sets the value of an output.
- *
- * @param name name of the output to set
- * @param value value to store. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function setOutput(name, value) {
- command_1.issueCommand('set-output', { name }, value);
-}
-exports.setOutput = setOutput;
-/**
- * Enables or disables the echoing of commands into stdout for the rest of the step.
- * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
- *
- */
-function setCommandEcho(enabled) {
- command_1.issue('echo', enabled ? 'on' : 'off');
-}
-exports.setCommandEcho = setCommandEcho;
-//-----------------------------------------------------------------------
-// Results
-//-----------------------------------------------------------------------
-/**
- * Sets the action status to failed.
- * When the action exits it will be with an exit code of 1
- * @param message add error issue message
- */
-function setFailed(message) {
- process.exitCode = ExitCode.Failure;
- error(message);
-}
-exports.setFailed = setFailed;
-//-----------------------------------------------------------------------
-// Logging Commands
-//-----------------------------------------------------------------------
-/**
- * Gets whether Actions Step Debug is on or not
- */
-function isDebug() {
- return process.env['RUNNER_DEBUG'] === '1';
-}
-exports.isDebug = isDebug;
-/**
- * Writes debug message to user log
- * @param message debug message
- */
-function debug(message) {
- command_1.issueCommand('debug', {}, message);
-}
-exports.debug = debug;
-/**
- * Adds an error issue
- * @param message error issue message. Errors will be converted to string via toString()
- */
-function error(message) {
- command_1.issue('error', message instanceof Error ? message.toString() : message);
-}
-exports.error = error;
-/**
- * Adds a warning issue
- * @param message warning issue message. Errors will be converted to string via toString()
- */
-function warning(message) {
- command_1.issue('warning', message instanceof Error ? message.toString() : message);
-}
-exports.warning = warning;
-/**
- * Writes info to log with console.log.
- * @param message info message
- */
-function info(message) {
- process.stdout.write(message + os.EOL);
-}
-exports.info = info;
-/**
- * Begin an output group.
- *
- * Output until the next `groupEnd` will be foldable in this group
- *
- * @param name The name of the output group
- */
-function startGroup(name) {
- command_1.issue('group', name);
-}
-exports.startGroup = startGroup;
-/**
- * End an output group.
- */
-function endGroup() {
- command_1.issue('endgroup');
-}
-exports.endGroup = endGroup;
-/**
- * Wrap an asynchronous function call in a group.
- *
- * Returns the same type as the function itself.
- *
- * @param name The name of the group
- * @param fn The function to wrap in the group
- */
-function group(name, fn) {
- return __awaiter(this, void 0, void 0, function* () {
- startGroup(name);
- let result;
- try {
- result = yield fn();
- }
- finally {
- endGroup();
- }
- return result;
- });
-}
-exports.group = group;
-//-----------------------------------------------------------------------
-// Wrapper action state
-//-----------------------------------------------------------------------
-/**
- * Saves state for the current action; the state can only be retrieved by this action's post-job execution.
- *
- * @param name name of the state to store
- * @param value value to store. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function saveState(name, value) {
- command_1.issueCommand('save-state', { name }, value);
-}
-exports.saveState = saveState;
-/**
- * Gets the value of a state set by this action's main execution.
- *
- * @param name name of the state to get
- * @returns string
- */
-function getState(name) {
- return process.env[`STATE_${name}`] || '';
-}
-exports.getState = getState;
-//# sourceMappingURL=core.js.map
-
-/***/ }),
-
-/***/ 514:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const io = __importStar(__webpack_require__(1));
-const tc = __importStar(__webpack_require__(533));
-const installer = __importStar(__webpack_require__(634));
-const path_1 = __importDefault(__webpack_require__(622));
-const child_process_1 = __importDefault(__webpack_require__(129));
-const fs_1 = __importDefault(__webpack_require__(747));
-function run() {
- return __awaiter(this, void 0, void 0, function* () {
- try {
- //
- // versionSpec is optional. If supplied, install / use from the tool cache
- // If not supplied then problem matchers will still be set up. Useful for self-hosted runners.
- //
- let versionSpec = core.getInput('go-version');
- // stable will be true unless false is the exact input
- // since getting unstable versions should be explicit
- let stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE';
- console.log(`Setup go ${stable ? 'stable' : ''} version spec ${versionSpec}`);
- if (versionSpec) {
- let installDir = tc.find('go', versionSpec);
- if (!installDir) {
- console.log(`A version satisfying ${versionSpec} not found locally, attempting to download ...`);
- installDir = yield installer.downloadGo(versionSpec, stable);
- console.log('Installed');
- }
- if (installDir) {
- core.exportVariable('GOROOT', installDir);
- core.addPath(path_1.default.join(installDir, 'bin'));
- console.log('Added go to the path');
- let added = addBinToPath();
- core.debug(`add bin ${added}`);
- }
- else {
- throw new Error(`Could not find a version that satisfied version spec: ${versionSpec}`);
- }
- }
- // add problem matchers
- const matchersPath = path_1.default.join(__dirname, '..', 'matchers.json');
- console.log(`##[add-matcher]${matchersPath}`);
- // output the version actually being used
- let goPath = yield io.which('go');
- let goVersion = (child_process_1.default.execSync(`${goPath} version`) || '').toString();
- console.log(goVersion);
- core.startGroup('go env');
- let goEnv = (child_process_1.default.execSync(`${goPath} env`) || '').toString();
- console.log(goEnv);
- core.endGroup();
- }
- catch (error) {
- core.setFailed(error.message);
- }
- });
-}
-exports.run = run;
-function addBinToPath() {
- return __awaiter(this, void 0, void 0, function* () {
- let added = false;
- let g = yield io.which('go');
- _debug(`which go :${g}:`);
- if (!g) {
- _debug('go not in the path');
- return added;
- }
- let buf = child_process_1.default.execSync('go env GOPATH');
- if (buf) {
- let gp = buf.toString().trim();
- _debug(`go env GOPATH :${gp}:`);
- if (!fs_1.default.existsSync(gp)) {
- // some of the hosted images have go installed but not a profile dir
- _debug(`creating ${gp}`);
- io.mkdirP(gp);
- }
- let bp = path_1.default.join(gp, 'bin');
- if (!fs_1.default.existsSync(bp)) {
- _debug(`creating ${bp}`);
- io.mkdirP(bp);
- }
- core.addPath(bp);
- added = true;
- }
- return added;
- });
-}
-exports.addBinToPath = addBinToPath;
-function _debug(message) {
- core.debug(message);
-}
-exports._debug = _debug;
-
-
-/***/ }),
-
-/***/ 533:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const io = __importStar(__webpack_require__(1));
-const fs = __importStar(__webpack_require__(747));
-const os = __importStar(__webpack_require__(87));
-const path = __importStar(__webpack_require__(622));
-const httpm = __importStar(__webpack_require__(539));
-const semver = __importStar(__webpack_require__(280));
-const stream = __importStar(__webpack_require__(794));
-const util = __importStar(__webpack_require__(669));
-const v4_1 = __importDefault(__webpack_require__(826));
-const exec_1 = __webpack_require__(986);
-const assert_1 = __webpack_require__(357);
-const retry_helper_1 = __webpack_require__(979);
-class HTTPError extends Error {
- constructor(httpStatusCode) {
- super(`Unexpected HTTP response: ${httpStatusCode}`);
- this.httpStatusCode = httpStatusCode;
- Object.setPrototypeOf(this, new.target.prototype);
- }
-}
-exports.HTTPError = HTTPError;
-const IS_WINDOWS = process.platform === 'win32';
-const userAgent = 'actions/tool-cache';
-/**
- * Download a tool from an url and stream it into a file
- *
- * @param url url of tool to download
- * @param dest path to download tool
- * @returns path to downloaded tool
- */
-function downloadTool(url, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- dest = dest || path.join(_getTempDirectory(), v4_1.default());
- yield io.mkdirP(path.dirname(dest));
- core.debug(`Downloading ${url}`);
- core.debug(`Destination ${dest}`);
- const maxAttempts = 3;
- const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
- const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
- const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
- return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
- return yield downloadToolAttempt(url, dest || '');
- }), (err) => {
- if (err instanceof HTTPError && err.httpStatusCode) {
- // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
- if (err.httpStatusCode < 500 &&
- err.httpStatusCode !== 408 &&
- err.httpStatusCode !== 429) {
- return false;
- }
- }
- // Otherwise retry
- return true;
- });
- });
-}
-exports.downloadTool = downloadTool;
-function downloadToolAttempt(url, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- if (fs.existsSync(dest)) {
- throw new Error(`Destination file path ${dest} already exists`);
- }
- // Get the response headers
- const http = new httpm.HttpClient(userAgent, [], {
- allowRetries: false
- });
- const response = yield http.get(url);
- if (response.message.statusCode !== 200) {
- const err = new HTTPError(response.message.statusCode);
- core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
- throw err;
- }
- // Download the response body
- const pipeline = util.promisify(stream.pipeline);
- const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
- const readStream = responseMessageFactory();
- let succeeded = false;
- try {
- yield pipeline(readStream, fs.createWriteStream(dest));
- core.debug('download complete');
- succeeded = true;
- return dest;
- }
- finally {
- // Error, delete dest before retry
- if (!succeeded) {
- core.debug('download failed');
- try {
- yield io.rmRF(dest);
- }
- catch (err) {
- core.debug(`Failed to delete '${dest}'. ${err.message}`);
- }
- }
- }
- });
-}
-/**
- * Extract a .7z file
- *
- * @param file path to the .7z file
- * @param dest destination directory. Optional.
- * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
- * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
- * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
- * bundled with the tool lib. However, it does not support long paths. 7zr.exe is the reduced command line
- * interface; it is smaller than the full command line interface, and it does support long paths. At the
- * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
- * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
- * to 7zr.exe can be passed to this function.
- * @returns path to the destination directory
- */
-function extract7z(file, dest, _7zPath) {
- return __awaiter(this, void 0, void 0, function* () {
- assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
- assert_1.ok(file, 'parameter "file" is required');
- dest = yield _createExtractFolder(dest);
- const originalCwd = process.cwd();
- process.chdir(dest);
- if (_7zPath) {
- try {
- const args = [
- 'x',
- '-bb1',
- '-bd',
- '-sccUTF-8',
- file
- ];
- const options = {
- silent: true
- };
- yield exec_1.exec(`"${_7zPath}"`, args, options);
- }
- finally {
- process.chdir(originalCwd);
- }
- }
- else {
- const escapedScript = path
- .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
- .replace(/'/g, "''")
- .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
- const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
- const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
- const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
- const args = [
- '-NoLogo',
- '-Sta',
- '-NoProfile',
- '-NonInteractive',
- '-ExecutionPolicy',
- 'Unrestricted',
- '-Command',
- command
- ];
- const options = {
- silent: true
- };
- try {
- const powershellPath = yield io.which('powershell', true);
- yield exec_1.exec(`"${powershellPath}"`, args, options);
- }
- finally {
- process.chdir(originalCwd);
- }
- }
- return dest;
- });
-}
-exports.extract7z = extract7z;
-/**
- * Extract a compressed tar archive
- *
- * @param file path to the tar
- * @param dest destination directory. Optional.
- * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
- * @returns path to the destination directory
- */
-function extractTar(file, dest, flags = 'xz') {
- return __awaiter(this, void 0, void 0, function* () {
- if (!file) {
- throw new Error("parameter 'file' is required");
- }
- // Create dest
- dest = yield _createExtractFolder(dest);
- // Determine whether GNU tar
- core.debug('Checking tar --version');
- let versionOutput = '';
- yield exec_1.exec('tar --version', [], {
- ignoreReturnCode: true,
- silent: true,
- listeners: {
- stdout: (data) => (versionOutput += data.toString()),
- stderr: (data) => (versionOutput += data.toString())
- }
- });
- core.debug(versionOutput.trim());
- const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
- // Initialize args
- const args = [flags];
- let destArg = dest;
- let fileArg = file;
- if (IS_WINDOWS && isGnuTar) {
- args.push('--force-local');
- destArg = dest.replace(/\\/g, '/');
- // Technically only the dest needs to have `/` but for aesthetic consistency
- // convert slashes in the file arg too.
- fileArg = file.replace(/\\/g, '/');
- }
- if (isGnuTar) {
- // Suppress warnings when using GNU tar to extract archives created by BSD tar
- args.push('--warning=no-unknown-keyword');
- }
- args.push('-C', destArg, '-f', fileArg);
- yield exec_1.exec(`tar`, args);
- return dest;
- });
-}
-exports.extractTar = extractTar;
-/**
- * Extract a zip
- *
- * @param file path to the zip
- * @param dest destination directory. Optional.
- * @returns path to the destination directory
- */
-function extractZip(file, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- if (!file) {
- throw new Error("parameter 'file' is required");
- }
- dest = yield _createExtractFolder(dest);
- if (IS_WINDOWS) {
- yield extractZipWin(file, dest);
- }
- else {
- yield extractZipNix(file, dest);
- }
- return dest;
- });
-}
-exports.extractZip = extractZip;
-function extractZipWin(file, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- // build the powershell command
- const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
- const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
- const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
- // run powershell
- const powershellPath = yield io.which('powershell');
- const args = [
- '-NoLogo',
- '-Sta',
- '-NoProfile',
- '-NonInteractive',
- '-ExecutionPolicy',
- 'Unrestricted',
- '-Command',
- command
- ];
- yield exec_1.exec(`"${powershellPath}"`, args);
- });
-}
-function extractZipNix(file, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- const unzipPath = yield io.which('unzip');
- yield exec_1.exec(`"${unzipPath}"`, [file], { cwd: dest });
- });
-}
-/**
- * Caches a directory and installs it into the tool cacheDir
- *
- * @param sourceDir the directory to cache into tools
- * @param tool tool name
- * @param version version of the tool. semver format
- * @param arch architecture of the tool. Optional. Defaults to machine architecture
- */
-function cacheDir(sourceDir, tool, version, arch) {
- return __awaiter(this, void 0, void 0, function* () {
- version = semver.clean(version) || version;
- arch = arch || os.arch();
- core.debug(`Caching tool ${tool} ${version} ${arch}`);
- core.debug(`source dir: ${sourceDir}`);
- if (!fs.statSync(sourceDir).isDirectory()) {
- throw new Error('sourceDir is not a directory');
- }
- // Create the tool dir
- const destPath = yield _createToolPath(tool, version, arch);
- // copy each child item. do not move. move can fail on Windows
- // due to anti-virus software having an open handle on a file.
- for (const itemName of fs.readdirSync(sourceDir)) {
- const s = path.join(sourceDir, itemName);
- yield io.cp(s, destPath, { recursive: true });
- }
- // write .complete
- _completeToolPath(tool, version, arch);
- return destPath;
- });
-}
-exports.cacheDir = cacheDir;
-/**
- * Caches a downloaded file (GUID) and installs it
- * into the tool cache with a given targetName
- *
- * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
- * @param targetFile the name of the file name in the tools directory
- * @param tool tool name
- * @param version version of the tool. semver format
- * @param arch architecture of the tool. Optional. Defaults to machine architecture
- */
-function cacheFile(sourceFile, targetFile, tool, version, arch) {
- return __awaiter(this, void 0, void 0, function* () {
- version = semver.clean(version) || version;
- arch = arch || os.arch();
- core.debug(`Caching tool ${tool} ${version} ${arch}`);
- core.debug(`source file: ${sourceFile}`);
- if (!fs.statSync(sourceFile).isFile()) {
- throw new Error('sourceFile is not a file');
- }
- // create the tool dir
- const destFolder = yield _createToolPath(tool, version, arch);
- // copy instead of move. move can fail on Windows due to
- // anti-virus software having an open handle on a file.
- const destPath = path.join(destFolder, targetFile);
- core.debug(`destination file ${destPath}`);
- yield io.cp(sourceFile, destPath);
- // write .complete
- _completeToolPath(tool, version, arch);
- return destFolder;
- });
-}
-exports.cacheFile = cacheFile;
-/**
- * Finds the path to a tool version in the local installed tool cache
- *
- * @param toolName name of the tool
- * @param versionSpec version of the tool
- * @param arch optional arch. defaults to arch of computer
- */
-function find(toolName, versionSpec, arch) {
- if (!toolName) {
- throw new Error('toolName parameter is required');
- }
- if (!versionSpec) {
- throw new Error('versionSpec parameter is required');
- }
- arch = arch || os.arch();
- // attempt to resolve an explicit version
- if (!_isExplicitVersion(versionSpec)) {
- const localVersions = findAllVersions(toolName, arch);
- const match = _evaluateVersions(localVersions, versionSpec);
- versionSpec = match;
- }
- // check for the explicit version in the cache
- let toolPath = '';
- if (versionSpec) {
- versionSpec = semver.clean(versionSpec) || '';
- const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);
- core.debug(`checking cache: ${cachePath}`);
- if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
- core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
- toolPath = cachePath;
- }
- else {
- core.debug('not found');
- }
- }
- return toolPath;
-}
-exports.find = find;
-/**
- * Finds the paths to all versions of a tool that are installed in the local tool cache
- *
- * @param toolName name of the tool
- * @param arch optional arch. defaults to arch of computer
- */
-function findAllVersions(toolName, arch) {
- const versions = [];
- arch = arch || os.arch();
- const toolPath = path.join(_getCacheDirectory(), toolName);
- if (fs.existsSync(toolPath)) {
- const children = fs.readdirSync(toolPath);
- for (const child of children) {
- if (_isExplicitVersion(child)) {
- const fullPath = path.join(toolPath, child, arch || '');
- if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
- versions.push(child);
- }
- }
- }
- }
- return versions;
-}
-exports.findAllVersions = findAllVersions;
-function _createExtractFolder(dest) {
- return __awaiter(this, void 0, void 0, function* () {
- if (!dest) {
- // create a temp dir
- dest = path.join(_getTempDirectory(), v4_1.default());
- }
- yield io.mkdirP(dest);
- return dest;
- });
-}
-function _createToolPath(tool, version, arch) {
- return __awaiter(this, void 0, void 0, function* () {
- const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
- core.debug(`destination ${folderPath}`);
- const markerPath = `${folderPath}.complete`;
- yield io.rmRF(folderPath);
- yield io.rmRF(markerPath);
- yield io.mkdirP(folderPath);
- return folderPath;
- });
-}
-function _completeToolPath(tool, version, arch) {
- const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
- const markerPath = `${folderPath}.complete`;
- fs.writeFileSync(markerPath, '');
- core.debug('finished caching tool');
-}
-function _isExplicitVersion(versionSpec) {
- const c = semver.clean(versionSpec) || '';
- core.debug(`isExplicit: ${c}`);
- const valid = semver.valid(c) != null;
- core.debug(`explicit? ${valid}`);
- return valid;
-}
-function _evaluateVersions(versions, versionSpec) {
- let version = '';
- core.debug(`evaluating ${versions.length} versions`);
- versions = versions.sort((a, b) => {
- if (semver.gt(a, b)) {
- return 1;
- }
- return -1;
- });
- for (let i = versions.length - 1; i >= 0; i--) {
- const potential = versions[i];
- const satisfied = semver.satisfies(potential, versionSpec);
- if (satisfied) {
- version = potential;
- break;
- }
- }
- if (version) {
- core.debug(`matched: ${version}`);
- }
- else {
- core.debug('match not found');
- }
- return version;
-}
-/**
- * Gets RUNNER_TOOL_CACHE
- */
-function _getCacheDirectory() {
- const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';
- assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');
- return cacheDirectory;
-}
-/**
- * Gets RUNNER_TEMP
- */
-function _getTempDirectory() {
- const tempDirectory = process.env['RUNNER_TEMP'] || '';
- assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
- return tempDirectory;
-}
-/**
- * Gets a global variable
- */
-function _getGlobal(key, defaultValue) {
- /* eslint-disable @typescript-eslint/no-explicit-any */
- const value = global[key];
- /* eslint-enable @typescript-eslint/no-explicit-any */
- return value !== undefined ? value : defaultValue;
-}
-//# sourceMappingURL=tool-cache.js.map
-
-/***/ }),
-
-/***/ 539:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-const url = __webpack_require__(835);
-const http = __webpack_require__(605);
-const https = __webpack_require__(211);
-const pm = __webpack_require__(950);
-let tunnel;
-var HttpCodes;
-(function (HttpCodes) {
- HttpCodes[HttpCodes["OK"] = 200] = "OK";
- HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
- HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
- HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
- HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
- HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
- HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
- HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
- HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
- HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
- HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
- HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
- HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
- HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
- HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
- HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
- HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
- HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
- HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
- HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
- HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
- HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
- HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
- HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
- HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
- HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
- HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
-})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
-var Headers;
-(function (Headers) {
- Headers["Accept"] = "accept";
- Headers["ContentType"] = "content-type";
-})(Headers = exports.Headers || (exports.Headers = {}));
-var MediaTypes;
-(function (MediaTypes) {
- MediaTypes["ApplicationJson"] = "application/json";
-})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
-/**
- * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
- * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
- */
-function getProxyUrl(serverUrl) {
- let proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
- return proxyUrl ? proxyUrl.href : '';
-}
-exports.getProxyUrl = getProxyUrl;
-const HttpRedirectCodes = [
- HttpCodes.MovedPermanently,
- HttpCodes.ResourceMoved,
- HttpCodes.SeeOther,
- HttpCodes.TemporaryRedirect,
- HttpCodes.PermanentRedirect
-];
-const HttpResponseRetryCodes = [
- HttpCodes.BadGateway,
- HttpCodes.ServiceUnavailable,
- HttpCodes.GatewayTimeout
-];
-const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
-const ExponentialBackoffCeiling = 10;
-const ExponentialBackoffTimeSlice = 5;
-class HttpClientResponse {
- constructor(message) {
- this.message = message;
- }
- readBody() {
- return new Promise(async (resolve, reject) => {
- let output = Buffer.alloc(0);
- this.message.on('data', (chunk) => {
- output = Buffer.concat([output, chunk]);
- });
- this.message.on('end', () => {
- resolve(output.toString());
- });
- });
- }
-}
-exports.HttpClientResponse = HttpClientResponse;
-function isHttps(requestUrl) {
- let parsedUrl = url.parse(requestUrl);
- return parsedUrl.protocol === 'https:';
-}
-exports.isHttps = isHttps;
-class HttpClient {
- constructor(userAgent, handlers, requestOptions) {
- this._ignoreSslError = false;
- this._allowRedirects = true;
- this._allowRedirectDowngrade = false;
- this._maxRedirects = 50;
- this._allowRetries = false;
- this._maxRetries = 1;
- this._keepAlive = false;
- this._disposed = false;
- this.userAgent = userAgent;
- this.handlers = handlers || [];
- this.requestOptions = requestOptions;
- if (requestOptions) {
- if (requestOptions.ignoreSslError != null) {
- this._ignoreSslError = requestOptions.ignoreSslError;
- }
- this._socketTimeout = requestOptions.socketTimeout;
- if (requestOptions.allowRedirects != null) {
- this._allowRedirects = requestOptions.allowRedirects;
- }
- if (requestOptions.allowRedirectDowngrade != null) {
- this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
- }
- if (requestOptions.maxRedirects != null) {
- this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
- }
- if (requestOptions.keepAlive != null) {
- this._keepAlive = requestOptions.keepAlive;
- }
- if (requestOptions.allowRetries != null) {
- this._allowRetries = requestOptions.allowRetries;
- }
- if (requestOptions.maxRetries != null) {
- this._maxRetries = requestOptions.maxRetries;
- }
- }
- }
- options(requestUrl, additionalHeaders) {
- return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
- }
- get(requestUrl, additionalHeaders) {
- return this.request('GET', requestUrl, null, additionalHeaders || {});
- }
- del(requestUrl, additionalHeaders) {
- return this.request('DELETE', requestUrl, null, additionalHeaders || {});
- }
- post(requestUrl, data, additionalHeaders) {
- return this.request('POST', requestUrl, data, additionalHeaders || {});
- }
- patch(requestUrl, data, additionalHeaders) {
- return this.request('PATCH', requestUrl, data, additionalHeaders || {});
- }
- put(requestUrl, data, additionalHeaders) {
- return this.request('PUT', requestUrl, data, additionalHeaders || {});
- }
- head(requestUrl, additionalHeaders) {
- return this.request('HEAD', requestUrl, null, additionalHeaders || {});
- }
- sendStream(verb, requestUrl, stream, additionalHeaders) {
- return this.request(verb, requestUrl, stream, additionalHeaders);
- }
- /**
- * Gets a typed object from an endpoint
- * Be aware that a 404 (Not Found) response resolves with a null result. Other errors (4xx, 5xx) reject the promise
- */
- async getJson(requestUrl, additionalHeaders = {}) {
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- let res = await this.get(requestUrl, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- }
- async postJson(requestUrl, obj, additionalHeaders = {}) {
- let data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
- let res = await this.post(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- }
- async putJson(requestUrl, obj, additionalHeaders = {}) {
- let data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
- let res = await this.put(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- }
- async patchJson(requestUrl, obj, additionalHeaders = {}) {
- let data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
- let res = await this.patch(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- }
- /**
- * Makes a raw http request.
- * All other methods such as get, post, patch, and request ultimately call this.
- * Prefer get, del, post and patch
- */
- async request(verb, requestUrl, data, headers) {
- if (this._disposed) {
- throw new Error('Client has already been disposed.');
- }
- let parsedUrl = url.parse(requestUrl);
- let info = this._prepareRequest(verb, parsedUrl, headers);
- // Only perform retries on reads since writes may not be idempotent.
- let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
- ? this._maxRetries + 1
- : 1;
- let numTries = 0;
- let response;
- while (numTries < maxTries) {
- response = await this.requestRaw(info, data);
- // Check if it's an authentication challenge
- if (response &&
- response.message &&
- response.message.statusCode === HttpCodes.Unauthorized) {
- let authenticationHandler;
- for (let i = 0; i < this.handlers.length; i++) {
- if (this.handlers[i].canHandleAuthentication(response)) {
- authenticationHandler = this.handlers[i];
- break;
- }
- }
- if (authenticationHandler) {
- return authenticationHandler.handleAuthentication(this, info, data);
- }
- else {
- // We have received an unauthorized response but have no handlers to handle it.
- // Let the response return to the caller.
- return response;
- }
- }
- let redirectsRemaining = this._maxRedirects;
- while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
- this._allowRedirects &&
- redirectsRemaining > 0) {
- const redirectUrl = response.message.headers['location'];
- if (!redirectUrl) {
- // if there's no location to redirect to, we won't
- break;
- }
- let parsedRedirectUrl = url.parse(redirectUrl);
- if (parsedUrl.protocol == 'https:' &&
- parsedUrl.protocol != parsedRedirectUrl.protocol &&
- !this._allowRedirectDowngrade) {
- throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
- }
- // we need to finish reading the response before reassigning it,
- // otherwise the open socket will leak.
- await response.readBody();
- // strip authorization header if redirected to a different hostname
- if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
- for (let header in headers) {
- // header names are case insensitive
- if (header.toLowerCase() === 'authorization') {
- delete headers[header];
- }
- }
- }
- // let's make the request with the new redirectUrl
- info = this._prepareRequest(verb, parsedRedirectUrl, headers);
- response = await this.requestRaw(info, data);
- redirectsRemaining--;
- }
- if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
- // If not a retry code, return immediately instead of retrying
- return response;
- }
- numTries += 1;
- if (numTries < maxTries) {
- await response.readBody();
- await this._performExponentialBackoff(numTries);
- }
- }
- return response;
- }
- /**
- * Needs to be called if keepAlive is set to true in request options.
- */
- dispose() {
- if (this._agent) {
- this._agent.destroy();
- }
- this._disposed = true;
- }
- /**
- * Raw request.
- * @param info
- * @param data
- */
- requestRaw(info, data) {
- return new Promise((resolve, reject) => {
- let callbackForResult = function (err, res) {
- if (err) {
- reject(err);
- }
- resolve(res);
- };
- this.requestRawWithCallback(info, data, callbackForResult);
- });
- }
- /**
- * Raw request with callback.
- * @param info
- * @param data
- * @param onResult
- */
- requestRawWithCallback(info, data, onResult) {
- let socket;
- if (typeof data === 'string') {
- info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
- }
- let callbackCalled = false;
- let handleResult = (err, res) => {
- if (!callbackCalled) {
- callbackCalled = true;
- onResult(err, res);
- }
- };
- let req = info.httpModule.request(info.options, (msg) => {
- let res = new HttpClientResponse(msg);
- handleResult(null, res);
- });
- req.on('socket', sock => {
- socket = sock;
- });
- // If we ever get disconnected, we want the socket to time out eventually
- req.setTimeout(this._socketTimeout || 3 * 60000, () => {
- if (socket) {
- socket.end();
- }
- handleResult(new Error('Request timeout: ' + info.options.path), null);
- });
- req.on('error', function (err) {
- // err has statusCode property
- // res should have headers
- handleResult(err, null);
- });
- if (data && typeof data === 'string') {
- req.write(data, 'utf8');
- }
- if (data && typeof data !== 'string') {
- data.on('close', function () {
- req.end();
- });
- data.pipe(req);
- }
- else {
- req.end();
- }
- }
- /**
- * Gets an http agent. This function is useful when you need an http agent that handles
- * routing through a proxy server - depending upon the url and proxy environment variables.
- * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
- */
- getAgent(serverUrl) {
- let parsedUrl = url.parse(serverUrl);
- return this._getAgent(parsedUrl);
- }
- _prepareRequest(method, requestUrl, headers) {
- const info = {};
- info.parsedUrl = requestUrl;
- const usingSsl = info.parsedUrl.protocol === 'https:';
- info.httpModule = usingSsl ? https : http;
- const defaultPort = usingSsl ? 443 : 80;
- info.options = {};
- info.options.host = info.parsedUrl.hostname;
- info.options.port = info.parsedUrl.port
- ? parseInt(info.parsedUrl.port)
- : defaultPort;
- info.options.path =
- (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
- info.options.method = method;
- info.options.headers = this._mergeHeaders(headers);
- if (this.userAgent != null) {
- info.options.headers['user-agent'] = this.userAgent;
- }
- info.options.agent = this._getAgent(info.parsedUrl);
- // gives handlers an opportunity to participate
- if (this.handlers) {
- this.handlers.forEach(handler => {
- handler.prepareRequest(info.options);
- });
- }
- return info;
- }
- _mergeHeaders(headers) {
- const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
- if (this.requestOptions && this.requestOptions.headers) {
- return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
- }
- return lowercaseKeys(headers || {});
- }
- _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
- const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
- let clientHeader;
- if (this.requestOptions && this.requestOptions.headers) {
- clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
- }
- return additionalHeaders[header] || clientHeader || _default;
- }
- _getAgent(parsedUrl) {
- let agent;
- let proxyUrl = pm.getProxyUrl(parsedUrl);
- let useProxy = proxyUrl && proxyUrl.hostname;
- if (this._keepAlive && useProxy) {
- agent = this._proxyAgent;
- }
- if (this._keepAlive && !useProxy) {
- agent = this._agent;
- }
- // if agent is already assigned use that agent.
- if (!!agent) {
- return agent;
- }
- const usingSsl = parsedUrl.protocol === 'https:';
- let maxSockets = 100;
- if (!!this.requestOptions) {
- maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
- }
- if (useProxy) {
- // If using proxy, need tunnel
- if (!tunnel) {
- tunnel = __webpack_require__(413);
- }
- const agentOptions = {
- maxSockets: maxSockets,
- keepAlive: this._keepAlive,
- proxy: {
- proxyAuth: proxyUrl.auth,
- host: proxyUrl.hostname,
- port: proxyUrl.port
- }
- };
- let tunnelAgent;
- const overHttps = proxyUrl.protocol === 'https:';
- if (usingSsl) {
- tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
- }
- else {
- tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
- }
- agent = tunnelAgent(agentOptions);
- this._proxyAgent = agent;
- }
- // if reusing the agent across requests and a tunneling agent isn't assigned, create a new agent
- if (this._keepAlive && !agent) {
- const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
- agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
- this._agent = agent;
- }
- // if not using a private agent and a tunnel agent isn't set up, then use the global agent
- if (!agent) {
- agent = usingSsl ? https.globalAgent : http.globalAgent;
- }
- if (usingSsl && this._ignoreSslError) {
- // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
- // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
- // we have to cast it to any and change it directly
- agent.options = Object.assign(agent.options || {}, {
- rejectUnauthorized: false
- });
- }
- return agent;
- }
- _performExponentialBackoff(retryNumber) {
- retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
- const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
- return new Promise(resolve => setTimeout(() => resolve(), ms));
- }
- static dateTimeDeserializer(key, value) {
- if (typeof value === 'string') {
- let a = new Date(value);
- if (!isNaN(a.valueOf())) {
- return a;
- }
- }
- return value;
- }
- async _processResponse(res, options) {
- return new Promise(async (resolve, reject) => {
- const statusCode = res.message.statusCode;
- const response = {
- statusCode: statusCode,
- result: null,
- headers: {}
- };
- // not found leads to null obj returned
- if (statusCode == HttpCodes.NotFound) {
- resolve(response);
- }
- let obj;
- let contents;
- // get the result from the body
- try {
- contents = await res.readBody();
- if (contents && contents.length > 0) {
- if (options && options.deserializeDates) {
- obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
- }
- else {
- obj = JSON.parse(contents);
- }
- response.result = obj;
- }
- response.headers = res.message.headers;
- }
- catch (err) {
- // Invalid resource (contents not json); leaving result obj null
- }
- // note that 3xx redirects are handled by the http layer.
- if (statusCode > 299) {
- let msg;
- // if exception/error in body, attempt to get better error
- if (obj && obj.message) {
- msg = obj.message;
- }
- else if (contents && contents.length > 0) {
- // it may be the case that the exception is in the body message as string
- msg = contents;
- }
- else {
- msg = 'Failed request: (' + statusCode + ')';
- }
- let err = new Error(msg);
- // attach statusCode and body obj (if available) to the error object
- err['statusCode'] = statusCode;
- if (response.result) {
- err['result'] = response.result;
- }
- reject(err);
- }
- else {
- resolve(response);
- }
- });
- }
-}
-exports.HttpClient = HttpClient;
-
-
-/***/ }),
-
-/***/ 570:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const exec_1 = __webpack_require__(986);
-const io = __importStar(__webpack_require__(1));
-const fs_1 = __webpack_require__(747);
-const path = __importStar(__webpack_require__(622));
-const constants_1 = __webpack_require__(929);
-function isGnuTar() {
- return __awaiter(this, void 0, void 0, function* () {
- core.debug("Checking tar --version");
- let versionOutput = "";
- yield exec_1.exec("tar --version", [], {
- ignoreReturnCode: true,
- silent: true,
- listeners: {
- stdout: (data) => (versionOutput += data.toString()),
- stderr: (data) => (versionOutput += data.toString())
- }
- });
- core.debug(versionOutput.trim());
- return versionOutput.toUpperCase().includes("GNU TAR");
- });
-}
-exports.isGnuTar = isGnuTar;
-function getTarPath(args) {
- return __awaiter(this, void 0, void 0, function* () {
- // Explicitly use BSD Tar on Windows
- const IS_WINDOWS = process.platform === "win32";
- if (IS_WINDOWS) {
- const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
- if (fs_1.existsSync(systemTar)) {
- return systemTar;
- }
- else if (isGnuTar()) {
- args.push("--force-local");
- }
- }
- return yield io.which("tar", true);
- });
-}
-function execTar(args, cwd) {
- return __awaiter(this, void 0, void 0, function* () {
- try {
- yield exec_1.exec(`"${yield getTarPath(args)}"`, args, { cwd: cwd });
- }
- catch (error) {
- throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
- }
- });
-}
-function getWorkingDirectory() {
- var _a;
- return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd();
-}
-function extractTar(archivePath) {
- return __awaiter(this, void 0, void 0, function* () {
- // Create directory to extract tar into
- const workingDirectory = getWorkingDirectory();
- yield io.mkdirP(workingDirectory);
- const args = [
- "-xz",
- "-f",
- archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
- "-P",
- "-C",
- workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
- ];
- yield execTar(args);
- });
-}
-exports.extractTar = extractTar;
-function createTar(archiveFolder, sourceDirectories) {
- return __awaiter(this, void 0, void 0, function* () {
- // Write source directories to manifest.txt to avoid command length limits
- const manifestFilename = "manifest.txt";
- fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n"));
- const workingDirectory = getWorkingDirectory();
- const args = [
- "-cz",
- "-f",
- constants_1.CacheFilename.replace(new RegExp("\\" + path.sep, "g"), "/"),
- "-P",
- "-C",
- workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
- "--files-from",
- manifestFilename
- ];
- yield execTar(args, archiveFolder);
- });
-}
-exports.createTar = createTar;
-
-
-/***/ }),
-
-/***/ 597:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-const pathHelper = __webpack_require__(972);
-const internal_match_kind_1 = __webpack_require__(327);
-const IS_WINDOWS = process.platform === 'win32';
-/**
- * Given an array of patterns, returns an array of paths to search.
- * Duplicates and paths under other included paths are filtered out.
- */
-function getSearchPaths(patterns) {
- // Ignore negate patterns
- patterns = patterns.filter(x => !x.negate);
- // Create a map of all search paths
- const searchPathMap = {};
- for (const pattern of patterns) {
- const key = IS_WINDOWS
- ? pattern.searchPath.toUpperCase()
- : pattern.searchPath;
- searchPathMap[key] = 'candidate';
- }
- const result = [];
- for (const pattern of patterns) {
- // Check if already included
- const key = IS_WINDOWS
- ? pattern.searchPath.toUpperCase()
- : pattern.searchPath;
- if (searchPathMap[key] === 'included') {
- continue;
- }
- // Check for an ancestor search path
- let foundAncestor = false;
- let tempKey = key;
- let parent = pathHelper.dirname(tempKey);
- while (parent !== tempKey) {
- if (searchPathMap[parent]) {
- foundAncestor = true;
- break;
- }
- tempKey = parent;
- parent = pathHelper.dirname(tempKey);
- }
- // Include the search pattern in the result
- if (!foundAncestor) {
- result.push(pattern.searchPath);
- searchPathMap[key] = 'included';
- }
- }
- return result;
-}
-exports.getSearchPaths = getSearchPaths;
-/**
- * Matches the patterns against the path
- */
-function match(patterns, itemPath) {
- let result = internal_match_kind_1.MatchKind.None;
- for (const pattern of patterns) {
- if (pattern.negate) {
- result &= ~pattern.match(itemPath);
- }
- else {
- result |= pattern.match(itemPath);
- }
- }
- return result;
-}
-exports.match = match;
-/**
- * Checks whether to descend further into the directory
- */
-function partialMatch(patterns, itemPath) {
- return patterns.some(x => !x.negate && x.partialMatch(itemPath));
-}
-exports.partialMatch = partialMatch;
-//# sourceMappingURL=internal-pattern-helper.js.map
-
-/***/ }),
-
-/***/ 601:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __webpack_require__(470);
-/**
- * Returns a copy with defaults filled in.
- */
-function getOptions(copy) {
- const result = {
- followSymbolicLinks: true,
- implicitDescendants: true,
- omitBrokenSymbolicLinks: true
- };
- if (copy) {
- if (typeof copy.followSymbolicLinks === 'boolean') {
- result.followSymbolicLinks = copy.followSymbolicLinks;
- core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
- }
- if (typeof copy.implicitDescendants === 'boolean') {
- result.implicitDescendants = copy.implicitDescendants;
- core.debug(`implicitDescendants '${result.implicitDescendants}'`);
- }
- if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
- result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
- core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
- }
- }
- return result;
-}
-exports.getOptions = getOptions;
-//# sourceMappingURL=internal-glob-options-helper.js.map
-
-/***/ }),
-
-/***/ 605:
-/***/ (function(module) {
-
-module.exports = require("http");
-
-/***/ }),
-
-/***/ 614:
-/***/ (function(module) {
-
-module.exports = require("events");
-
-/***/ }),
-
-/***/ 621:
-/***/ (function(module) {
-
-"use strict";
-
-module.exports = balanced;
-function balanced(a, b, str) {
- if (a instanceof RegExp) a = maybeMatch(a, str);
- if (b instanceof RegExp) b = maybeMatch(b, str);
-
- var r = range(a, b, str);
-
- return r && {
- start: r[0],
- end: r[1],
- pre: str.slice(0, r[0]),
- body: str.slice(r[0] + a.length, r[1]),
- post: str.slice(r[1] + b.length)
- };
-}
-
-function maybeMatch(reg, str) {
- var m = str.match(reg);
- return m ? m[0] : null;
-}
-
-balanced.range = range;
-function range(a, b, str) {
- var begs, beg, left, right, result;
- var ai = str.indexOf(a);
- var bi = str.indexOf(b, ai + 1);
- var i = ai;
-
- if (ai >= 0 && bi > 0) {
- begs = [];
- left = str.length;
-
- while (i >= 0 && !result) {
- if (i == ai) {
- begs.push(i);
- ai = str.indexOf(a, i + 1);
- } else if (begs.length == 1) {
- result = [ begs.pop(), bi ];
- } else {
- beg = begs.pop();
- if (beg < left) {
- left = beg;
- right = bi;
- }
-
- bi = str.indexOf(b, i + 1);
- }
-
- i = ai < bi && ai >= 0 ? ai : bi;
- }
-
- if (begs.length) {
- result = [ left, right ];
- }
- }
-
- return result;
-}
-
-
-/***/ }),
-
-/***/ 622:
-/***/ (function(module) {
-
-module.exports = require("path");
-
-/***/ }),
-
-/***/ 631:
-/***/ (function(module) {
-
-module.exports = require("net");
-
-/***/ }),
-
-/***/ 634:
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const tc = __importStar(__webpack_require__(533));
-const path = __importStar(__webpack_require__(622));
-const semver = __importStar(__webpack_require__(280));
-const httpm = __importStar(__webpack_require__(539));
-const sys = __importStar(__webpack_require__(646));
-const core_1 = __webpack_require__(470);
-function downloadGo(versionSpec, stable) {
- return __awaiter(this, void 0, void 0, function* () {
- let toolPath;
- try {
- let match = yield findMatch(versionSpec, stable);
- if (match) {
- // download
- core_1.debug(`match ${match.version}`);
- let downloadUrl = `https://storage.googleapis.com/golang/${match.files[0].filename}`;
- console.log(`Downloading from ${downloadUrl}`);
- let downloadPath = yield tc.downloadTool(downloadUrl);
- core_1.debug(`downloaded to ${downloadPath}`);
- // extract
- console.log('Extracting ...');
- let extPath = sys.getPlatform() == 'windows'
- ? yield tc.extractZip(downloadPath)
- : yield tc.extractTar(downloadPath);
- core_1.debug(`extracted to ${extPath}`);
- // extracts with a root folder that matches the fileName downloaded
- const toolRoot = path.join(extPath, 'go');
- toolPath = yield tc.cacheDir(toolRoot, 'go', makeSemver(match.version));
- }
- }
- catch (error) {
- throw new Error(`Failed to download version ${versionSpec}: ${error}`);
- }
- return toolPath;
- });
-}
-exports.downloadGo = downloadGo;
-function findMatch(versionSpec, stable) {
- return __awaiter(this, void 0, void 0, function* () {
- let archFilter = sys.getArch();
- let platFilter = sys.getPlatform();
- let result;
- let match;
- const dlUrl = 'https://golang.org/dl/?mode=json&include=all';
- let candidates = yield module.exports.getVersions(dlUrl);
- if (!candidates) {
- throw new Error(`golang download url did not return results`);
- }
- let goFile;
- for (let i = 0; i < candidates.length; i++) {
- let candidate = candidates[i];
- let version = makeSemver(candidate.version);
-            // 1.13.0 is advertised as 1.13, which prevents matching exactly 1.13.0,
-            // since a semver spec of 1.13 would match the latest 1.13.x
- let parts = version.split('.');
- if (parts.length == 2) {
- version = version + '.0';
- }
- core_1.debug(`check ${version} satisfies ${versionSpec}`);
- if (semver.satisfies(version, versionSpec) &&
- (!stable || candidate.stable === stable)) {
- goFile = candidate.files.find(file => {
- core_1.debug(`${file.arch}===${archFilter} && ${file.os}===${platFilter}`);
- return file.arch === archFilter && file.os === platFilter;
- });
- if (goFile) {
- core_1.debug(`matched ${candidate.version}`);
- match = candidate;
- break;
- }
- }
- }
- if (match && goFile) {
- // clone since we're mutating the file list to be only the file that matches
- result = Object.assign({}, match);
- result.files = [goFile];
- }
- return result;
- });
-}
-exports.findMatch = findMatch;
-function getVersions(dlUrl) {
- return __awaiter(this, void 0, void 0, function* () {
- // this returns versions descending so latest is first
- let http = new httpm.HttpClient('setup-go');
- return (yield http.getJson(dlUrl)).result;
- });
-}
-exports.getVersions = getVersions;
-//
-// Convert the go version syntax into semver for semver matching
-// 1.13.1 => 1.13.1
-// 1.13 => 1.13.0
-// 1.10beta1 => 1.10.0-beta1, 1.10rc1 => 1.10.0-rc1
-// 1.8.5beta1 => 1.8.5-beta1, 1.8.5rc1 => 1.8.5-rc1
-function makeSemver(version) {
- version = version.replace('go', '');
- version = version.replace('beta', '-beta').replace('rc', '-rc');
- let parts = version.split('-');
- let verPart = parts[0];
- let prereleasePart = parts.length > 1 ? `-${parts[1]}` : '';
- let verParts = verPart.split('.');
- if (verParts.length == 2) {
- verPart += '.0';
- }
- return `${verPart}${prereleasePart}`;
-}
-exports.makeSemver = makeSemver;
-
-
-/***/ }),
-
-/***/ 646:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-let os = __webpack_require__(87);
-function getPlatform() {
- // darwin and linux match already
- // freebsd not supported yet but future proofed.
- // 'aix', 'darwin', 'freebsd', 'linux', 'openbsd', 'sunos', and 'win32'
- let plat = os.platform();
- // wants 'darwin', 'freebsd', 'linux', 'windows'
- if (plat === 'win32') {
- plat = 'windows';
- }
- return plat;
-}
-exports.getPlatform = getPlatform;
-function getArch() {
- // 'arm', 'arm64', 'ia32', 'mips', 'mipsel', 'ppc', 'ppc64', 's390', 's390x', 'x32', and 'x64'.
- let arch = os.arch();
-    // wants amd64, 386, arm64, armv6l, ppc64le, s390x
- // currently not supported by runner but future proofed mapping
- switch (arch) {
- case 'x64':
- arch = 'amd64';
- break;
- // case 'ppc':
- // arch = 'ppc64';
- // break;
- case 'x32':
- arch = '386';
- break;
- }
- return arch;
-}
-exports.getArch = getArch;
-
-
-/***/ }),
-
-/***/ 655:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const tc = __importStar(__webpack_require__(533));
-const path_1 = __importDefault(__webpack_require__(622));
-const main_1 = __webpack_require__(514);
-// installLint returns the path to the installed golangci-lint binary.
-function installLint(versionConfig) {
- return __awaiter(this, void 0, void 0, function* () {
- core.info(`Installing golangci-lint ${versionConfig.TargetVersion}...`);
- const startedAt = Date.now();
- core.info(`Downloading ${versionConfig.AssetURL} ...`);
- const tarGzPath = yield tc.downloadTool(versionConfig.AssetURL);
- const extractedDir = yield tc.extractTar(tarGzPath, process.env.HOME);
- const urlParts = versionConfig.AssetURL.split(`/`);
- const dirName = urlParts[urlParts.length - 1].replace(/\.tar\.gz$/, ``);
- const lintPath = path_1.default.join(extractedDir, dirName, `golangci-lint`);
- core.info(`Installed golangci-lint into ${lintPath} in ${Date.now() - startedAt}ms`);
- return lintPath;
- });
-}
-exports.installLint = installLint;
-function installGo() {
- return __awaiter(this, void 0, void 0, function* () {
- const startedAt = Date.now();
- process.env[`INPUT_GO-VERSION`] = `1`;
- yield main_1.run();
- core.info(`Installed Go in ${Date.now() - startedAt}ms`);
- });
-}
-exports.installGo = installGo;
-
-
-/***/ }),
-
-/***/ 669:
-/***/ (function(module) {
-
-module.exports = require("util");
-
-/***/ }),
-
-/***/ 672:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var _a;
-Object.defineProperty(exports, "__esModule", { value: true });
-const assert_1 = __webpack_require__(357);
-const fs = __webpack_require__(747);
-const path = __webpack_require__(622);
-_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
-exports.IS_WINDOWS = process.platform === 'win32';
-function exists(fsPath) {
- return __awaiter(this, void 0, void 0, function* () {
- try {
- yield exports.stat(fsPath);
- }
- catch (err) {
- if (err.code === 'ENOENT') {
- return false;
- }
- throw err;
- }
- return true;
- });
-}
-exports.exists = exists;
-function isDirectory(fsPath, useStat = false) {
- return __awaiter(this, void 0, void 0, function* () {
- const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
- return stats.isDirectory();
- });
-}
-exports.isDirectory = isDirectory;
-/**
- * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
- * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
- */
-function isRooted(p) {
- p = normalizeSeparators(p);
- if (!p) {
- throw new Error('isRooted() parameter "p" cannot be empty');
- }
- if (exports.IS_WINDOWS) {
- return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello
- ); // e.g. C: or C:\hello
- }
- return p.startsWith('/');
-}
-exports.isRooted = isRooted;
-/**
- * Recursively create a directory at `fsPath`.
- *
- * This implementation is optimistic, meaning it attempts to create the full
- * path first, and backs up the path stack from there.
- *
- * @param fsPath The path to create
- * @param maxDepth The maximum recursion depth
- * @param depth The current recursion depth
- */
-function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
- return __awaiter(this, void 0, void 0, function* () {
- assert_1.ok(fsPath, 'a path argument must be provided');
- fsPath = path.resolve(fsPath);
- if (depth >= maxDepth)
- return exports.mkdir(fsPath);
- try {
- yield exports.mkdir(fsPath);
- return;
- }
- catch (err) {
- switch (err.code) {
- case 'ENOENT': {
- yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
- yield exports.mkdir(fsPath);
- return;
- }
- default: {
- let stats;
- try {
- stats = yield exports.stat(fsPath);
- }
- catch (err2) {
- throw err;
- }
- if (!stats.isDirectory())
- throw err;
- }
- }
- }
- });
-}
-exports.mkdirP = mkdirP;
-/**
- * Best effort attempt to determine whether a file exists and is executable.
- * @param filePath file path to check
- * @param extensions additional file extensions to try
- * @return if file exists and is executable, returns the file path. otherwise empty string.
- */
-function tryGetExecutablePath(filePath, extensions) {
- return __awaiter(this, void 0, void 0, function* () {
- let stats = undefined;
- try {
- // test file exists
- stats = yield exports.stat(filePath);
- }
- catch (err) {
- if (err.code !== 'ENOENT') {
- // eslint-disable-next-line no-console
- console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
- }
- }
- if (stats && stats.isFile()) {
- if (exports.IS_WINDOWS) {
- // on Windows, test for valid extension
- const upperExt = path.extname(filePath).toUpperCase();
- if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
- return filePath;
- }
- }
- else {
- if (isUnixExecutable(stats)) {
- return filePath;
- }
- }
- }
- // try each extension
- const originalFilePath = filePath;
- for (const extension of extensions) {
- filePath = originalFilePath + extension;
- stats = undefined;
- try {
- stats = yield exports.stat(filePath);
- }
- catch (err) {
- if (err.code !== 'ENOENT') {
- // eslint-disable-next-line no-console
- console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
- }
- }
- if (stats && stats.isFile()) {
- if (exports.IS_WINDOWS) {
- // preserve the case of the actual file (since an extension was appended)
- try {
- const directory = path.dirname(filePath);
- const upperName = path.basename(filePath).toUpperCase();
- for (const actualName of yield exports.readdir(directory)) {
- if (upperName === actualName.toUpperCase()) {
- filePath = path.join(directory, actualName);
- break;
- }
- }
- }
- catch (err) {
- // eslint-disable-next-line no-console
- console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
- }
- return filePath;
- }
- else {
- if (isUnixExecutable(stats)) {
- return filePath;
- }
- }
- }
- }
- return '';
- });
-}
-exports.tryGetExecutablePath = tryGetExecutablePath;
-function normalizeSeparators(p) {
- p = p || '';
- if (exports.IS_WINDOWS) {
- // convert slashes on Windows
- p = p.replace(/\//g, '\\');
- // remove redundant slashes
- return p.replace(/\\\\+/g, '\\');
- }
- // remove redundant slashes
- return p.replace(/\/\/+/g, '/');
-}
-// on Mac/Linux, test the execute bit
-// R W X R W X R W X
-// 256 128 64 32 16 8 4 2 1
-function isUnixExecutable(stats) {
- return ((stats.mode & 1) > 0 ||
- ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
- ((stats.mode & 64) > 0 && stats.uid === process.getuid()));
-}
-//# sourceMappingURL=io-util.js.map
-
-/***/ }),
-
-/***/ 722:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const restore_1 = __importDefault(__webpack_require__(206));
-const save_1 = __importDefault(__webpack_require__(781));
-const crypto = __importStar(__webpack_require__(417));
-const fs = __importStar(__webpack_require__(747));
-function checksumFile(hashName, path) {
- return new Promise((resolve, reject) => {
- const hash = crypto.createHash(hashName);
- const stream = fs.createReadStream(path);
- stream.on("error", err => reject(err));
- stream.on("data", chunk => hash.update(chunk));
- stream.on("end", () => resolve(hash.digest("hex")));
- });
-}
-const pathExists = (path) => __awaiter(void 0, void 0, void 0, function* () { return !!(yield fs.promises.stat(path).catch(() => false)); });
-const getLintCacheDir = () => `${process.env.HOME}/.cache/golangci-lint`;
-const getCacheDirs = () => {
-    // Non-existent dirs are fine here: caching still works.
- return [getLintCacheDir(), `${process.env.HOME}/.cache/go-build`, `${process.env.HOME}/go/pkg`];
-};
-const getIntervalKey = (invalidationIntervalDays) => {
- const now = new Date();
- const secondsSinceEpoch = now.getTime() / 1000;
- const intervalNumber = Math.floor(secondsSinceEpoch / (invalidationIntervalDays * 86400));
- return intervalNumber.toString();
-};
-function buildCacheKeys() {
- return __awaiter(this, void 0, void 0, function* () {
- const keys = [];
- let cacheKey = `golangci-lint.cache-`;
- keys.push(cacheKey);
-        // Periodically invalidate the cache because new code is being added.
- // TODO: configure it via inputs.
- cacheKey += `${getIntervalKey(7)}-`;
- keys.push(cacheKey);
- if (yield pathExists(`go.mod`)) {
- // Add checksum to key to invalidate a cache when dependencies change.
- cacheKey += yield checksumFile(`sha1`, `go.mod`);
- }
- else {
- cacheKey += `nogomod`;
- }
- keys.push(cacheKey);
- return keys;
- });
-}
-function restoreCache() {
- return __awaiter(this, void 0, void 0, function* () {
- const startedAt = Date.now();
- const keys = yield buildCacheKeys();
- const primaryKey = keys.pop();
- const restoreKeys = keys.reverse();
- core.info(`Primary analysis cache key is '${primaryKey}', restore keys are '${restoreKeys.join(` | `)}'`);
- process.env[`INPUT_RESTORE-KEYS`] = restoreKeys.join(`\n`);
- process.env.INPUT_KEY = primaryKey;
- process.env.INPUT_PATH = getCacheDirs().join(`\n`);
- // Tell golangci-lint to use our cache directory.
- process.env.GOLANGCI_LINT_CACHE = getLintCacheDir();
- yield restore_1.default();
- core.info(`Restored cache for golangci-lint from key '${primaryKey}' in ${Date.now() - startedAt}ms`);
- });
-}
-exports.restoreCache = restoreCache;
-function saveCache() {
- return __awaiter(this, void 0, void 0, function* () {
- const startedAt = Date.now();
- const cacheDirs = getCacheDirs();
- process.env.INPUT_PATH = cacheDirs.join(`\n`);
- yield save_1.default();
- core.info(`Saved cache for golangci-lint from paths '${cacheDirs.join(`, `)}' in ${Date.now() - startedAt}ms`);
- });
-}
-exports.saveCache = saveCache;
-
-
-/***/ }),
-
-/***/ 728:
-/***/ (function(__unusedmodule, exports) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-class SearchState {
- constructor(path, level) {
- this.path = path;
- this.level = level;
- }
-}
-exports.SearchState = SearchState;
-//# sourceMappingURL=internal-search-state.js.map
-
-/***/ }),
-
-/***/ 747:
-/***/ (function(module) {
-
-module.exports = require("fs");
-
-/***/ }),
-
-/***/ 781:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const path = __importStar(__webpack_require__(622));
-const cacheHttpClient = __importStar(__webpack_require__(342));
-const constants_1 = __webpack_require__(929);
-const tar_1 = __webpack_require__(570);
-const utils = __importStar(__webpack_require__(867));
-function run() {
- return __awaiter(this, void 0, void 0, function* () {
- try {
- if (!utils.isValidEvent()) {
- utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils
- .getSupportedEvents()
- .join(", ")} events are supported at this time.`);
- return;
- }
- const state = utils.getCacheState();
-            // Inputs are re-evaluated before the post action, so we want the original key used for restore
- const primaryKey = core.getState(constants_1.State.CacheKey);
- if (!primaryKey) {
- utils.logWarning(`Error retrieving key from state.`);
- return;
- }
- if (utils.isExactKeyMatch(primaryKey, state)) {
- core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
- return;
- }
- core.debug("Reserving Cache");
- const cacheId = yield cacheHttpClient.reserveCache(primaryKey);
- if (cacheId == -1) {
- core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
- return;
- }
- core.debug(`Cache ID: ${cacheId}`);
- const cachePaths = yield utils.resolvePaths(core
- .getInput(constants_1.Inputs.Path, { required: true })
- .split("\n")
- .filter(x => x !== ""));
- core.debug("Cache Paths:");
- core.debug(`${JSON.stringify(cachePaths)}`);
- const archiveFolder = yield utils.createTempDirectory();
- const archivePath = path.join(archiveFolder, constants_1.CacheFilename);
- core.debug(`Archive Path: ${archivePath}`);
- yield tar_1.createTar(archiveFolder, cachePaths);
- const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
- const archiveFileSize = utils.getArchiveFileSize(archivePath);
- core.debug(`File Size: ${archiveFileSize}`);
- if (archiveFileSize > fileSizeLimit) {
- utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
- return;
- }
- core.debug(`Saving Cache (ID: ${cacheId})`);
- yield cacheHttpClient.saveCache(cacheId, archivePath);
- }
- catch (error) {
- utils.logWarning(error.message);
- }
- });
-}
-exports.default = run;
-
-
-/***/ }),
-
-/***/ 794:
-/***/ (function(module) {
-
-module.exports = require("stream");
-
-/***/ }),
-
-/***/ 826:
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-var rng = __webpack_require__(139);
-var bytesToUuid = __webpack_require__(105);
-
-function v4(options, buf, offset) {
- var i = buf && offset || 0;
-
- if (typeof(options) == 'string') {
- buf = options === 'binary' ? new Array(16) : null;
- options = null;
- }
- options = options || {};
-
- var rnds = options.random || (options.rng || rng)();
-
- // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
- rnds[6] = (rnds[6] & 0x0f) | 0x40;
- rnds[8] = (rnds[8] & 0x3f) | 0x80;
-
- // Copy bytes to buffer, if provided
- if (buf) {
- for (var ii = 0; ii < 16; ++ii) {
- buf[i + ii] = rnds[ii];
- }
- }
-
- return buf || bytesToUuid(rnds);
-}
-
-module.exports = v4;
-
-
-/***/ }),
-
-/***/ 835:
-/***/ (function(module) {
-
-module.exports = require("url");
-
/***/ }),
/***/ 867:
@@ -7680,7 +33186,7 @@ const fs = __importStar(__webpack_require__(747));
const path = __importStar(__webpack_require__(622));
const util = __importStar(__webpack_require__(669));
const uuidV4 = __importStar(__webpack_require__(826));
-const constants_1 = __webpack_require__(929);
+const constants_1 = __webpack_require__(573);
// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
function createTempDirectory() {
return __awaiter(this, void 0, void 0, function* () {
@@ -7795,6 +33301,1153 @@ function unlinkFile(path) {
exports.unlinkFile = unlinkFile;
+/***/ }),
+
+/***/ 881:
+/***/ (function(module) {
+
+"use strict";
+
+
+const isWin = process.platform === 'win32';
+
+function notFoundError(original, syscall) {
+ return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), {
+ code: 'ENOENT',
+ errno: 'ENOENT',
+ syscall: `${syscall} ${original.command}`,
+ path: original.command,
+ spawnargs: original.args,
+ });
+}
+
+function hookChildProcess(cp, parsed) {
+ if (!isWin) {
+ return;
+ }
+
+ const originalEmit = cp.emit;
+
+ cp.emit = function (name, arg1) {
+ // If emitting "exit" event and exit code is 1, we need to check if
+ // the command exists and emit an "error" instead
+ // See https://github.com/IndigoUnited/node-cross-spawn/issues/16
+ if (name === 'exit') {
+ const err = verifyENOENT(arg1, parsed, 'spawn');
+
+ if (err) {
+ return originalEmit.call(cp, 'error', err);
+ }
+ }
+
+ return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
+ };
+}
+
+function verifyENOENT(status, parsed) {
+ if (isWin && status === 1 && !parsed.file) {
+ return notFoundError(parsed.original, 'spawn');
+ }
+
+ return null;
+}
+
+function verifyENOENTSync(status, parsed) {
+ if (isWin && status === 1 && !parsed.file) {
+ return notFoundError(parsed.original, 'spawnSync');
+ }
+
+ return null;
+}
+
+module.exports = {
+ hookChildProcess,
+ verifyENOENT,
+ verifyENOENTSync,
+ notFoundError,
+};
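+// Illustrative example (hypothetical command name): on Windows a spawn of a
+// missing command comes back as a plain exit with status 1 instead of an
+// ENOENT error. hookChildProcess intercepts that 'exit' event and, when the
+// command could not be resolved (parsed.file is falsy), re-emits it as an
+// 'error' built by notFoundError, e.g.
+//   Error: spawn some-missing-cmd ENOENT   // err.code === 'ENOENT'
+// which matches what POSIX platforms emit natively.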
+
+
+/***/ }),
+
+/***/ 883:
+/***/ (function(module) {
+
+/**
+ * lodash (Custom Build)
+ * Build: `lodash modularize exports="npm" -o ./`
+ * Copyright jQuery Foundation and other contributors
+ * Released under MIT license
+ * Based on Underscore.js 1.8.3
+ * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+ */
+
+/** Used as the `TypeError` message for "Functions" methods. */
+var FUNC_ERROR_TEXT = 'Expected a function';
+
+/** Used to stand-in for `undefined` hash values. */
+var HASH_UNDEFINED = '__lodash_hash_undefined__';
+
+/** Used as references for various `Number` constants. */
+var INFINITY = 1 / 0,
+ MAX_SAFE_INTEGER = 9007199254740991;
+
+/** `Object#toString` result references. */
+var funcTag = '[object Function]',
+ genTag = '[object GeneratorFunction]',
+ symbolTag = '[object Symbol]';
+
+/** Used to match property names within property paths. */
+var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,
+ reIsPlainProp = /^\w*$/,
+ reLeadingDot = /^\./,
+ rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g;
+
+/**
+ * Used to match `RegExp`
+ * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
+ */
+var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
+
+/** Used to match backslashes in property paths. */
+var reEscapeChar = /\\(\\)?/g;
+
+/** Used to detect host constructors (Safari). */
+var reIsHostCtor = /^\[object .+?Constructor\]$/;
+
+/** Used to detect unsigned integer values. */
+var reIsUint = /^(?:0|[1-9]\d*)$/;
+
+/** Detect free variable `global` from Node.js. */
+var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
+
+/** Detect free variable `self`. */
+var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
+
+/** Used as a reference to the global object. */
+var root = freeGlobal || freeSelf || Function('return this')();
+
+/**
+ * Gets the value at `key` of `object`.
+ *
+ * @private
+ * @param {Object} [object] The object to query.
+ * @param {string} key The key of the property to get.
+ * @returns {*} Returns the property value.
+ */
+function getValue(object, key) {
+ return object == null ? undefined : object[key];
+}
+
+/**
+ * Checks if `value` is a host object in IE < 9.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a host object, else `false`.
+ */
+function isHostObject(value) {
+ // Many host objects are `Object` objects that can coerce to strings
+ // despite having improperly defined `toString` methods.
+ var result = false;
+ if (value != null && typeof value.toString != 'function') {
+ try {
+ result = !!(value + '');
+ } catch (e) {}
+ }
+ return result;
+}
+
+/** Used for built-in method references. */
+var arrayProto = Array.prototype,
+ funcProto = Function.prototype,
+ objectProto = Object.prototype;
+
+/** Used to detect overreaching core-js shims. */
+var coreJsData = root['__core-js_shared__'];
+
+/** Used to detect methods masquerading as native. */
+var maskSrcKey = (function() {
+ var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
+ return uid ? ('Symbol(src)_1.' + uid) : '';
+}());
+
+/** Used to resolve the decompiled source of functions. */
+var funcToString = funcProto.toString;
+
+/** Used to check objects for own properties. */
+var hasOwnProperty = objectProto.hasOwnProperty;
+
+/**
+ * Used to resolve the
+ * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
+ * of values.
+ */
+var objectToString = objectProto.toString;
+
+/** Used to detect if a method is native. */
+var reIsNative = RegExp('^' +
+ funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
+ .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
+);
+
+/** Built-in value references. */
+var Symbol = root.Symbol,
+ splice = arrayProto.splice;
+
+/* Built-in method references that are verified to be native. */
+var Map = getNative(root, 'Map'),
+ nativeCreate = getNative(Object, 'create');
+
+/** Used to convert symbols to primitives and strings. */
+var symbolProto = Symbol ? Symbol.prototype : undefined,
+ symbolToString = symbolProto ? symbolProto.toString : undefined;
+
+/**
+ * Creates a hash object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function Hash(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the hash.
+ *
+ * @private
+ * @name clear
+ * @memberOf Hash
+ */
+function hashClear() {
+ this.__data__ = nativeCreate ? nativeCreate(null) : {};
+}
+
+/**
+ * Removes `key` and its value from the hash.
+ *
+ * @private
+ * @name delete
+ * @memberOf Hash
+ * @param {Object} hash The hash to modify.
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function hashDelete(key) {
+ return this.has(key) && delete this.__data__[key];
+}
+
+/**
+ * Gets the hash value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf Hash
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function hashGet(key) {
+ var data = this.__data__;
+ if (nativeCreate) {
+ var result = data[key];
+ return result === HASH_UNDEFINED ? undefined : result;
+ }
+ return hasOwnProperty.call(data, key) ? data[key] : undefined;
+}
+
+/**
+ * Checks if a hash value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf Hash
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function hashHas(key) {
+ var data = this.__data__;
+ return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key);
+}
+
+/**
+ * Sets the hash `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf Hash
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the hash instance.
+ */
+function hashSet(key, value) {
+ var data = this.__data__;
+ data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
+ return this;
+}
+
+// Add methods to `Hash`.
+Hash.prototype.clear = hashClear;
+Hash.prototype['delete'] = hashDelete;
+Hash.prototype.get = hashGet;
+Hash.prototype.has = hashHas;
+Hash.prototype.set = hashSet;
+
+/**
+ * Creates a list cache object.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function ListCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the list cache.
+ *
+ * @private
+ * @name clear
+ * @memberOf ListCache
+ */
+function listCacheClear() {
+ this.__data__ = [];
+}
+
+/**
+ * Removes `key` and its value from the list cache.
+ *
+ * @private
+ * @name delete
+ * @memberOf ListCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function listCacheDelete(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ return false;
+ }
+ var lastIndex = data.length - 1;
+ if (index == lastIndex) {
+ data.pop();
+ } else {
+ splice.call(data, index, 1);
+ }
+ return true;
+}
+
+/**
+ * Gets the list cache value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf ListCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function listCacheGet(key) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ return index < 0 ? undefined : data[index][1];
+}
+
+/**
+ * Checks if a list cache value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf ListCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function listCacheHas(key) {
+ return assocIndexOf(this.__data__, key) > -1;
+}
+
+/**
+ * Sets the list cache `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf ListCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the list cache instance.
+ */
+function listCacheSet(key, value) {
+ var data = this.__data__,
+ index = assocIndexOf(data, key);
+
+ if (index < 0) {
+ data.push([key, value]);
+ } else {
+ data[index][1] = value;
+ }
+ return this;
+}
+
+// Add methods to `ListCache`.
+ListCache.prototype.clear = listCacheClear;
+ListCache.prototype['delete'] = listCacheDelete;
+ListCache.prototype.get = listCacheGet;
+ListCache.prototype.has = listCacheHas;
+ListCache.prototype.set = listCacheSet;
+
+/**
+ * Creates a map cache object to store key-value pairs.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+function MapCache(entries) {
+ var index = -1,
+ length = entries ? entries.length : 0;
+
+ this.clear();
+ while (++index < length) {
+ var entry = entries[index];
+ this.set(entry[0], entry[1]);
+ }
+}
+
+/**
+ * Removes all key-value entries from the map.
+ *
+ * @private
+ * @name clear
+ * @memberOf MapCache
+ */
+function mapCacheClear() {
+ this.__data__ = {
+ 'hash': new Hash,
+ 'map': new (Map || ListCache),
+ 'string': new Hash
+ };
+}
+
+/**
+ * Removes `key` and its value from the map.
+ *
+ * @private
+ * @name delete
+ * @memberOf MapCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+function mapCacheDelete(key) {
+ return getMapData(this, key)['delete'](key);
+}
+
+/**
+ * Gets the map value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf MapCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+function mapCacheGet(key) {
+ return getMapData(this, key).get(key);
+}
+
+/**
+ * Checks if a map value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf MapCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+function mapCacheHas(key) {
+ return getMapData(this, key).has(key);
+}
+
+/**
+ * Sets the map `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf MapCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the map cache instance.
+ */
+function mapCacheSet(key, value) {
+ getMapData(this, key).set(key, value);
+ return this;
+}
+
+// Add methods to `MapCache`.
+MapCache.prototype.clear = mapCacheClear;
+MapCache.prototype['delete'] = mapCacheDelete;
+MapCache.prototype.get = mapCacheGet;
+MapCache.prototype.has = mapCacheHas;
+MapCache.prototype.set = mapCacheSet;
+
+/**
+ * Assigns `value` to `key` of `object` if the existing value is not equivalent
+ * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * for equality comparisons.
+ *
+ * @private
+ * @param {Object} object The object to modify.
+ * @param {string} key The key of the property to assign.
+ * @param {*} value The value to assign.
+ */
+function assignValue(object, key, value) {
+ var objValue = object[key];
+ if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) ||
+ (value === undefined && !(key in object))) {
+ object[key] = value;
+ }
+}
+
+/**
+ * Gets the index at which the `key` is found in `array` of key-value pairs.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {*} key The key to search for.
+ * @returns {number} Returns the index of the matched value, else `-1`.
+ */
+function assocIndexOf(array, key) {
+ var length = array.length;
+ while (length--) {
+ if (eq(array[length][0], key)) {
+ return length;
+ }
+ }
+ return -1;
+}
+
+/**
+ * The base implementation of `_.isNative` without bad shim checks.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a native function,
+ * else `false`.
+ */
+function baseIsNative(value) {
+ if (!isObject(value) || isMasked(value)) {
+ return false;
+ }
+ var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor;
+ return pattern.test(toSource(value));
+}
+
+/**
+ * The base implementation of `_.set`.
+ *
+ * @private
+ * @param {Object} object The object to modify.
+ * @param {Array|string} path The path of the property to set.
+ * @param {*} value The value to set.
+ * @param {Function} [customizer] The function to customize path creation.
+ * @returns {Object} Returns `object`.
+ */
+function baseSet(object, path, value, customizer) {
+ if (!isObject(object)) {
+ return object;
+ }
+ path = isKey(path, object) ? [path] : castPath(path);
+
+ var index = -1,
+ length = path.length,
+ lastIndex = length - 1,
+ nested = object;
+
+ while (nested != null && ++index < length) {
+ var key = toKey(path[index]),
+ newValue = value;
+
+ if (index != lastIndex) {
+ var objValue = nested[key];
+ newValue = customizer ? customizer(objValue, key, nested) : undefined;
+ if (newValue === undefined) {
+ newValue = isObject(objValue)
+ ? objValue
+ : (isIndex(path[index + 1]) ? [] : {});
+ }
+ }
+ assignValue(nested, key, newValue);
+ nested = nested[key];
+ }
+ return object;
+}
+
+/**
+ * The base implementation of `_.toString` which doesn't convert nullish
+ * values to empty strings.
+ *
+ * @private
+ * @param {*} value The value to process.
+ * @returns {string} Returns the string.
+ */
+function baseToString(value) {
+ // Exit early for strings to avoid a performance hit in some environments.
+ if (typeof value == 'string') {
+ return value;
+ }
+ if (isSymbol(value)) {
+ return symbolToString ? symbolToString.call(value) : '';
+ }
+ var result = (value + '');
+ return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
+}
+
+/**
+ * Casts `value` to a path array if it's not one.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {Array} Returns the cast property path array.
+ */
+function castPath(value) {
+ return isArray(value) ? value : stringToPath(value);
+}
+
+/**
+ * Gets the data for `map`.
+ *
+ * @private
+ * @param {Object} map The map to query.
+ * @param {string} key The reference key.
+ * @returns {*} Returns the map data.
+ */
+function getMapData(map, key) {
+ var data = map.__data__;
+ return isKeyable(key)
+ ? data[typeof key == 'string' ? 'string' : 'hash']
+ : data.map;
+}
+
+/**
+ * Gets the native function at `key` of `object`.
+ *
+ * @private
+ * @param {Object} object The object to query.
+ * @param {string} key The key of the method to get.
+ * @returns {*} Returns the function if it's native, else `undefined`.
+ */
+function getNative(object, key) {
+ var value = getValue(object, key);
+ return baseIsNative(value) ? value : undefined;
+}
+
+/**
+ * Checks if `value` is a valid array-like index.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index.
+ * @returns {boolean} Returns `true` if `value` is a valid index, else `false`.
+ */
+function isIndex(value, length) {
+ length = length == null ? MAX_SAFE_INTEGER : length;
+ return !!length &&
+ (typeof value == 'number' || reIsUint.test(value)) &&
+ (value > -1 && value % 1 == 0 && value < length);
+}
+
+/**
+ * Checks if `value` is a property name and not a property path.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @param {Object} [object] The object to query keys on.
+ * @returns {boolean} Returns `true` if `value` is a property name, else `false`.
+ */
+function isKey(value, object) {
+ if (isArray(value)) {
+ return false;
+ }
+ var type = typeof value;
+ if (type == 'number' || type == 'symbol' || type == 'boolean' ||
+ value == null || isSymbol(value)) {
+ return true;
+ }
+ return reIsPlainProp.test(value) || !reIsDeepProp.test(value) ||
+ (object != null && value in Object(object));
+}
+
+/**
+ * Checks if `value` is suitable for use as unique object key.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is suitable, else `false`.
+ */
+function isKeyable(value) {
+ var type = typeof value;
+ return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
+ ? (value !== '__proto__')
+ : (value === null);
+}
+
+/**
+ * Checks if `func` has its source masked.
+ *
+ * @private
+ * @param {Function} func The function to check.
+ * @returns {boolean} Returns `true` if `func` is masked, else `false`.
+ */
+function isMasked(func) {
+ return !!maskSrcKey && (maskSrcKey in func);
+}
+
+/**
+ * Converts `string` to a property path array.
+ *
+ * @private
+ * @param {string} string The string to convert.
+ * @returns {Array} Returns the property path array.
+ */
+var stringToPath = memoize(function(string) {
+ string = toString(string);
+
+ var result = [];
+ if (reLeadingDot.test(string)) {
+ result.push('');
+ }
+ string.replace(rePropName, function(match, number, quote, string) {
+ result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match));
+ });
+ return result;
+});
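+// Illustrative examples of the conversion performed by stringToPath:
+//   stringToPath('a[0].b.c')  -> ['a', '0', 'b', 'c']
+//   stringToPath('.a')        -> ['', 'a']   (a leading dot yields an empty first key)
+// The result is memoized above, so repeated lookups of the same path string
+// reuse the parsed array.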
+
+/**
+ * Converts `value` to a string key if it's not a string or symbol.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {string|symbol} Returns the key.
+ */
+function toKey(value) {
+ if (typeof value == 'string' || isSymbol(value)) {
+ return value;
+ }
+ var result = (value + '');
+ return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
+}
+
+/**
+ * Converts `func` to its source code.
+ *
+ * @private
+ * @param {Function} func The function to process.
+ * @returns {string} Returns the source code.
+ */
+function toSource(func) {
+ if (func != null) {
+ try {
+ return funcToString.call(func);
+ } catch (e) {}
+ try {
+ return (func + '');
+ } catch (e) {}
+ }
+ return '';
+}
+
+/**
+ * Creates a function that memoizes the result of `func`. If `resolver` is
+ * provided, it determines the cache key for storing the result based on the
+ * arguments provided to the memoized function. By default, the first argument
+ * provided to the memoized function is used as the map cache key. The `func`
+ * is invoked with the `this` binding of the memoized function.
+ *
+ * **Note:** The cache is exposed as the `cache` property on the memoized
+ * function. Its creation may be customized by replacing the `_.memoize.Cache`
+ * constructor with one whose instances implement the
+ * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object)
+ * method interface of `delete`, `get`, `has`, and `set`.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Function
+ * @param {Function} func The function to have its output memoized.
+ * @param {Function} [resolver] The function to resolve the cache key.
+ * @returns {Function} Returns the new memoized function.
+ * @example
+ *
+ * var object = { 'a': 1, 'b': 2 };
+ * var other = { 'c': 3, 'd': 4 };
+ *
+ * var values = _.memoize(_.values);
+ * values(object);
+ * // => [1, 2]
+ *
+ * values(other);
+ * // => [3, 4]
+ *
+ * object.a = 2;
+ * values(object);
+ * // => [1, 2]
+ *
+ * // Modify the result cache.
+ * values.cache.set(object, ['a', 'b']);
+ * values(object);
+ * // => ['a', 'b']
+ *
+ * // Replace `_.memoize.Cache`.
+ * _.memoize.Cache = WeakMap;
+ */
+function memoize(func, resolver) {
+ if (typeof func != 'function' || (resolver && typeof resolver != 'function')) {
+ throw new TypeError(FUNC_ERROR_TEXT);
+ }
+ var memoized = function() {
+ var args = arguments,
+ key = resolver ? resolver.apply(this, args) : args[0],
+ cache = memoized.cache;
+
+ if (cache.has(key)) {
+ return cache.get(key);
+ }
+ var result = func.apply(this, args);
+ memoized.cache = cache.set(key, result);
+ return result;
+ };
+ memoized.cache = new (memoize.Cache || MapCache);
+ return memoized;
+}
+
+// Assign cache to `_.memoize`.
+memoize.Cache = MapCache;
+
+/**
+ * Performs a
+ * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * comparison between two values to determine if they are equivalent.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to compare.
+ * @param {*} other The other value to compare.
+ * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
+ * @example
+ *
+ * var object = { 'a': 1 };
+ * var other = { 'a': 1 };
+ *
+ * _.eq(object, object);
+ * // => true
+ *
+ * _.eq(object, other);
+ * // => false
+ *
+ * _.eq('a', 'a');
+ * // => true
+ *
+ * _.eq('a', Object('a'));
+ * // => false
+ *
+ * _.eq(NaN, NaN);
+ * // => true
+ */
+function eq(value, other) {
+ return value === other || (value !== value && other !== other);
+}
+
+/**
+ * Checks if `value` is classified as an `Array` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an array, else `false`.
+ * @example
+ *
+ * _.isArray([1, 2, 3]);
+ * // => true
+ *
+ * _.isArray(document.body.children);
+ * // => false
+ *
+ * _.isArray('abc');
+ * // => false
+ *
+ * _.isArray(_.noop);
+ * // => false
+ */
+var isArray = Array.isArray;
+
+/**
+ * Checks if `value` is classified as a `Function` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a function, else `false`.
+ * @example
+ *
+ * _.isFunction(_);
+ * // => true
+ *
+ * _.isFunction(/abc/);
+ * // => false
+ */
+function isFunction(value) {
+ // The use of `Object#toString` avoids issues with the `typeof` operator
+ // in Safari 8-9 which returns 'object' for typed array and other constructors.
+ var tag = isObject(value) ? objectToString.call(value) : '';
+ return tag == funcTag || tag == genTag;
+}
+
+/**
+ * Checks if `value` is the
+ * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
+ * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an object, else `false`.
+ * @example
+ *
+ * _.isObject({});
+ * // => true
+ *
+ * _.isObject([1, 2, 3]);
+ * // => true
+ *
+ * _.isObject(_.noop);
+ * // => true
+ *
+ * _.isObject(null);
+ * // => false
+ */
+function isObject(value) {
+ var type = typeof value;
+ return !!value && (type == 'object' || type == 'function');
+}
+
+/**
+ * Checks if `value` is object-like. A value is object-like if it's not `null`
+ * and has a `typeof` result of "object".
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is object-like, else `false`.
+ * @example
+ *
+ * _.isObjectLike({});
+ * // => true
+ *
+ * _.isObjectLike([1, 2, 3]);
+ * // => true
+ *
+ * _.isObjectLike(_.noop);
+ * // => false
+ *
+ * _.isObjectLike(null);
+ * // => false
+ */
+function isObjectLike(value) {
+ return !!value && typeof value == 'object';
+}
+
+/**
+ * Checks if `value` is classified as a `Symbol` primitive or object.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a symbol, else `false`.
+ * @example
+ *
+ * _.isSymbol(Symbol.iterator);
+ * // => true
+ *
+ * _.isSymbol('abc');
+ * // => false
+ */
+function isSymbol(value) {
+ return typeof value == 'symbol' ||
+ (isObjectLike(value) && objectToString.call(value) == symbolTag);
+}
+
+/**
+ * Converts `value` to a string. An empty string is returned for `null`
+ * and `undefined` values. The sign of `-0` is preserved.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to process.
+ * @returns {string} Returns the string.
+ * @example
+ *
+ * _.toString(null);
+ * // => ''
+ *
+ * _.toString(-0);
+ * // => '-0'
+ *
+ * _.toString([1, 2, 3]);
+ * // => '1,2,3'
+ */
+function toString(value) {
+ return value == null ? '' : baseToString(value);
+}
+
+/**
+ * Sets the value at `path` of `object`. If a portion of `path` doesn't exist,
+ * it's created. Arrays are created for missing index properties while objects
+ * are created for all other missing properties. Use `_.setWith` to customize
+ * `path` creation.
+ *
+ * **Note:** This method mutates `object`.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.7.0
+ * @category Object
+ * @param {Object} object The object to modify.
+ * @param {Array|string} path The path of the property to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns `object`.
+ * @example
+ *
+ * var object = { 'a': [{ 'b': { 'c': 3 } }] };
+ *
+ * _.set(object, 'a[0].b.c', 4);
+ * console.log(object.a[0].b.c);
+ * // => 4
+ *
+ * _.set(object, ['x', '0', 'y', 'z'], 5);
+ * console.log(object.x[0].y.z);
+ * // => 5
+ */
+function set(object, path, value) {
+ return object == null ? object : baseSet(object, path, value);
+}
+
+module.exports = set;
+
+
+/***/ }),
+
+/***/ 890:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationPlugin;
+
+const { createTokenAuth } = __webpack_require__(813);
+const { Deprecation } = __webpack_require__(692);
+const once = __webpack_require__(969);
+
+const beforeRequest = __webpack_require__(414);
+const requestError = __webpack_require__(446);
+const validate = __webpack_require__(538);
+const withAuthorizationPrefix = __webpack_require__(85);
+
+const deprecateAuthBasic = once((log, deprecation) => log.warn(deprecation));
+const deprecateAuthObject = once((log, deprecation) => log.warn(deprecation));
+
+function authenticationPlugin(octokit, options) {
+ // If `options.authStrategy` is set then use it and pass in `options.auth`
+ if (options.authStrategy) {
+ const auth = options.authStrategy(options.auth);
+ octokit.hook.wrap("request", auth.hook);
+ octokit.auth = auth;
+ return;
+ }
+
+ // If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
+  // is unauthenticated. The `octokit.auth()` method is a no-op and no request hook is registered.
+ if (!options.auth) {
+ octokit.auth = () =>
+ Promise.resolve({
+ type: "unauthenticated"
+ });
+ return;
+ }
+
+ const isBasicAuthString =
+ typeof options.auth === "string" &&
+ /^basic/.test(withAuthorizationPrefix(options.auth));
+
+ // If only `options.auth` is set to a string, use the default token authentication strategy.
+ if (typeof options.auth === "string" && !isBasicAuthString) {
+ const auth = createTokenAuth(options.auth);
+ octokit.hook.wrap("request", auth.hook);
+ octokit.auth = auth;
+ return;
+ }
+
+ // Otherwise log a deprecation message
+  const [deprecationMethod, deprecationMessage] = isBasicAuthString
+    ? [
+        deprecateAuthBasic,
+        'Setting the "new Octokit({ auth })" option to a Basic Auth string is deprecated. Use https://github.com/octokit/auth-basic.js instead. See (https://octokit.github.io/rest.js/#authentication)'
+      ]
+    : [
+        deprecateAuthObject,
+        'Setting the "new Octokit({ auth })" option to an object without also setting the "authStrategy" option is deprecated and will be removed in v17. See (https://octokit.github.io/rest.js/#authentication)'
+      ];
+  deprecationMethod(
+    octokit.log,
+    new Deprecation("[@octokit/rest] " + deprecationMessage)
+  );
+
+  octokit.auth = () =>
+    Promise.resolve({
+      type: "deprecated",
+      message: deprecationMessage
+    });
+
+ validate(options.auth);
+
+ const state = {
+ octokit,
+ auth: options.auth
+ };
+
+ octokit.hook.before("request", beforeRequest.bind(null, state));
+ octokit.hook.error("request", requestError.bind(null, state));
+}
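+// Illustrative walk-through (hypothetical values): with `new Octokit({ auth: "<token>" })`
+// the string is not a Basic Auth header, so the plugin wires createTokenAuth into the
+// request hook; with no `auth` at all, octokit.auth() resolves to
+// { type: "unauthenticated" } and requests go out anonymously; passing an auth
+// object without an authStrategy still works but logs the deprecation above.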
+
+
/***/ }),
/***/ 896:
@@ -7815,6 +34468,144 @@ var isArray = Array.isArray || function (xs) {
};
+/***/ }),
+
+/***/ 898:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+var request = __webpack_require__(753);
+var universalUserAgent = __webpack_require__(796);
+
+const VERSION = "4.3.1";
+
+class GraphqlError extends Error {
+ constructor(request, response) {
+ const message = response.data.errors[0].message;
+ super(message);
+ Object.assign(this, response.data);
+ this.name = "GraphqlError";
+ this.request = request; // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+ }
+
+}
+
+const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query"];
+function graphql(request, query, options) {
+ options = typeof query === "string" ? options = Object.assign({
+ query
+ }, options) : options = query;
+ const requestOptions = Object.keys(options).reduce((result, key) => {
+ if (NON_VARIABLE_OPTIONS.includes(key)) {
+ result[key] = options[key];
+ return result;
+ }
+
+ if (!result.variables) {
+ result.variables = {};
+ }
+
+ result.variables[key] = options[key];
+ return result;
+ }, {});
+ return request(requestOptions).then(response => {
+ if (response.data.errors) {
+ throw new GraphqlError(requestOptions, {
+ data: response.data
+ });
+ }
+
+ return response.data.data;
+ });
+}
+
+function withDefaults(request$1, newDefaults) {
+ const newRequest = request$1.defaults(newDefaults);
+
+ const newApi = (query, options) => {
+ return graphql(newRequest, query, options);
+ };
+
+ return Object.assign(newApi, {
+ defaults: withDefaults.bind(null, newRequest),
+ endpoint: request.request.endpoint
+ });
+}
+
+const graphql$1 = withDefaults(request.request, {
+ headers: {
+ "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
+ },
+ method: "POST",
+ url: "/graphql"
+});
+function withCustomRequest(customRequest) {
+ return withDefaults(customRequest, {
+ method: "POST",
+ url: "/graphql"
+ });
+}
+
+exports.graphql = graphql$1;
+exports.withCustomRequest = withCustomRequest;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 916:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+const VERSION = "1.0.0";
+
+/**
+ * @param octokit Octokit instance
+ * @param options Options passed to Octokit constructor
+ */
+
+function requestLog(octokit) {
+ octokit.hook.wrap("request", (request, options) => {
+ octokit.log.debug("request", options);
+ const start = Date.now();
+ const requestOptions = octokit.request.endpoint.parse(options);
+ const path = requestOptions.url.replace(options.baseUrl, "");
+ return request(options).then(response => {
+ octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`);
+ return response;
+ }).catch(error => {
+ octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`);
+ throw error;
+ });
+ });
+}
+requestLog.VERSION = VERSION;
+
+exports.requestLog = requestLog;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 919:
+/***/ (function(module) {
+
+module.exports = {"_args":[["@octokit/rest@16.43.1","/Users/denis-isaev/golangci-lint-action"]],"_from":"@octokit/rest@16.43.1","_id":"@octokit/rest@16.43.1","_inBundle":false,"_integrity":"sha512-gfFKwRT/wFxq5qlNjnW2dh+qh74XgTQ2B179UX5K1HYCluioWj8Ndbgqw2PVqa1NnVJkGHp2ovMpVn/DImlmkw==","_location":"/@actions/github/@octokit/rest","_phantomChildren":{},"_requested":{"type":"version","registry":true,"raw":"@octokit/rest@16.43.1","name":"@octokit/rest","escapedName":"@octokit%2frest","scope":"@octokit","rawSpec":"16.43.1","saveSpec":null,"fetchSpec":"16.43.1"},"_requiredBy":["/@actions/github"],"_resolved":"https://registry.npmjs.org/@octokit/rest/-/rest-16.43.1.tgz","_spec":"16.43.1","_where":"/Users/denis-isaev/golangci-lint-action","author":{"name":"Gregor Martynus","url":"https://github.com/gr2m"},"bugs":{"url":"https://github.com/octokit/rest.js/issues"},"bundlesize":[{"path":"./dist/octokit-rest.min.js.gz","maxSize":"33 kB"}],"contributors":[{"name":"Mike de Boer","email":"info@mikedeboer.nl"},{"name":"Fabian Jakobs","email":"fabian@c9.io"},{"name":"Joe Gallo","email":"joe@brassafrax.com"},{"name":"Gregor Martynus","url":"https://github.com/gr2m"}],"dependencies":{"@octokit/auth-token":"^2.4.0","@octokit/plugin-paginate-rest":"^1.1.1","@octokit/plugin-request-log":"^1.0.0","@octokit/plugin-rest-endpoint-methods":"2.4.0","@octokit/request":"^5.2.0","@octokit/request-error":"^1.0.2","atob-lite":"^2.0.0","before-after-hook":"^2.0.0","btoa-lite":"^1.0.0","deprecation":"^2.0.0","lodash.get":"^4.4.2","lodash.set":"^4.3.2","lodash.uniq":"^4.5.0","octokit-pagination-methods":"^1.1.0","once":"^1.4.0","universal-user-agent":"^4.0.0"},"description":"GitHub REST API client for Node.js","devDependencies":{"@gimenete/type-writer":"^0.1.3","@octokit/auth":"^1.1.1","@octokit/fixtures-server":"^5.0.6","@octokit/graphql":"^4.2.0","@types/node":"^13.1.0","bundlesize":"^0.18.0","chai":"^4.1.2","compression-webpack-plugin":"^3.1.0","cypress":"^3.0.0","glob":"^7.1.2","http-proxy-agent":"^4.0.0","lodash.camelcase":"^4.3.0","lodash.merge":"^4.6.1","lodash.upperfirst":"^4.3.1","lolex":"^5.1.2","mkdirp":"^1.0.0","mocha":"^7.0.1","mustache":"^4.0.0","nock":"^11.3.3","npm-run-all":"^4.1.2","nyc":"^15.0.0","prettier":"^1.14.2","proxy":"^1.0.0","semantic-release":"^17.0.0","sinon":"^8.0.0","sinon-chai":"^3.0.0","sort-keys":"^4.0.0","string-to-arraybuffer":"^1.0.0","string-to-jsdoc-comment":"^1.0.0","typescript":"^3.3.1","webpack":"^4.0.0","webpack-bundle-analyzer":"^3.0.0","webpack-cli":"^3.0.0"},"files":["index.js","index.d.ts","lib","plugins"],"homepage":"https://github.com/octokit/rest.js#readme","keywords":["octokit","github","rest","api-client"],"license":"MIT","name":"@octokit/rest","nyc":{"ignore":["test"]},"publishConfig":{"access":"public"},"release":{"publish":["@semantic-release/npm",{"path":"@semantic-release/github","assets":["dist/*","!dist/*.map.gz"]}]},"repository":{"type":"git","url":"git+https://github.com/octokit/rest.js.git"},"scripts":{"build":"npm-run-all build:*","build:browser":"npm-run-all build:browser:*","build:browser:development":"webpack --mode development --entry . --output-library=Octokit --output=./dist/octokit-rest.js --profile --json > dist/bundle-stats.json","build:browser:production":"webpack --mode production --entry . 
--plugin=compression-webpack-plugin --output-library=Octokit --output-path=./dist --output-filename=octokit-rest.min.js --devtool source-map","build:ts":"npm run -s update-endpoints:typescript","coverage":"nyc report --reporter=html && open coverage/index.html","generate-bundle-report":"webpack-bundle-analyzer dist/bundle-stats.json --mode=static --no-open --report dist/bundle-report.html","lint":"prettier --check '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json","lint:fix":"prettier --write '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json","postvalidate:ts":"tsc --noEmit --target es6 test/typescript-validate.ts","prebuild:browser":"mkdirp dist/","pretest":"npm run -s lint","prevalidate:ts":"npm run -s build:ts","start-fixtures-server":"octokit-fixtures-server","test":"nyc mocha test/mocha-node-setup.js \"test/*/**/*-test.js\"","test:browser":"cypress run --browser chrome","update-endpoints":"npm-run-all update-endpoints:*","update-endpoints:fetch-json":"node scripts/update-endpoints/fetch-json","update-endpoints:typescript":"node scripts/update-endpoints/typescript","validate:ts":"tsc --target es6 --noImplicitAny index.d.ts"},"types":"index.d.ts","version":"16.43.1"};
+
/***/ }),
/***/ 923:
@@ -8056,38 +34847,37 @@ exports.Pattern = Pattern;
/***/ }),
/***/ 929:
-/***/ (function(__unusedmodule, exports) {
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = hasNextPage
+
+const deprecate = __webpack_require__(370)
+const getPageLinks = __webpack_require__(577)
+
+function hasNextPage (link) {
+ deprecate(`octokit.hasNextPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
+ return getPageLinks(link).next
+}
+
+
+/***/ }),
+
+/***/ 948:
+/***/ (function(module) {
"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-var Inputs;
-(function (Inputs) {
- Inputs["Key"] = "key";
- Inputs["Path"] = "path";
- Inputs["RestoreKeys"] = "restore-keys";
-})(Inputs = exports.Inputs || (exports.Inputs = {}));
-var Outputs;
-(function (Outputs) {
- Outputs["CacheHit"] = "cache-hit";
-})(Outputs = exports.Outputs || (exports.Outputs = {}));
-var State;
-(function (State) {
- State["CacheKey"] = "CACHE_KEY";
- State["CacheResult"] = "CACHE_RESULT";
-})(State = exports.State || (exports.State = {}));
-var Events;
-(function (Events) {
- Events["Key"] = "GITHUB_EVENT_NAME";
- Events["Push"] = "push";
- Events["PullRequest"] = "pull_request";
-})(Events = exports.Events || (exports.Events = {}));
-exports.CacheFilename = "cache.tgz";
-// Socket timeout in milliseconds during download. If no traffic is received
-// over the socket during this period, the socket is destroyed and the download
-// is aborted.
-exports.SocketTimeout = 5000;
+/**
+ * Tries to execute a function and discards any error that occurs.
+ * @param {Function} fn - Function that might or might not throw an error.
+ * @returns {?*} Return-value of the function when no error occurred.
+ */
+module.exports = function(fn) {
+
+ try { return fn() } catch (e) {}
+
+}
/***/ }),
@@ -8155,6 +34945,521 @@ function checkBypass(reqUrl) {
exports.checkBypass = checkBypass;
+/***/ }),
+
+/***/ 953:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = authenticationPlugin;
+
+const { Deprecation } = __webpack_require__(692);
+const once = __webpack_require__(969);
+
+const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation));
+
+const authenticate = __webpack_require__(790);
+const beforeRequest = __webpack_require__(140);
+const requestError = __webpack_require__(240);
+
+function authenticationPlugin(octokit, options) {
+ if (options.auth) {
+ octokit.authenticate = () => {
+ deprecateAuthenticate(
+ octokit.log,
+ new Deprecation(
+ '[@octokit/rest] octokit.authenticate() is deprecated and has no effect when "auth" option is set on Octokit constructor'
+ )
+ );
+ };
+ return;
+ }
+ const state = {
+ octokit,
+ auth: false
+ };
+ octokit.authenticate = authenticate.bind(null, state);
+ octokit.hook.before("request", beforeRequest.bind(null, state));
+ octokit.hook.error("request", requestError.bind(null, state));
+}
+
+
+/***/ }),
+
+/***/ 955:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const path = __webpack_require__(622);
+const childProcess = __webpack_require__(129);
+const crossSpawn = __webpack_require__(20);
+const stripEof = __webpack_require__(768);
+const npmRunPath = __webpack_require__(621);
+const isStream = __webpack_require__(323);
+const _getStream = __webpack_require__(145);
+const pFinally = __webpack_require__(697);
+const onExit = __webpack_require__(260);
+const errname = __webpack_require__(427);
+const stdio = __webpack_require__(168);
+
+const TEN_MEGABYTES = 1000 * 1000 * 10;
+
+function handleArgs(cmd, args, opts) {
+ let parsed;
+
+ opts = Object.assign({
+ extendEnv: true,
+ env: {}
+ }, opts);
+
+ if (opts.extendEnv) {
+ opts.env = Object.assign({}, process.env, opts.env);
+ }
+
+ if (opts.__winShell === true) {
+ delete opts.__winShell;
+ parsed = {
+ command: cmd,
+ args,
+ options: opts,
+ file: cmd,
+ original: {
+ cmd,
+ args
+ }
+ };
+ } else {
+ parsed = crossSpawn._parse(cmd, args, opts);
+ }
+
+ opts = Object.assign({
+ maxBuffer: TEN_MEGABYTES,
+ buffer: true,
+ stripEof: true,
+ preferLocal: true,
+ localDir: parsed.options.cwd || process.cwd(),
+ encoding: 'utf8',
+ reject: true,
+ cleanup: true
+ }, parsed.options);
+
+ opts.stdio = stdio(opts);
+
+ if (opts.preferLocal) {
+ opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir}));
+ }
+
+ if (opts.detached) {
+ // #115
+ opts.cleanup = false;
+ }
+
+ if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') {
+ // #116
+ parsed.args.unshift('/q');
+ }
+
+ return {
+ cmd: parsed.command,
+ args: parsed.args,
+ opts,
+ parsed
+ };
+}
+
+function handleInput(spawned, input) {
+ if (input === null || input === undefined) {
+ return;
+ }
+
+ if (isStream(input)) {
+ input.pipe(spawned.stdin);
+ } else {
+ spawned.stdin.end(input);
+ }
+}
+
+function handleOutput(opts, val) {
+ if (val && opts.stripEof) {
+ val = stripEof(val);
+ }
+
+ return val;
+}
+
+function handleShell(fn, cmd, opts) {
+ let file = '/bin/sh';
+ let args = ['-c', cmd];
+
+ opts = Object.assign({}, opts);
+
+ if (process.platform === 'win32') {
+ opts.__winShell = true;
+ file = process.env.comspec || 'cmd.exe';
+ args = ['/s', '/c', `"${cmd}"`];
+ opts.windowsVerbatimArguments = true;
+ }
+
+ if (opts.shell) {
+ file = opts.shell;
+ delete opts.shell;
+ }
+
+ return fn(file, args, opts);
+}
+
+function getStream(process, stream, {encoding, buffer, maxBuffer}) {
+ if (!process[stream]) {
+ return null;
+ }
+
+ let ret;
+
+ if (!buffer) {
+ // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10
+ ret = new Promise((resolve, reject) => {
+ process[stream]
+ .once('end', resolve)
+ .once('error', reject);
+ });
+ } else if (encoding) {
+ ret = _getStream(process[stream], {
+ encoding,
+ maxBuffer
+ });
+ } else {
+ ret = _getStream.buffer(process[stream], {maxBuffer});
+ }
+
+ return ret.catch(err => {
+ err.stream = stream;
+ err.message = `${stream} ${err.message}`;
+ throw err;
+ });
+}
+
+function makeError(result, options) {
+ const {stdout, stderr} = result;
+
+ let err = result.error;
+ const {code, signal} = result;
+
+ const {parsed, joinedCmd} = options;
+ const timedOut = options.timedOut || false;
+
+ if (!err) {
+ let output = '';
+
+ if (Array.isArray(parsed.opts.stdio)) {
+ if (parsed.opts.stdio[2] !== 'inherit') {
+ output += output.length > 0 ? stderr : `\n${stderr}`;
+ }
+
+ if (parsed.opts.stdio[1] !== 'inherit') {
+ output += `\n${stdout}`;
+ }
+ } else if (parsed.opts.stdio !== 'inherit') {
+ output = `\n${stderr}${stdout}`;
+ }
+
+ err = new Error(`Command failed: ${joinedCmd}${output}`);
+ err.code = code < 0 ? errname(code) : code;
+ }
+
+ err.stdout = stdout;
+ err.stderr = stderr;
+ err.failed = true;
+ err.signal = signal || null;
+ err.cmd = joinedCmd;
+ err.timedOut = timedOut;
+
+ return err;
+}
+
+function joinCmd(cmd, args) {
+ let joinedCmd = cmd;
+
+ if (Array.isArray(args) && args.length > 0) {
+ joinedCmd += ' ' + args.join(' ');
+ }
+
+ return joinedCmd;
+}
+
+module.exports = (cmd, args, opts) => {
+ const parsed = handleArgs(cmd, args, opts);
+ const {encoding, buffer, maxBuffer} = parsed.opts;
+ const joinedCmd = joinCmd(cmd, args);
+
+ let spawned;
+ try {
+ spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts);
+ } catch (err) {
+ return Promise.reject(err);
+ }
+
+ let removeExitHandler;
+ if (parsed.opts.cleanup) {
+ removeExitHandler = onExit(() => {
+ spawned.kill();
+ });
+ }
+
+ let timeoutId = null;
+ let timedOut = false;
+
+ const cleanup = () => {
+ if (timeoutId) {
+ clearTimeout(timeoutId);
+ timeoutId = null;
+ }
+
+ if (removeExitHandler) {
+ removeExitHandler();
+ }
+ };
+
+ if (parsed.opts.timeout > 0) {
+ timeoutId = setTimeout(() => {
+ timeoutId = null;
+ timedOut = true;
+ spawned.kill(parsed.opts.killSignal);
+ }, parsed.opts.timeout);
+ }
+
+ const processDone = new Promise(resolve => {
+ spawned.on('exit', (code, signal) => {
+ cleanup();
+ resolve({code, signal});
+ });
+
+ spawned.on('error', err => {
+ cleanup();
+ resolve({error: err});
+ });
+
+ if (spawned.stdin) {
+ spawned.stdin.on('error', err => {
+ cleanup();
+ resolve({error: err});
+ });
+ }
+ });
+
+ function destroy() {
+ if (spawned.stdout) {
+ spawned.stdout.destroy();
+ }
+
+ if (spawned.stderr) {
+ spawned.stderr.destroy();
+ }
+ }
+
+ const handlePromise = () => pFinally(Promise.all([
+ processDone,
+ getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}),
+ getStream(spawned, 'stderr', {encoding, buffer, maxBuffer})
+ ]).then(arr => {
+ const result = arr[0];
+ result.stdout = arr[1];
+ result.stderr = arr[2];
+
+ if (result.error || result.code !== 0 || result.signal !== null) {
+ const err = makeError(result, {
+ joinedCmd,
+ parsed,
+ timedOut
+ });
+
+ // TODO: missing some timeout logic for killed
+ // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203
+ // err.killed = spawned.killed || killed;
+ err.killed = err.killed || spawned.killed;
+
+ if (!parsed.opts.reject) {
+ return err;
+ }
+
+ throw err;
+ }
+
+ return {
+ stdout: handleOutput(parsed.opts, result.stdout),
+ stderr: handleOutput(parsed.opts, result.stderr),
+ code: 0,
+ failed: false,
+ killed: false,
+ signal: null,
+ cmd: joinedCmd,
+ timedOut: false
+ };
+ }), destroy);
+
+ crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed);
+
+ handleInput(spawned, parsed.opts.input);
+
+ spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected);
+ spawned.catch = onrejected => handlePromise().catch(onrejected);
+
+ return spawned;
+};
+
+// TODO: set `stderr: 'ignore'` when that option is implemented
+module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout);
+
+// TODO: set `stdout: 'ignore'` when that option is implemented
+module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr);
+
+module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts);
+
+module.exports.sync = (cmd, args, opts) => {
+ const parsed = handleArgs(cmd, args, opts);
+ const joinedCmd = joinCmd(cmd, args);
+
+ if (isStream(parsed.opts.input)) {
+ throw new TypeError('The `input` option cannot be a stream in sync mode');
+ }
+
+ const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts);
+ result.code = result.status;
+
+ if (result.error || result.status !== 0 || result.signal !== null) {
+ const err = makeError(result, {
+ joinedCmd,
+ parsed
+ });
+
+ if (!parsed.opts.reject) {
+ return err;
+ }
+
+ throw err;
+ }
+
+ return {
+ stdout: handleOutput(parsed.opts, result.stdout),
+ stderr: handleOutput(parsed.opts, result.stderr),
+ code: 0,
+ failed: false,
+ signal: null,
+ cmd: joinedCmd,
+ timedOut: false
+ };
+};
+
+module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts);
+
+
+/***/ }),
+
+/***/ 966:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const {PassThrough} = __webpack_require__(413);
+
+module.exports = options => {
+ options = Object.assign({}, options);
+
+ const {array} = options;
+ let {encoding} = options;
+ const buffer = encoding === 'buffer';
+ let objectMode = false;
+
+ if (array) {
+ objectMode = !(encoding || buffer);
+ } else {
+ encoding = encoding || 'utf8';
+ }
+
+ if (buffer) {
+ encoding = null;
+ }
+
+ let len = 0;
+ const ret = [];
+ const stream = new PassThrough({objectMode});
+
+ if (encoding) {
+ stream.setEncoding(encoding);
+ }
+
+ stream.on('data', chunk => {
+ ret.push(chunk);
+
+ if (objectMode) {
+ len = ret.length;
+ } else {
+ len += chunk.length;
+ }
+ });
+
+ stream.getBufferedValue = () => {
+ if (array) {
+ return ret;
+ }
+
+ return buffer ? Buffer.concat(ret, len) : ret.join('');
+ };
+
+ stream.getBufferedLength = () => len;
+
+ return stream;
+};
+
+
+/***/ }),
+
+/***/ 969:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var wrappy = __webpack_require__(11)
+module.exports = wrappy(once)
+module.exports.strict = wrappy(onceStrict)
+
+once.proto = once(function () {
+ Object.defineProperty(Function.prototype, 'once', {
+ value: function () {
+ return once(this)
+ },
+ configurable: true
+ })
+
+ Object.defineProperty(Function.prototype, 'onceStrict', {
+ value: function () {
+ return onceStrict(this)
+ },
+ configurable: true
+ })
+})
+
+function once (fn) {
+ var f = function () {
+ if (f.called) return f.value
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ f.called = false
+ return f
+}
+
+function onceStrict (fn) {
+ var f = function () {
+ if (f.called)
+ throw new Error(f.onceError)
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ var name = fn.name || 'Function wrapped with `once`'
+ f.onceError = name + " shouldn't be called more than once"
+ f.called = false
+ return f
+}
+
+
/***/ }),
/***/ 972:
@@ -8438,7 +35743,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
-const tr = __importStar(__webpack_require__(9));
+const tr = __importStar(__webpack_require__(686));
/**
* Exec a command.
* Output will be streamed to the live console.
diff --git a/package-lock.json b/package-lock.json
index db4af11..7b8646e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -257,6 +257,11 @@
"@types/node": "*"
}
},
+ "@types/tmp": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.2.0.tgz",
+ "integrity": "sha512-flgpHJjntpBAdJD43ShRosQvNC0ME97DCfGvZEDlAThQmnerRXrLbX6YgzRBQCZTthET9eAWFAMaYP0m0Y4HzQ=="
+ },
"@types/uuid": {
"version": "3.4.9",
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.9.tgz",
@@ -923,6 +928,17 @@
"chardet": "^0.7.0",
"iconv-lite": "^0.4.24",
"tmp": "^0.0.33"
+ },
+ "dependencies": {
+ "tmp": {
+ "version": "0.0.33",
+ "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
+ "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
+ "dev": true,
+ "requires": {
+ "os-tmpdir": "~1.0.2"
+ }
+ }
}
},
"fast-deep-equal": {
@@ -996,8 +1012,7 @@
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
- "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
- "dev": true
+ "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
},
"function-bind": {
"version": "1.1.1",
@@ -1029,7 +1044,6 @@
"version": "7.1.6",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
"integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
- "dev": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
@@ -1125,7 +1139,6 @@
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
- "dev": true,
"requires": {
"once": "^1.3.0",
"wrappy": "1"
@@ -1134,8 +1147,7 @@
"inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
- "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
- "dev": true
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"inquirer": {
"version": "7.1.0",
@@ -1625,8 +1637,7 @@
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
- "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
- "dev": true
+ "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
},
"path-key": {
"version": "2.0.1",
@@ -2035,12 +2046,21 @@
"dev": true
},
"tmp": {
- "version": "0.0.33",
- "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
- "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
- "dev": true,
+ "version": "0.2.1",
+ "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz",
+ "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==",
"requires": {
- "os-tmpdir": "~1.0.2"
+ "rimraf": "^3.0.0"
+ },
+ "dependencies": {
+ "rimraf": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+ "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+ "requires": {
+ "glob": "^7.1.3"
+ }
+ }
}
},
"tslib": {
diff --git a/package.json b/package.json
index 902255b..b074a8a 100644
--- a/package.json
+++ b/package.json
@@ -9,6 +9,8 @@
"prepare-cache": "cd node_modules/cache && npm run build",
"prepare-deps": "npm run prepare-setup-go && npm run prepare-cache",
"build": "tsc && ncc build -o dist/run/ src/main.ts && ncc build -o dist/post_run/ src/post_main.ts",
+ "watched_build_main": "tsc && ncc build -w -o dist/run/ src/main.ts",
+ "watched_build_post_main": "tsc && ncc build -w -o dist/post_run/ src/post_main.ts",
"type-check": "tsc",
"lint": "eslint --max-warnings 1 **/*.ts --cache",
"lint-fix": "eslint **/*.ts --cache --fix",
@@ -29,8 +31,10 @@
"@actions/github": "^2.1.1",
"@actions/tool-cache": "^1.3.4",
"@types/semver": "^7.1.0",
+ "@types/tmp": "^0.2.0",
"cache": "git+https://github.com/golangci/cache.git",
- "setup-go": "git+https://github.com/actions/setup-go.git"
+ "setup-go": "git+https://github.com/actions/setup-go.git",
+ "tmp": "^0.2.1"
},
"devDependencies": {
"@types/node": "^12.0.4",
diff --git a/sample/sample.go b/sample/sample.go
index ed4b718..80504b0 100644
--- a/sample/sample.go
+++ b/sample/sample.go
@@ -10,6 +10,7 @@ import (
// Hash~
func Hash(data string) string {
retError()
+ retError2()
h := md5.New()
h.Write([]byte(data))
@@ -19,3 +20,7 @@ func Hash(data string) string {
func retError() error {
return errors.New("err")
}
+
+func retError2() error {
+ return errors.New("err2")
+}
diff --git a/src/run.ts b/src/run.ts
index ae08a99..f26c914 100644
--- a/src/run.ts
+++ b/src/run.ts
@@ -1,7 +1,9 @@
import * as core from "@actions/core"
+import * as github from "@actions/github"
import { exec, ExecOptions } from "child_process"
import * as fs from "fs"
import * as path from "path"
+import { dir } from "tmp"
import { promisify } from "util"
import { restoreCache, saveCache } from "./cache"
@@ -9,26 +11,91 @@ import { installGo, installLint } from "./install"
import { findLintVersion } from "./version"
const execShellCommand = promisify(exec)
+const writeFile = promisify(fs.writeFile)
+const createTempDir = promisify(dir)
async function prepareLint(): Promise<string> {
const versionConfig = await findLintVersion()
return await installLint(versionConfig)
}
-async function prepareEnv(): Promise<string> {
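+// fetchPatch downloads the pull request patch via the GitHub API when only-new-issues is enabled.
+// It returns the path of a temp file containing the patch, or "" when the patch isn't needed or couldn't be fetched.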
+async function fetchPatch(): Promise<string> {
+ const onlyNewIssues = core.getInput(`only-new-issues`, { required: true }).trim()
+ if (onlyNewIssues !== `false` && onlyNewIssues !== `true`) {
+ throw new Error(`invalid value of "only-new-issues": "${onlyNewIssues}", expected "true" or "false"`)
+ }
+ if (onlyNewIssues === `false`) {
+ return ``
+ }
+
+ const ctx = github.context
+ if (ctx.eventName !== `pull_request`) {
+ core.info(`Not fetching patch for showing only new issues because it's not a pull request context: event name is ${ctx.eventName}`)
+ return ``
+ }
+ const pull = ctx.payload.pull_request
+ if (!pull) {
+ core.warning(`No pull request in context`)
+ return ``
+ }
+
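+ // Request the pull request in "diff" media format so the API returns the unified patch as plain text.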
+ const octokit = new github.GitHub(core.getInput(`github-token`, { required: true }))
+ let patch: string
+ try {
+ const patchResp = await octokit.pulls.get({
+ owner: ctx.repo.owner,
+ repo: ctx.repo.repo,
+ [`pull_number`]: pull.number,
+ mediaType: {
+ format: `diff`,
+ },
+ })
+
+ if (patchResp.status !== 200) {
+ core.warning(`failed to fetch pull request patch: response status is ${patchResp.status}`)
+ return `` // don't fail the action, but analyze without patch
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ patch = patchResp.data as any
+ } catch (err) {
+ console.warn(`failed to fetch pull request patch:`, err)
+ return `` // don't fail the action, but analyze without patch
+ }
+
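+ // Write the patch to a temporary file so golangci-lint can consume it via --new-from-patch.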
+ try {
+ const tempDir = await createTempDir()
+ const patchPath = path.join(tempDir, "pull.patch")
+ core.info(`Writing patch to ${patchPath}`)
+ await writeFile(patchPath, patch)
+ return patchPath
+ } catch (err) {
+ console.warn(`failed to save pull request patch:`, err)
+ return `` // don't fail the action, but analyze without patch
+ }
+}
+
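+// Env holds everything prepared for the lint run: the golangci-lint binary path and the optional patch file path.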
+type Env = {
+ lintPath: string
+ patchPath: string
+}
+
+async function prepareEnv(): Promise<Env> {
const startedAt = Date.now()
// Prepare cache, lint and go in parallel.
const restoreCachePromise = restoreCache()
const prepareLintPromise = prepareLint()
const installGoPromise = installGo()
+ const patchPromise = fetchPatch()
const lintPath = await prepareLintPromise
await installGoPromise
await restoreCachePromise
+ const patchPath = await patchPromise
core.info(`Prepared env in ${Date.now() - startedAt}ms`)
- return lintPath
+ return { lintPath, patchPath }
}
type ExecRes = {
@@ -45,21 +112,46 @@ const printOutput = (res: ExecRes): void => {
}
}
-async function runLint(lintPath: string): Promise<void> {
+async function runLint(lintPath: string, patchPath: string): Promise<void> {
const debug = core.getInput(`debug`)
if (debug.split(`,`).includes(`cache`)) {
const res = await execShellCommand(`${lintPath} cache status`)
printOutput(res)
}
- const args = core.getInput(`args`)
- if (args.includes(`-out-format`)) {
+ const userArgs = core.getInput(`args`)
+ const addedArgs: string[] = []
+
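+ // Collect the names of flags the user passed in `args` (e.g. "--issues-exit-code=0" -> "issues-exit-code")
+ // so conflicts with the flags the action adds below can be detected.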
+ const userArgNames = new Set<string>()
+ userArgs
+ .split(/\s/)
+ .map(arg => arg.split(`=`)[0])
+ .filter(arg => arg.startsWith(`-`))
+ .forEach(arg => {
+ userArgNames.add(arg.replace(/^-+/, ``))
+ })
+
+ if (userArgNames.has(`out-format`)) {
throw new Error(`please, don't change out-format for golangci-lint: it can be broken in a future`)
}
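+ // The github-actions out-format prints issues as workflow commands, so they show up as annotations on the pull request.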
+ addedArgs.push(`--out-format=github-actions`)
+
+ if (patchPath && (userArgNames.has(`new`) || userArgNames.has(`new-from-rev`) || userArgNames.has(`new-from-patch`))) {
+ throw new Error(`please, don't specify manually --new* args when requesting only new issues`)
+ }
+ addedArgs.push(`--new-from-patch=${patchPath}`)
+
+ // Override config values.
+ addedArgs.push(`--new=false`)
+ addedArgs.push(`--new-from-rev=`)
const workingDirectory = core.getInput(`working-directory`)
const cmdArgs: ExecOptions = {}
- if (workingDirectory != ``) {
+ if (workingDirectory) {
+ if (patchPath) {
+ // TODO: make them compatible
+ throw new Error(`options working-directory and only-new-issues aren't compatible`)
+ }
if (!fs.existsSync(workingDirectory) || !fs.lstatSync(workingDirectory).isDirectory()) {
throw new Error(`working-directory (${workingDirectory}) was not a path`)
}
@@ -67,7 +159,7 @@ async function runLint(lintPath: string): Promise {
cmdArgs.cwd = path.resolve(workingDirectory)
}
- const cmd = `${lintPath} run --out-format=github-actions ${args}`.trimRight()
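+ // Compose the final command: binary path, flags added by the action, then the user-provided args.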
+ const cmd = `${lintPath} run ${addedArgs.join(` `)} ${userArgs}`.trimRight()
core.info(`Running [${cmd}] in [${cmdArgs.cwd}] ...`)
const startedAt = Date.now()
try {
@@ -91,8 +183,8 @@ async function runLint(lintPath: string): Promise {
export async function run(): Promise<void> {
try {
- const lintPath = await core.group(`prepare environment`, prepareEnv)
- await core.group(`run golangci-lint`, () => runLint(lintPath))
+ const { lintPath, patchPath } = await core.group(`prepare environment`, prepareEnv)
+ await core.group(`run golangci-lint`, () => runLint(lintPath, patchPath))
} catch (error) {
core.error(`Failed to run: ${error}, ${error.stack}`)
core.setFailed(error.message)