Mirror of https://code.forgejo.org/actions/checkout
Synced 2024-11-21 23:29:45 +00:00
31294 lines · no EOL · 810 KiB · JavaScript
// Webpack bundle bootstrap: a minimal CommonJS-style runtime. It receives the
// map of bundled modules, caches each module after first evaluation, and
// returns the exports of the entry module (id 198).
module.exports =
/******/ (function(modules, runtime) { // webpackBootstrap
/******/ 	"use strict";
/******/ 	// The module cache
/******/ 	var installedModules = {};
/******/
/******/ 	// The require function
/******/ 	function __webpack_require__(moduleId) {
/******/
/******/ 		// Check if module is in cache
/******/ 		if(installedModules[moduleId]) {
/******/ 			return installedModules[moduleId].exports;
/******/ 		}
/******/ 		// Create a new module (and put it into the cache)
/******/ 		var module = installedModules[moduleId] = {
/******/ 			i: moduleId, // module id
/******/ 			l: false,    // loaded flag, set after the factory runs
/******/ 			exports: {}
/******/ 		};
/******/
/******/ 		// Execute the module function
/******/ 		modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ 		// Flag the module as loaded
/******/ 		module.l = true;
/******/
/******/ 		// Return the exports of the module
/******/ 		return module.exports;
/******/ 	}
/******/
/******/ 	// Absolute base path modules use to resolve bundled assets on disk.
/******/ 	__webpack_require__.ab = __dirname + "/";
/******/
/******/ 	// the startup function
/******/ 	function startup() {
/******/ 		// Load entry module and return exports
/******/ 		return __webpack_require__(198);
/******/ 	};
/******/
/******/ 	// run startup
/******/ 	return startup();
/******/ })
/************************************************************************/
/******/ ({
|
|
|
|
/***/ 0:
/***/ (function(module, __unusedexports, __webpack_require__) {

// Octokit REST entry: assembles the client constructor from the core client
// plus its plugin set, and keeps the deprecated "default export is the
// constructor" call style working (with a runtime warning).
const { requestLog } = __webpack_require__(916);
const {
  restEndpointMethods
} = __webpack_require__(842);

const Core = __webpack_require__(529);

const CORE_PLUGINS = [
  __webpack_require__(190),
  __webpack_require__(19), // deprecated: remove in v17
  requestLog,
  __webpack_require__(148),
  restEndpointMethods,
  __webpack_require__(430),

  __webpack_require__(850) // deprecated: remove in v17
];

const OctokitRest = Core.plugin(CORE_PLUGINS);

// Backward-compat shim: calling the default export logs a deprecation
// warning (via options.log.warn when provided, else console.warn), then
// delegates to the real constructor.
function DeprecatedOctokit(options) {
  const warn =
    options && options.log && options.log.warn
      ? options.log.warn
      : console.warn;
  warn(
    '[@octokit/rest] `const Octokit = require("@octokit/rest")` is deprecated. Use `const { Octokit } = require("@octokit/rest")` instead'
  );
  return new OctokitRest(options);
}

const Octokit = Object.assign(DeprecatedOctokit, {
  Octokit: OctokitRest
});

// Copy every own enumerable static member of the real constructor onto the
// shim so both call styles expose the same surface.
Object.keys(OctokitRest).forEach(key => {
  /* istanbul ignore else */
  if (OctokitRest.hasOwnProperty(key)) {
    Octokit[key] = OctokitRest[key];
  }
});

module.exports = Octokit;


/***/ }),
|
|
|
|
/***/ 1:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

// io helper module (source map names it io.js — see the sourceMappingURL at
// the bottom): exports cp, mv, rmRF, mkdirP, which.
// `__awaiter` is the TypeScript-emitted helper that drives a generator
// function as an async function: each `yield` awaits the yielded promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
// Bundled module ids. 129 exposes exec() (promisified below) — behaves like
// node's child_process; 622/669 behave like node's path/util; 672 is the
// fs-wrapper helper module (ioUtil) used throughout.
const childProcess = __webpack_require__(129);
const path = __webpack_require__(622);
const util_1 = __webpack_require__(669);
const ioUtil = __webpack_require__(672);
const exec = util_1.promisify(childProcess.exec);
|
|
/**
 * Copies a file or folder.
 * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
 *
 * @param source  source path (must exist, else throws)
 * @param dest    destination path
 * @param options optional. See CopyOptions; `force` defaults to true and
 *                `recursive` to false (see readCopyOptions).
 */
function cp(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        const { force, recursive } = readCopyOptions(options);
        const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
        // Dest is an existing file, but not forcing
        if (destStat && destStat.isFile() && !force) {
            return;
        }
        // If dest is an existing directory, should copy inside.
        const newDest = destStat && destStat.isDirectory()
            ? path.join(dest, path.basename(source))
            : dest;
        if (!(yield ioUtil.exists(source))) {
            throw new Error(`no such file or directory: ${source}`);
        }
        const sourceStat = yield ioUtil.stat(source);
        if (sourceStat.isDirectory()) {
            if (!recursive) {
                throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
            }
            else {
                // depth 0 starts the runaway-recursion guard in cpDirRecursive
                yield cpDirRecursive(source, newDest, 0, force);
            }
        }
        else {
            if (path.relative(source, newDest) === '') {
                // a file cannot be copied to itself
                throw new Error(`'${newDest}' and '${source}' are the same file`);
            }
            yield copyFile(source, newDest, force);
        }
    });
}
exports.cp = cp;
|
|
/**
 * Moves a path. If the destination already exists it is removed first
 * (unless options.force is explicitly false, in which case this throws).
 *
 * @param source  source path
 * @param dest    destination path
 * @param options optional. See MoveOptions; `force` defaults to true.
 */
function mv(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        if (yield ioUtil.exists(dest)) {
            let destExists = true;
            if (yield ioUtil.isDirectory(dest)) {
                // If dest is directory copy src into dest
                dest = path.join(dest, path.basename(source));
                destExists = yield ioUtil.exists(dest);
            }
            if (destExists) {
                if (options.force == null || options.force) {
                    yield rmRF(dest);
                }
                else {
                    throw new Error('Destination already exists');
                }
            }
        }
        // Ensure the parent directory exists, then rename in place.
        yield mkdirP(path.dirname(dest));
        yield ioUtil.rename(source, dest);
    });
}
exports.mv = mv;
|
|
/**
 * Remove a path recursively with force.
 * A missing path is not an error (ENOENT is swallowed on every branch).
 *
 * @param inputPath path to remove
 */
function rmRF(inputPath) {
    return __awaiter(this, void 0, void 0, function* () {
        if (ioUtil.IS_WINDOWS) {
            // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
            // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
            // NOTE(review): inputPath is interpolated into a shell command line; a
            // path containing a double quote could break out of the quoting, so
            // callers must only pass trusted paths — confirm against upstream.
            try {
                if (yield ioUtil.isDirectory(inputPath, true)) {
                    yield exec(`rd /s /q "${inputPath}"`);
                }
                else {
                    yield exec(`del /f /a "${inputPath}"`);
                }
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
            }
            // Shelling out fails to remove a symlink folder with missing source, this unlink catches that
            try {
                yield ioUtil.unlink(inputPath);
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
            }
        }
        else {
            let isDir = false;
            try {
                isDir = yield ioUtil.isDirectory(inputPath);
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
                return;
            }
            if (isDir) {
                // Same shell-interpolation caveat as the Windows branch above.
                yield exec(`rm -rf "${inputPath}"`);
            }
            else {
                yield ioUtil.unlink(inputPath);
            }
        }
    });
}
exports.rmRF = rmRF;
|
|
/**
 * Make a directory. Creates the full path with folders in between.
 * Will throw if it fails. Delegates entirely to ioUtil.mkdirP.
 *
 * @param fsPath path to create
 * @returns Promise<void>
 */
function mkdirP(fsPath) {
    return __awaiter(this, void 0, void 0, function* () {
        yield ioUtil.mkdirP(fsPath);
    });
}
exports.mkdirP = mkdirP;
|
|
/**
 * Returns path of a tool had the tool actually been invoked. Resolves via
 * the PATH environment variable (and PATHEXT extensions on Windows).
 * If `check` is true and the tool cannot be found, this throws; otherwise a
 * missing tool resolves to the empty string.
 *
 * @param tool  name of the tool (or a rooted path)
 * @param check whether to throw when the tool does not exist
 * @returns Promise<string> path to tool, or '' when not found (check=false)
 */
function which(tool, check) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!tool) {
            throw new Error("parameter 'tool' is required");
        }
        // recursive when check=true: probe once without checking, then throw
        // a platform-appropriate message if nothing was found.
        if (check) {
            const result = yield which(tool, false);
            if (!result) {
                if (ioUtil.IS_WINDOWS) {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
                }
                else {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
                }
            }
        }
        try {
            // build the list of extensions to try
            const extensions = [];
            if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {
                for (const extension of process.env.PATHEXT.split(path.delimiter)) {
                    if (extension) {
                        extensions.push(extension);
                    }
                }
            }
            // if it's rooted, return it if exists. otherwise return empty.
            if (ioUtil.isRooted(tool)) {
                const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
                if (filePath) {
                    return filePath;
                }
                return '';
            }
            // if any path separators, return empty
            if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) {
                return '';
            }
            // build the list of directories
            //
            // Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
            // it feels like we should not do this. Checking the current directory seems like more of a use
            // case of a shell, and the which() function exposed by the toolkit should strive for consistency
            // across platforms.
            const directories = [];
            if (process.env.PATH) {
                for (const p of process.env.PATH.split(path.delimiter)) {
                    if (p) {
                        directories.push(p);
                    }
                }
            }
            // return the first match
            for (const directory of directories) {
                const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);
                if (filePath) {
                    return filePath;
                }
            }
            return '';
        }
        catch (err) {
            // NOTE(review): wrapping here discards the original stack and any
            // err.code — verify no caller depends on those before changing.
            throw new Error(`which failed with message ${err.message}`);
        }
    });
}
exports.which = which;
|
|
// Normalizes CopyOptions: `force` defaults to true only when null/undefined
// (falsy values like 0/false are kept as given); `recursive` is coerced to a
// strict boolean and defaults to false.
function readCopyOptions(options) {
    let force = true;
    if (options.force != null) {
        force = options.force;
    }
    const recursive = !!options.recursive;
    return { force, recursive };
}
|
|
// Recursively copy sourceDir into destDir, then mirror the source directory's
// mode onto the new directory. Depth-capped at 255 levels as a guard.
function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
    return __awaiter(this, void 0, void 0, function* () {
        // Ensure there is not a run away recursive copy
        if (currentDepth >= 255)
            return;
        currentDepth++;
        yield mkdirP(destDir);
        const files = yield ioUtil.readdir(sourceDir);
        for (const fileName of files) {
            // '/' joining works for the ioUtil calls below on both platforms here
            const srcFile = `${sourceDir}/${fileName}`;
            const destFile = `${destDir}/${fileName}`;
            // lstat so symlinked directories are treated as links, not recursed
            const srcFileStat = yield ioUtil.lstat(srcFile);
            if (srcFileStat.isDirectory()) {
                // Recurse
                yield cpDirRecursive(srcFile, destFile, currentDepth, force);
            }
            else {
                yield copyFile(srcFile, destFile, force);
            }
        }
        // Change the mode for the newly created directory
        yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);
    });
}
|
|
// Buffered file copy. Symlinks are re-created at the destination (not
// dereferenced); regular files are copied only when the destination is
// absent or `force` is set.
function copyFile(srcFile, destFile, force) {
    return __awaiter(this, void 0, void 0, function* () {
        if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
            // unlink/re-link it
            try {
                yield ioUtil.lstat(destFile);
                yield ioUtil.unlink(destFile);
            }
            catch (e) {
                // Try to override file permission
                if (e.code === 'EPERM') {
                    yield ioUtil.chmod(destFile, '0666');
                    yield ioUtil.unlink(destFile);
                }
                // other errors = it doesn't exist, no work to do
            }
            // Copy over symlink
            const symlinkFull = yield ioUtil.readlink(srcFile);
            yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
        }
        else if (!(yield ioUtil.exists(destFile)) || force) {
            yield ioUtil.copyFile(srcFile, destFile);
        }
    });
}
//# sourceMappingURL=io.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";

// os-name style module: maps a platform/release pair to a human-readable
// OS name string. Bundled ids: 87 behaves like node's 'os' (platform/release
// are called on it below); 118 resolves a darwin release to a macOS name;
// 49 resolves a win32 release to a Windows version string.
const os = __webpack_require__(87);
const macosRelease = __webpack_require__(118);
const winRelease = __webpack_require__(49);
|
|
|
|
// Resolve a human-readable OS name for a platform/release pair.
// Both arguments are optional: a missing platform falls back to the current
// one, and a missing release (for the current platform only) falls back to
// os.release(). Unrecognized platforms are returned verbatim.
const osName = (platform, release) => {
    if (!platform && release) {
        throw new Error('You can\'t specify a `release` without specifying `platform`');
    }

    platform = platform || os.platform();

    let id;

    switch (platform) {
        case 'darwin': {
            if (!release && os.platform() === 'darwin') {
                release = os.release();
            }
            // Darwin kernel versions above 15 correspond to the "macOS" branding.
            const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS';
            id = release ? macosRelease(release).name : '';
            return prefix + (id ? ' ' + id : '');
        }
        case 'linux': {
            if (!release && os.platform() === 'linux') {
                release = os.release();
            }
            // Keep only the "major.minor" part of the kernel release.
            id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : '';
            return 'Linux' + (id ? ' ' + id : '');
        }
        case 'win32': {
            if (!release && os.platform() === 'win32') {
                release = os.release();
            }
            id = release ? winRelease(release) : '';
            return 'Windows' + (id ? ' ' + id : '');
        }
        default:
            return platform;
    }
};
|
|
|
|
module.exports = osName;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3:
/***/ (function(module, __unusedexports, __webpack_require__) {

// end-of-stream style module: invoke `callback` exactly once when `stream`
// finishes, ends, closes, errors, or (for child processes) exits. Returns a
// function that detaches every listener this call attached.
var once = __webpack_require__(969);

var noop = function() {};

// Heuristic: an http.ClientRequest-like object exposes setHeader and abort().
var isRequest = function(stream) {
    return stream.setHeader && typeof stream.abort === 'function';
};

// Heuristic: a child process carries a 3-element stdio array.
var isChildProcess = function(stream) {
    return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
};

var eos = function(stream, opts, callback) {
    // opts is optional: eos(stream, cb) is supported via this re-dispatch.
    if (typeof opts === 'function') return eos(stream, null, opts);
    if (!opts) opts = {};

    // Guard against multiple invocations from overlapping events.
    callback = once(callback || noop);

    var ws = stream._writableState;
    var rs = stream._readableState;
    // Which sides to wait for; opts.readable/writable:false force a side off.
    var readable = opts.readable || (opts.readable !== false && stream.readable);
    var writable = opts.writable || (opts.writable !== false && stream.writable);
    var cancelled = false;

    // Legacy (pre-streams2) writables never emit 'finish'.
    var onlegacyfinish = function() {
        if (!stream.writable) onfinish();
    };

    var onfinish = function() {
        writable = false;
        if (!readable) callback.call(stream);
    };

    var onend = function() {
        readable = false;
        if (!writable) callback.call(stream);
    };

    var onexit = function(exitCode) {
        callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
    };

    var onerror = function(err) {
        callback.call(stream, err);
    };

    // Defer the close check one tick so 'end'/'finish' handlers run first.
    var onclose = function() {
        process.nextTick(onclosenexttick);
    };

    // If a side we are still waiting on closed without properly ending,
    // report a premature close.
    var onclosenexttick = function() {
        if (cancelled) return;
        if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
        if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
    };

    var onrequest = function() {
        stream.req.on('finish', onfinish);
    };

    if (isRequest(stream)) {
        stream.on('complete', onfinish);
        stream.on('abort', onclose);
        if (stream.req) onrequest();
        else stream.on('request', onrequest);
    } else if (writable && !ws) { // legacy streams
        stream.on('end', onlegacyfinish);
        stream.on('close', onlegacyfinish);
    }

    if (isChildProcess(stream)) stream.on('exit', onexit);

    stream.on('end', onend);
    stream.on('finish', onfinish);
    if (opts.error !== false) stream.on('error', onerror);
    stream.on('close', onclose);

    // Unsubscribe: remove all listeners and suppress the deferred
    // premature-close check via `cancelled`.
    return function() {
        cancelled = true;
        stream.removeListener('complete', onfinish);
        stream.removeListener('abort', onclose);
        stream.removeListener('request', onrequest);
        if (stream.req) stream.req.removeListener('finish', onfinish);
        stream.removeListener('end', onlegacyfinish);
        stream.removeListener('close', onlegacyfinish);
        stream.removeListener('finish', onfinish);
        stream.removeListener('exit', onexit);
        stream.removeListener('end', onend);
        stream.removeListener('error', onerror);
        stream.removeListener('close', onclose);
    };
};

module.exports = eos;


/***/ }),
|
|
|
|
/***/ 9:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

// Tool-runner module (exports ToolRunner below).
// `__awaiter` is the TypeScript async-helper: drives a generator so each
// `yield` awaits the yielded promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
// Bundled ids: 87 provides os.EOL (used for line splitting); 614 provides
// EventEmitter (ToolRunner extends it); 129 provides spawn() for launching
// the child process.
const os = __webpack_require__(87);
const events = __webpack_require__(614);
const child = __webpack_require__(129);
/* eslint-disable @typescript-eslint/unbound-method */
const IS_WINDOWS = process.platform === 'win32';
/*
 * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
 */
|
|
// Runs a command-line tool as a child process, streaming output to the
// configured streams/listeners and resolving with the exit code.
class ToolRunner extends events.EventEmitter {
    /**
     * @param toolPath path of the tool to run; must be non-empty
     * @param args     optional argument array
     * @param options  optional ExecOptions (listeners, silent, cwd, env, ...)
     */
    constructor(toolPath, args, options) {
        super();
        if (!toolPath) {
            throw new Error("Parameter 'toolPath' cannot be null or empty.");
        }
        this.toolPath = toolPath;
        this.args = args || [];
        this.options = options || {};
    }
    // Forward a debug message to the optional listeners.debug callback.
    _debug(message) {
        if (this.options.listeners && this.options.listeners.debug) {
            this.options.listeners.debug(message);
        }
    }
    // Build the human-readable command line that is echoed before execution.
    _getCommandString(options, noPrefix) {
        const toolPath = this._getSpawnFileName();
        const args = this._getSpawnArgs(options);
        let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
        if (IS_WINDOWS) {
            // Windows + cmd file
            if (this._isCmdFile()) {
                cmd += toolPath;
                for (const a of args) {
                    cmd += ` ${a}`;
                }
            }
            // Windows + verbatim
            else if (options.windowsVerbatimArguments) {
                cmd += `"${toolPath}"`;
                for (const a of args) {
                    cmd += ` ${a}`;
                }
            }
            // Windows (regular)
            else {
                cmd += this._windowsQuoteCmdArg(toolPath);
                for (const a of args) {
                    cmd += ` ${this._windowsQuoteCmdArg(a)}`;
                }
            }
        }
        else {
            // OSX/Linux - this can likely be improved with some form of quoting.
            // creating processes on Unix is fundamentally different than Windows.
            // on Unix, execvp() takes an arg array.
            cmd += toolPath;
            for (const a of args) {
                cmd += ` ${a}`;
            }
        }
        return cmd;
    }
    // Split accumulated output on os.EOL and pass each complete line to onLine.
    // NOTE(review): strings are passed by value in JS — the trailing
    // `strBuffer = s` does NOT update the caller's buffer, so any partial
    // line left over here is not visible to the caller. Confirm against
    // upstream before relying on the leftover-line behavior.
    _processLineBuffer(data, strBuffer, onLine) {
        try {
            let s = strBuffer + data.toString();
            let n = s.indexOf(os.EOL);
            while (n > -1) {
                const line = s.substring(0, n);
                onLine(line);
                // the rest of the string ...
                s = s.substring(n + os.EOL.length);
                n = s.indexOf(os.EOL);
            }
            strBuffer = s;
        }
        catch (err) {
            // streaming lines to console is best effort. Don't fail a build.
            this._debug(`error processing line. Failed with error ${err}`);
        }
    }
    // File actually handed to spawn(): cmd.exe when running a .cmd/.bat script
    // on Windows, otherwise the tool path itself.
    _getSpawnFileName() {
        if (IS_WINDOWS) {
            if (this._isCmdFile()) {
                return process.env['COMSPEC'] || 'cmd.exe';
            }
        }
        return this.toolPath;
    }
    // Args handed to spawn(): for .cmd/.bat files on Windows, everything is
    // folded into a single `/D /S /C "..."` argument for cmd.exe.
    _getSpawnArgs(options) {
        if (IS_WINDOWS) {
            if (this._isCmdFile()) {
                let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
                for (const a of this.args) {
                    argline += ' ';
                    argline += options.windowsVerbatimArguments
                        ? a
                        : this._windowsQuoteCmdArg(a);
                }
                argline += '"';
                return [argline];
            }
        }
        return this.args;
    }
    // Thin wrapper kept for API/testing symmetry.
    _endsWith(str, end) {
        return str.endsWith(end);
    }
    // True when the tool is a Windows batch script (.cmd/.bat, any case).
    _isCmdFile() {
        const upperToolPath = this.toolPath.toUpperCase();
        return (this._endsWith(upperToolPath, '.CMD') ||
            this._endsWith(upperToolPath, '.BAT'));
    }
    // Quote one argument for the cmd.exe command-line parser (used when the
    // tool is a .cmd/.bat file); for .exe tools defer to the libuv rules.
    _windowsQuoteCmdArg(arg) {
        // for .exe, apply the normal quoting rules that libuv applies
        if (!this._isCmdFile()) {
            return this._uvQuoteCmdArg(arg);
        }
        // otherwise apply quoting rules specific to the cmd.exe command line parser.
        // the libuv rules are generic and are not designed specifically for cmd.exe
        // command line parser.
        //
        // for a detailed description of the cmd.exe command line parser, refer to
        // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
        // need quotes for empty arg
        if (!arg) {
            return '""';
        }
        // determine whether the arg needs to be quoted
        const cmdSpecialChars = [
            ' ',
            '\t',
            '&',
            '(',
            ')',
            '[',
            ']',
            '{',
            '}',
            '^',
            '=',
            ';',
            '!',
            "'",
            '+',
            ',',
            '`',
            '~',
            '|',
            '<',
            '>',
            '"'
        ];
        let needsQuotes = false;
        for (const char of arg) {
            if (cmdSpecialChars.some(x => x === char)) {
                needsQuotes = true;
                break;
            }
        }
        // short-circuit if quotes not needed
        if (!needsQuotes) {
            return arg;
        }
        // the following quoting rules are very similar to the rules that by libuv applies.
        //
        // 1) wrap the string in quotes
        //
        // 2) double-up quotes - i.e. " => ""
        //
        //    this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
        //    doesn't work well with a cmd.exe command line.
        //
        //    note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
        //    for example, the command line:
        //          foo.exe "myarg:""my val"""
        //    is parsed by a .NET console app into an arg array:
        //          [ "myarg:\"my val\"" ]
        //    which is the same end result when applying libuv quoting rules. although the actual
        //    command line from libuv quoting rules would look like:
        //          foo.exe "myarg:\"my val\""
        //
        // 3) double-up slashes that precede a quote,
        //    e.g.  hello \world    => "hello \world"
        //          hello\"world    => "hello\\""world"
        //          hello\\"world   => "hello\\\\""world"
        //          hello world\    => "hello world\\"
        //
        //    technically this is not required for a cmd.exe command line, or the batch argument parser.
        //    the reasons for including this as a .cmd quoting rule are:
        //
        //    a) this is optimized for the scenario where the argument is passed from the .cmd file to an
        //       external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
        //
        //    b) it's what we've been doing previously (by deferring to node default behavior) and we
        //       haven't heard any complaints about that aspect.
        //
        // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
        // escaped when used on the command line directly - even though within a .cmd file % can be escaped
        // by using %%.
        //
        // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
        // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
        //
        // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
        // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
        // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
        // to an external program.
        //
        // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
        // % can be escaped within a .cmd file.
        let reverse = '"';
        let quoteHit = true;
        for (let i = arg.length; i > 0; i--) {
            // walk the string in reverse
            reverse += arg[i - 1];
            if (quoteHit && arg[i - 1] === '\\') {
                reverse += '\\'; // double the slash
            }
            else if (arg[i - 1] === '"') {
                quoteHit = true;
                reverse += '"'; // double the quote
            }
            else {
                quoteHit = false;
            }
        }
        reverse += '"';
        return reverse
            .split('')
            .reverse()
            .join('');
    }
    // Quote one argument using libuv's quote_cmd_arg rules (what Node applies
    // when the windowsVerbatimArguments spawn option is involved).
    _uvQuoteCmdArg(arg) {
        // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
        // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
        // is used.
        //
        // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
        // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
        // pasting copyright notice from Node within this function:
        //
        //      Copyright Joyent, Inc. and other Node contributors. All rights reserved.
        //
        //      Permission is hereby granted, free of charge, to any person obtaining a copy
        //      of this software and associated documentation files (the "Software"), to
        //      deal in the Software without restriction, including without limitation the
        //      rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
        //      sell copies of the Software, and to permit persons to whom the Software is
        //      furnished to do so, subject to the following conditions:
        //
        //      The above copyright notice and this permission notice shall be included in
        //      all copies or substantial portions of the Software.
        //
        //      THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
        //      IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
        //      FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
        //      AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
        //      LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
        //      FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
        //      IN THE SOFTWARE.
        if (!arg) {
            // Need double quotation for empty argument
            return '""';
        }
        if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
            // No quotation needed
            return arg;
        }
        if (!arg.includes('"') && !arg.includes('\\')) {
            // No embedded double quotes or backslashes, so I can just wrap
            // quote marks around the whole thing.
            return `"${arg}"`;
        }
        // Expected input/output:
        //   input : hello"world
        //   output: "hello\"world"
        //   input : hello""world
        //   output: "hello\"\"world"
        //   input : hello\world
        //   output: hello\world
        //   input : hello\\world
        //   output: hello\\world
        //   input : hello\"world
        //   output: "hello\\\"world"
        //   input : hello\\"world
        //   output: "hello\\\\\"world"
        //   input : hello world\
        //   output: "hello world\\" - note the comment in libuv actually reads "hello world\"
        //                             but it appears the comment is wrong, it should be "hello world\\"
        let reverse = '"';
        let quoteHit = true;
        for (let i = arg.length; i > 0; i--) {
            // walk the string in reverse
            reverse += arg[i - 1];
            if (quoteHit && arg[i - 1] === '\\') {
                reverse += '\\';
            }
            else if (arg[i - 1] === '"') {
                quoteHit = true;
                reverse += '\\';
            }
            else {
                quoteHit = false;
            }
        }
        reverse += '"';
        return reverse
            .split('')
            .reverse()
            .join('');
    }
    // Fill in defaults for every ExecOptions field, leaving the caller's
    // options object untouched.
    _cloneExecOptions(options) {
        options = options || {};
        const result = {
            cwd: options.cwd || process.cwd(),
            env: options.env || process.env,
            silent: options.silent || false,
            windowsVerbatimArguments: options.windowsVerbatimArguments || false,
            failOnStdErr: options.failOnStdErr || false,
            ignoreReturnCode: options.ignoreReturnCode || false,
            delay: options.delay || 10000
        };
        result.outStream = options.outStream || process.stdout;
        result.errStream = options.errStream || process.stderr;
        return result;
    }
    // Translate ExecOptions into the option object passed to spawn().
    _getSpawnOptions(options, toolPath) {
        options = options || {};
        const result = {};
        result.cwd = options.cwd;
        result.env = options.env;
        // verbatim mode is forced for .cmd/.bat files (args were pre-quoted
        // into a single cmd.exe line by _getSpawnArgs)
        result['windowsVerbatimArguments'] =
            options.windowsVerbatimArguments || this._isCmdFile();
        if (options.windowsVerbatimArguments) {
            result.argv0 = `"${toolPath}"`;
        }
        return result;
    }
    /**
     * Exec the configured tool (this.toolPath / this.args / this.options).
     * Output will be streamed to the live console (and to any listeners).
     * Returns a promise that resolves with the process return code, or
     * rejects when spawning fails (or per the state's completion rules).
     *
     * @returns Promise<number> exit code
     */
    exec() {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve, reject) => {
                this._debug(`exec tool: ${this.toolPath}`);
                this._debug('arguments:');
                for (const arg of this.args) {
                    this._debug(` ${arg}`);
                }
                const optionsNonNull = this._cloneExecOptions(this.options);
                if (!optionsNonNull.silent && optionsNonNull.outStream) {
                    optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
                }
                // ExecState is declared elsewhere in this module (beyond this
                // excerpt); it tracks exit/close bookkeeping and emits 'done'.
                const state = new ExecState(optionsNonNull, this.toolPath);
                state.on('debug', (message) => {
                    this._debug(message);
                });
                const fileName = this._getSpawnFileName();
                const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
                // NOTE(review): stdbuffer/errbuffer are `const ''` and never
                // reassigned; _processLineBuffer receives them by value and cannot
                // update them, so the leftover-line flush in the 'done' handler
                // below (`stdbuffer.length > 0`) can never fire — trailing partial
                // lines without an EOL are dropped. This matches the bundled
                // vendored version; confirm against upstream before changing.
                const stdbuffer = '';
                if (cp.stdout) {
                    cp.stdout.on('data', (data) => {
                        if (this.options.listeners && this.options.listeners.stdout) {
                            this.options.listeners.stdout(data);
                        }
                        if (!optionsNonNull.silent && optionsNonNull.outStream) {
                            optionsNonNull.outStream.write(data);
                        }
                        this._processLineBuffer(data, stdbuffer, (line) => {
                            if (this.options.listeners && this.options.listeners.stdline) {
                                this.options.listeners.stdline(line);
                            }
                        });
                    });
                }
                const errbuffer = '';
                if (cp.stderr) {
                    cp.stderr.on('data', (data) => {
                        state.processStderr = true;
                        if (this.options.listeners && this.options.listeners.stderr) {
                            this.options.listeners.stderr(data);
                        }
                        if (!optionsNonNull.silent &&
                            optionsNonNull.errStream &&
                            optionsNonNull.outStream) {
                            // route stderr to errStream only when failOnStdErr is set
                            const s = optionsNonNull.failOnStdErr
                                ? optionsNonNull.errStream
                                : optionsNonNull.outStream;
                            s.write(data);
                        }
                        this._processLineBuffer(data, errbuffer, (line) => {
                            if (this.options.listeners && this.options.listeners.errline) {
                                this.options.listeners.errline(line);
                            }
                        });
                    });
                }
                cp.on('error', (err) => {
                    state.processError = err.message;
                    state.processExited = true;
                    state.processClosed = true;
                    state.CheckComplete();
                });
                cp.on('exit', (code) => {
                    state.processExitCode = code;
                    state.processExited = true;
                    this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
                    state.CheckComplete();
                });
                cp.on('close', (code) => {
                    state.processExitCode = code;
                    state.processExited = true;
                    state.processClosed = true;
                    this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
                    state.CheckComplete();
                });
                state.on('done', (error, exitCode) => {
                    if (stdbuffer.length > 0) {
                        this.emit('stdline', stdbuffer);
                    }
                    if (errbuffer.length > 0) {
                        this.emit('errline', errbuffer);
                    }
                    cp.removeAllListeners();
                    if (error) {
                        reject(error);
                    }
                    else {
                        resolve(exitCode);
                    }
                });
            });
        });
    }
}
exports.ToolRunner = ToolRunner;
|
|
/**
 * Convert an arg string to an array of args. Handles escaping
 *
 * Quoting rules: double quotes group words; a backslash escapes only a
 * following double quote (or another backslash) — before any other
 * character it is kept literally.
 *
 * @param argString string of arguments
 * @returns string[] array of arguments
 */
function argStringToArray(argString) {
    const result = [];
    let insideQuotes = false;
    let pendingEscape = false;
    let current = '';
    const push = (ch) => {
        // only double quotes are escapable; a dangling backslash before
        // anything else is emitted literally
        if (pendingEscape && ch !== '"') {
            current += '\\';
        }
        current += ch;
        pendingEscape = false;
    };
    for (const ch of argString) {
        if (ch === '"') {
            if (pendingEscape) {
                push(ch);
            }
            else {
                insideQuotes = !insideQuotes;
            }
            continue;
        }
        if (ch === '\\' && pendingEscape) {
            push(ch);
            continue;
        }
        if (ch === '\\' && insideQuotes) {
            pendingEscape = true;
            continue;
        }
        if (ch === ' ' && !insideQuotes) {
            // unquoted space terminates the current argument (if any)
            if (current.length > 0) {
                result.push(current);
                current = '';
            }
            continue;
        }
        push(ch);
    }
    // flush the trailing argument (note: trimmed, matching historical behavior)
    if (current.length > 0) {
        result.push(current.trim());
    }
    return result;
}
|
|
exports.argStringToArray = argStringToArray;
|
|
class ExecState extends events.EventEmitter {
    /**
     * Tracks the lifecycle of a spawned tool and emits a single 'done'
     * event once the run can be considered finished.
     *
     * @param options  exec options (reads ignoreReturnCode, failOnStdErr, delay)
     * @param toolPath path of the tool being executed (must be non-empty)
     */
    constructor(options, toolPath) {
        super();
        if (!toolPath) {
            throw new Error('toolPath must not be empty');
        }
        this.processClosed = false; // stdio streams fully closed
        this.processError = '';     // message from a spawn-level error, if any
        this.processExitCode = 0;
        this.processExited = false; // 'exit' event observed
        this.processStderr = false; // anything was written to stderr
        this.delay = 10000;         // grace period (ms) between 'exit' and 'close'
        this.done = false;
        this.timeout = null;
        this.options = options;
        this.toolPath = toolPath;
        if (options.delay) {
            this.delay = options.delay;
        }
    }
    /** Decide whether a result can be produced yet. Idempotent. */
    CheckComplete() {
        // nothing to do once a result has been emitted
        if (this.done) {
            return;
        }
        if (this.processClosed) {
            this._setResult();
            return;
        }
        if (this.processExited) {
            // exited but stdio still open: give the streams `delay` ms to close
            this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
        }
    }
    /** Forward a debug message to whoever is listening. */
    _debug(message) {
        this.emit('debug', message);
    }
    /** Compute the final error (if any), then emit 'done' exactly once. */
    _setResult() {
        let failure;
        if (this.processExited) {
            // precedence: spawn error > non-zero exit > stderr-written
            if (this.processError) {
                failure = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
            }
            else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
                failure = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
            }
            else if (this.processStderr && this.options.failOnStdErr) {
                failure = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
            }
        }
        // cancel any pending close-timeout before finishing
        if (this.timeout) {
            clearTimeout(this.timeout);
            this.timeout = null;
        }
        this.done = true;
        this.emit('done', failure, this.processExitCode);
    }
    /** Timer callback: force a result if stdio never closed after exit. */
    static HandleTimeout(state) {
        if (state.done) {
            return;
        }
        if (!state.processClosed && state.processExited) {
            const message = `The STDIO streams did not close within ${state.delay / 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
            state._debug(message);
        }
        state._setResult();
    }
}
|
|
//# sourceMappingURL=toolrunner.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 11:
|
|
/***/ (function(module) {
|
|
|
|
// Returns a wrapper function that returns a wrapped callback
|
|
// The wrapper function should do some stuff, and return a
|
|
// presumably different callback function.
|
|
// This makes sure that own properties are retained, so that
|
|
// decorations and such are not lost along the way.
|
|
module.exports = wrappy
|
|
function wrappy (fn, cb) {
  // two-arg form: wrap fn, then immediately invoke the wrapper with cb
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function')

  // carry fn's own (decorator) properties over to the wrapper
  for (const k of Object.keys(fn)) {
    wrapper[k] = fn[k]
  }

  return wrapper

  function wrapper () {
    const args = Array.prototype.slice.call(arguments)
    const ret = fn.apply(this, args)
    const cb = args[args.length - 1]
    // if fn returned a replacement callback, preserve the original
    // callback's own properties on it
    if (typeof ret === 'function' && ret !== cb) {
      for (const k of Object.keys(cb)) {
        ret[k] = cb[k]
      }
    }
    return ret
  }
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 16:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("tls");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 18:
|
|
/***/ (function() {
|
|
|
|
eval("require")("encoding");
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 19:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = authenticationPlugin;
|
|
|
|
const { Deprecation } = __webpack_require__(692);
|
|
const once = __webpack_require__(969);
|
|
|
|
const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation));
|
|
|
|
const authenticate = __webpack_require__(674);
|
|
const beforeRequest = __webpack_require__(471);
|
|
const requestError = __webpack_require__(349);
|
|
|
|
function authenticationPlugin(octokit, options) {
  if (!options.auth) {
    // legacy path: wire authenticate() plus request hooks around shared state
    const state = {
      octokit,
      auth: false
    };
    octokit.authenticate = authenticate.bind(null, state);
    octokit.hook.before("request", beforeRequest.bind(null, state));
    octokit.hook.error("request", requestError.bind(null, state));
    return;
  }
  // "auth" was handled by the constructor: keep authenticate() as a stub
  // that only logs a deprecation warning (once per process).
  octokit.authenticate = () => {
    const warning = new Deprecation(
      '[@octokit/rest] octokit.authenticate() is deprecated and has no effect when "auth" option is set on Octokit constructor'
    );
    deprecateAuthenticate(octokit.log, warning);
  };
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 20:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
const cp = __webpack_require__(129);
|
|
const parse = __webpack_require__(568);
|
|
const enoent = __webpack_require__(881);
|
|
|
|
function spawn(command, args, options) {
    // Normalize the invocation first (handles Windows shebang/.cmd quirks)
    const normalized = parse(command, args, options);

    const child = cp.spawn(normalized.command, normalized.args, normalized.options);

    // Surface ENOENT for missing executables on the "exit" event, see:
    // https://github.com/IndigoUnited/node-cross-spawn/issues/16
    enoent.hookChildProcess(child, normalized);

    return child;
}
|
|
|
|
function spawnSync(command, args, options) {
    // Normalize the invocation first (handles Windows shebang/.cmd quirks)
    const normalized = parse(command, args, options);

    const outcome = cp.spawnSync(normalized.command, normalized.args, normalized.options);

    // Detect a missing executable after the fact, see:
    // https://github.com/IndigoUnited/node-cross-spawn/issues/16
    outcome.error = outcome.error || enoent.verifyENOENTSync(outcome.status, normalized);

    return outcome;
}
|
|
|
|
module.exports = spawn;
|
|
module.exports.spawn = spawn;
|
|
module.exports.sync = spawnSync;
|
|
|
|
module.exports._parse = parse;
|
|
module.exports._enoent = enoent;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 34:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("https");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 39:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
module.exports = opts => {
|
|
opts = opts || {};
|
|
|
|
const env = opts.env || process.env;
|
|
const platform = opts.platform || process.platform;
|
|
|
|
if (platform !== 'win32') {
|
|
return 'PATH';
|
|
}
|
|
|
|
return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path';
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 47:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = factory;
|
|
|
|
const Octokit = __webpack_require__(402);
|
|
const registerPlugin = __webpack_require__(855);
|
|
|
|
function factory(plugins) {
  // Bind the plugin list into the constructor. Note: `plugins || []` is
  // evaluated separately for each binding on purpose, so the two bound
  // arrays stay independent when no plugin list was supplied.
  const Api = Octokit.bind(null, plugins || []);
  // expose .plugin() so consumers can derive an extended constructor
  Api.plugin = registerPlugin.bind(null, plugins || []);
  return Api;
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 49:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const os = __webpack_require__(87);
|
|
const execa = __webpack_require__(955);
|
|
|
|
// Kernel-version prefix → marketing name.
// Reference: https://www.gaijin.at/en/lstwinver.php
const names = new Map([
  ['10.0', '10'],
  ['6.3', '8.1'],
  ['6.2', '8'],
  ['6.1', '7'],
  ['6.0', 'Vista'],
  ['5.2', 'Server 2003'],
  ['5.1', 'XP'],
  ['5.0', '2000'],
  ['4.9', 'ME'],
  ['4.1', '98'],
  ['4.0', '95']
]);

/**
 * Map a Windows kernel release string (e.g. "6.1.7601") to its marketing
 * name. Defaults to the running OS release; throws when an explicit
 * `release` argument does not contain an `n.n` version.
 */
const windowsRelease = release => {
  const match = /\d+\.\d/.exec(release || os.release());

  if (release && !match) {
    throw new Error('`release` argument doesn\'t match `n.n`');
  }

  const ver = (match || [])[0];

  // Desktop and server releases share kernel versions 6.1–10.0. When asked
  // about the machine we are running on, disambiguate via the OS caption:
  // PowerShell first, `wmic` as a fallback (wmic is obsolete on newer Win10).
  if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) {
    let caption;
    try {
      caption = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || '';
    } catch (_) {
      caption = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || '';
    }

    // a server year in the caption means this is a server edition
    const serverYear = (caption.match(/2008|2012|2016|2019/) || [])[0];
    if (serverYear) {
      return `Server ${serverYear}`;
    }
  }

  return names.get(ver);
};
|
|
|
|
module.exports = windowsRelease;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 81:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const assert = __importStar(__webpack_require__(357));
|
|
const url_1 = __webpack_require__(835);
|
|
/**
 * Build the URL used to fetch the repository.
 * SSH checkouts use the "git@HOST:OWNER/NAME.git" shorthand; HTTPS
 * checkouts use "SCHEME://HOSTNAME[:PORT]/OWNER/NAME".
 */
function getFetchUrl(settings) {
    assert.ok(settings.repositoryOwner, 'settings.repositoryOwner must be defined');
    assert.ok(settings.repositoryName, 'settings.repositoryName must be defined');
    const serviceUrl = getServerUrl();
    const owner = encodeURIComponent(settings.repositoryOwner);
    const name = encodeURIComponent(settings.repositoryName);
    if (settings.sshKey) {
        return `git@${serviceUrl.hostname}:${owner}/${name}.git`;
    }
    // "origin" is SCHEME://HOSTNAME[:PORT]
    return `${serviceUrl.origin}/${owner}/${name}`;
}
|
|
exports.getFetchUrl = getFetchUrl;
|
|
/**
 * Resolve the GitHub service base URL, defaulting to github.com.
 * GITHUB_SERVER_URL wins over the legacy GITHUB_URL variable.
 */
function getServerUrl() {
    // todo: remove GITHUB_URL after support for GHES Alpha is no longer needed
    const base = process.env['GITHUB_SERVER_URL'] ||
        process.env['GITHUB_URL'] ||
        'https://github.com';
    return new url_1.URL(base);
}
|
|
exports.getServerUrl = getServerUrl;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 87:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("os");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 118:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const os = __webpack_require__(87);
|
|
|
|
// Darwin major version → macOS marketing name
const nameMap = new Map([
  [19, 'Catalina'],
  [18, 'Mojave'],
  [17, 'High Sierra'],
  [16, 'Sierra'],
  [15, 'El Capitan'],
  [14, 'Yosemite'],
  [13, 'Mavericks'],
  [12, 'Mountain Lion'],
  [11, 'Lion'],
  [10, 'Snow Leopard'],
  [9, 'Leopard'],
  [8, 'Tiger'],
  [7, 'Panther'],
  [6, 'Jaguar'],
  [5, 'Puma']
]);

/**
 * Map a Darwin kernel release (e.g. "18.2.0") to its macOS name and
 * "10.x" version. Defaults to the running kernel's release.
 */
const macosRelease = release => {
  // only the Darwin major number matters; Darwin N ↔ macOS 10.(N-4)
  const major = Number((release || os.release()).split('.')[0]);

  return {
    name: nameMap.get(major),
    version: '10.' + (major - 4)
  };
};
|
|
|
|
module.exports = macosRelease;
|
|
// TODO: remove this in the next major version
|
|
module.exports.default = macosRelease;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 126:
|
|
/***/ (function(module) {
|
|
|
|
/**
|
|
* lodash (Custom Build) <https://lodash.com/>
|
|
* Build: `lodash modularize exports="npm" -o ./`
|
|
* Copyright jQuery Foundation and other contributors <https://jquery.org/>
|
|
* Released under MIT license <https://lodash.com/license>
|
|
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
|
|
* Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
|
|
*/
|
|
|
|
/** Used as the size to enable large array optimizations. */
|
|
var LARGE_ARRAY_SIZE = 200;
|
|
|
|
/** Used to stand-in for `undefined` hash values. */
|
|
var HASH_UNDEFINED = '__lodash_hash_undefined__';
|
|
|
|
/** Used as references for various `Number` constants. */
|
|
var INFINITY = 1 / 0;
|
|
|
|
/** `Object#toString` result references. */
|
|
var funcTag = '[object Function]',
|
|
genTag = '[object GeneratorFunction]';
|
|
|
|
/**
|
|
* Used to match `RegExp`
|
|
* [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
|
|
*/
|
|
var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
|
|
|
|
/** Used to detect host constructors (Safari). */
|
|
var reIsHostCtor = /^\[object .+?Constructor\]$/;
|
|
|
|
/** Detect free variable `global` from Node.js. */
|
|
var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
|
|
|
|
/** Detect free variable `self`. */
|
|
var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
|
|
|
|
/** Used as a reference to the global object. */
|
|
var root = freeGlobal || freeSelf || Function('return this')();
|
|
|
|
/**
 * A specialized version of `_.includes` for arrays without support for
 * specifying an index to search from.
 *
 * @private
 * @param {Array} [array] The array to inspect.
 * @param {*} value The value to search for.
 * @returns {boolean} Returns `true` if `value` is found, else `false`.
 */
function arrayIncludes(array, value) {
  var size = array ? array.length : 0;
  // empty/absent arrays short-circuit before delegating to baseIndexOf
  return !!size && baseIndexOf(array, value, 0) > -1;
}
|
|
|
|
/**
 * This function is like `arrayIncludes` except that it accepts a comparator,
 * invoked as `comparator(value, element)` for each element in turn.
 *
 * @private
 * @param {Array} [array] The array to inspect.
 * @param {*} value The value to search for.
 * @param {Function} comparator The comparator invoked per element.
 * @returns {boolean} Returns `true` if `value` is found, else `false`.
 */
function arrayIncludesWith(array, value, comparator) {
  var length = array ? array.length : 0;
  for (var index = 0; index < length; index++) {
    if (comparator(value, array[index])) {
      return true;
    }
  }
  return false;
}
|
|
|
|
/**
 * The base implementation of `_.findIndex` and `_.findLastIndex` without
 * support for iteratee shorthands.
 *
 * @private
 * @param {Array} array The array to inspect.
 * @param {Function} predicate The function invoked per iteration.
 * @param {number} fromIndex The index to search from.
 * @param {boolean} [fromRight] Specify iterating from right to left.
 * @returns {number} Returns the index of the matched value, else `-1`.
 */
function baseFindIndex(array, predicate, fromIndex, fromRight) {
  var length = array.length;
  if (fromRight) {
    // scan fromIndex .. 0, right to left
    for (var index = fromIndex; index >= 0; index--) {
      if (predicate(array[index], index, array)) {
        return index;
      }
    }
    return -1;
  }
  // scan fromIndex .. length-1, left to right
  for (var index = fromIndex; index < length; index++) {
    if (predicate(array[index], index, array)) {
      return index;
    }
  }
  return -1;
}
|
|
|
|
/**
 * The base implementation of `_.indexOf` without `fromIndex` bounds checks.
 *
 * @private
 * @param {Array} array The array to inspect.
 * @param {*} value The value to search for.
 * @param {number} fromIndex The index to search from.
 * @returns {number} Returns the index of the matched value, else `-1`.
 */
function baseIndexOf(array, value, fromIndex) {
  // NaN never equals itself, so delegate to a predicate-based scan
  if (value !== value) {
    return baseFindIndex(array, baseIsNaN, fromIndex);
  }
  var length = array.length;
  for (var index = fromIndex; index < length; index++) {
    if (array[index] === value) {
      return index;
    }
  }
  return -1;
}
|
|
|
|
/**
 * The base implementation of `_.isNaN` without support for number objects.
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`.
 */
function baseIsNaN(value) {
  // true only for the NaN primitive (unlike the coercing global isNaN)
  return Number.isNaN(value);
}
|
|
|
|
/**
 * Checks if a cache value for `key` exists.
 *
 * @private
 * @param {Object} cache The cache to query.
 * @param {string} key The key of the entry to check.
 * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
 */
function cacheHas(cache, key) {
  // Free-function form of `.has` so it can be swapped in as the `includes`
  // strategy in `baseUniq` (mirrors `arrayIncludes`' call signature).
  return cache.has(key);
}
|
|
|
|
/**
 * Gets the value at `key` of `object`.
 *
 * @private
 * @param {Object} [object] The object to query.
 * @param {string} key The key of the property to get.
 * @returns {*} Returns the property value.
 */
function getValue(object, key) {
  // `== null` deliberately matches both null and undefined
  if (object == null) {
    return undefined;
  }
  return object[key];
}
|
|
|
|
/**
 * Checks if `value` is a host object in IE < 9.
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a host object, else `false`.
 */
function isHostObject(value) {
  // Many host objects are `Object` objects that can coerce to strings
  // despite having improperly defined `toString` methods.
  if (value == null || typeof value.toString == 'function') {
    return false;
  }
  try {
    return !!(value + '');
  } catch (e) {
    // coercion threw: not a string-coercible host object
    return false;
  }
}
|
|
|
|
/**
 * Converts `set` to an array of its values, in insertion order.
 *
 * @private
 * @param {Object} set The set to convert.
 * @returns {Array} Returns the values.
 */
function setToArray(set) {
  var result = Array(set.size);
  var index = 0;

  set.forEach(function(value) {
    result[index] = value;
    index += 1;
  });
  return result;
}
|
|
|
|
/** Used for built-in method references. */
|
|
var arrayProto = Array.prototype,
|
|
funcProto = Function.prototype,
|
|
objectProto = Object.prototype;
|
|
|
|
/** Used to detect overreaching core-js shims. */
|
|
var coreJsData = root['__core-js_shared__'];
|
|
|
|
/** Used to detect methods masquerading as native. */
|
|
var maskSrcKey = (function() {
|
|
var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
|
|
return uid ? ('Symbol(src)_1.' + uid) : '';
|
|
}());
|
|
|
|
/** Used to resolve the decompiled source of functions. */
|
|
var funcToString = funcProto.toString;
|
|
|
|
/** Used to check objects for own properties. */
|
|
var hasOwnProperty = objectProto.hasOwnProperty;
|
|
|
|
/**
|
|
* Used to resolve the
|
|
* [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
|
|
* of values.
|
|
*/
|
|
var objectToString = objectProto.toString;
|
|
|
|
/** Used to detect if a method is native. */
|
|
var reIsNative = RegExp('^' +
|
|
funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
|
|
.replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
|
|
);
|
|
|
|
/** Built-in value references. */
|
|
var splice = arrayProto.splice;
|
|
|
|
/* Built-in method references that are verified to be native. */
|
|
var Map = getNative(root, 'Map'),
|
|
Set = getNative(root, 'Set'),
|
|
nativeCreate = getNative(Object, 'create');
|
|
|
|
/**
 * Creates a hash object.
 * String-keyed store; uses a prototype-less object when `Object.create`
 * is available (via `nativeCreate`) so keys like "constructor" cannot
 * collide with inherited properties.
 *
 * @private
 * @constructor
 * @param {Array} [entries] The key-value pairs to cache.
 */
function Hash(entries) {
  this.clear();
  var length = entries ? entries.length : 0;
  for (var index = 0; index < length; index++) {
    var entry = entries[index];
    this.set(entry[0], entry[1]);
  }
}

/**
 * Removes all key-value entries from the hash.
 *
 * @private
 * @name clear
 * @memberOf Hash
 */
function hashClear() {
  this.__data__ = nativeCreate ? nativeCreate(null) : {};
}

/**
 * Removes `key` and its value from the hash.
 *
 * @private
 * @name delete
 * @memberOf Hash
 * @param {string} key The key of the value to remove.
 * @returns {boolean} Returns `true` if the entry was removed, else `false`.
 */
function hashDelete(key) {
  var existed = this.has(key);
  return existed && delete this.__data__[key];
}

/**
 * Gets the hash value for `key`.
 *
 * @private
 * @name get
 * @memberOf Hash
 * @param {string} key The key of the value to get.
 * @returns {*} Returns the entry value.
 */
function hashGet(key) {
  var data = this.__data__;
  if (!nativeCreate) {
    // plain-object store: guard against inherited properties
    return hasOwnProperty.call(data, key) ? data[key] : undefined;
  }
  var result = data[key];
  // HASH_UNDEFINED marks "present with value undefined"
  return result === HASH_UNDEFINED ? undefined : result;
}

/**
 * Checks if a hash value for `key` exists.
 *
 * @private
 * @name has
 * @memberOf Hash
 * @param {string} key The key of the entry to check.
 * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
 */
function hashHas(key) {
  var data = this.__data__;
  if (nativeCreate) {
    // prototype-less store: any non-undefined slot means "present"
    return data[key] !== undefined;
  }
  return hasOwnProperty.call(data, key);
}

/**
 * Sets the hash `key` to `value`.
 *
 * @private
 * @name set
 * @memberOf Hash
 * @param {string} key The key of the value to set.
 * @param {*} value The value to set.
 * @returns {Object} Returns the hash instance.
 */
function hashSet(key, value) {
  var data = this.__data__;
  // store a sentinel for `undefined` so hashHas can use a plain !== check
  if (nativeCreate && value === undefined) {
    data[key] = HASH_UNDEFINED;
  } else {
    data[key] = value;
  }
  return this;
}

// Wire the methods onto the prototype (pre-`class` idiom).
Hash.prototype.clear = hashClear;
Hash.prototype['delete'] = hashDelete;
Hash.prototype.get = hashGet;
Hash.prototype.has = hashHas;
Hash.prototype.set = hashSet;
|
|
|
|
/**
 * Creates an list cache object.
 * Backed by an array of [key, value] pairs; intended for small maps.
 *
 * @private
 * @constructor
 * @param {Array} [entries] The key-value pairs to cache.
 */
function ListCache(entries) {
  this.clear();
  var length = entries ? entries.length : 0;
  for (var index = 0; index < length; index++) {
    var entry = entries[index];
    this.set(entry[0], entry[1]);
  }
}

/**
 * Removes all key-value entries from the list cache.
 *
 * @private
 * @name clear
 * @memberOf ListCache
 */
function listCacheClear() {
  this.__data__ = [];
}

/**
 * Removes `key` and its value from the list cache.
 *
 * @private
 * @name delete
 * @memberOf ListCache
 * @param {string} key The key of the value to remove.
 * @returns {boolean} Returns `true` if the entry was removed, else `false`.
 */
function listCacheDelete(key) {
  var data = this.__data__;
  var index = assocIndexOf(data, key);

  if (index < 0) {
    return false;
  }
  // the final pair can simply be popped; anything else is spliced out
  if (index == data.length - 1) {
    data.pop();
  } else {
    splice.call(data, index, 1);
  }
  return true;
}

/**
 * Gets the list cache value for `key`.
 *
 * @private
 * @name get
 * @memberOf ListCache
 * @param {string} key The key of the value to get.
 * @returns {*} Returns the entry value.
 */
function listCacheGet(key) {
  var data = this.__data__;
  var index = assocIndexOf(data, key);

  if (index < 0) {
    return undefined;
  }
  return data[index][1];
}

/**
 * Checks if a list cache value for `key` exists.
 *
 * @private
 * @name has
 * @memberOf ListCache
 * @param {string} key The key of the entry to check.
 * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
 */
function listCacheHas(key) {
  return assocIndexOf(this.__data__, key) > -1;
}

/**
 * Sets the list cache `key` to `value`, appending a new pair when absent.
 *
 * @private
 * @name set
 * @memberOf ListCache
 * @param {string} key The key of the value to set.
 * @param {*} value The value to set.
 * @returns {Object} Returns the list cache instance.
 */
function listCacheSet(key, value) {
  var data = this.__data__;
  var index = assocIndexOf(data, key);

  if (index < 0) {
    data.push([key, value]);
  } else {
    data[index][1] = value;
  }
  return this;
}

// Wire the methods onto the prototype.
ListCache.prototype.clear = listCacheClear;
ListCache.prototype['delete'] = listCacheDelete;
ListCache.prototype.get = listCacheGet;
ListCache.prototype.has = listCacheHas;
ListCache.prototype.set = listCacheSet;
|
|
|
|
/**
 * Creates a map cache object to store key-value pairs.
 * Each key is routed to one of three backing stores (see `getMapData`):
 * a string-keyed hash, a general hash, or a Map/ListCache for object keys.
 *
 * @private
 * @constructor
 * @param {Array} [entries] The key-value pairs to cache.
 */
function MapCache(entries) {
  this.clear();
  var length = entries ? entries.length : 0;
  for (var index = 0; index < length; index++) {
    var entry = entries[index];
    this.set(entry[0], entry[1]);
  }
}

/**
 * Removes all key-value entries from the map by resetting all three
 * backing stores.
 *
 * @private
 * @name clear
 * @memberOf MapCache
 */
function mapCacheClear() {
  this.__data__ = {
    'hash': new Hash,
    'map': new (Map || ListCache),
    'string': new Hash
  };
}

/**
 * Removes `key` and its value from the map.
 *
 * @private
 * @name delete
 * @memberOf MapCache
 * @param {string} key The key of the value to remove.
 * @returns {boolean} Returns `true` if the entry was removed, else `false`.
 */
function mapCacheDelete(key) {
  return getMapData(this, key)['delete'](key);
}

/**
 * Gets the map value for `key`.
 *
 * @private
 * @name get
 * @memberOf MapCache
 * @param {string} key The key of the value to get.
 * @returns {*} Returns the entry value.
 */
function mapCacheGet(key) {
  return getMapData(this, key).get(key);
}

/**
 * Checks if a map value for `key` exists.
 *
 * @private
 * @name has
 * @memberOf MapCache
 * @param {string} key The key of the entry to check.
 * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
 */
function mapCacheHas(key) {
  return getMapData(this, key).has(key);
}

/**
 * Sets the map `key` to `value`.
 *
 * @private
 * @name set
 * @memberOf MapCache
 * @param {string} key The key of the value to set.
 * @param {*} value The value to set.
 * @returns {Object} Returns the map cache instance.
 */
function mapCacheSet(key, value) {
  var store = getMapData(this, key);
  store.set(key, value);
  return this;
}

// Wire the methods onto the prototype.
MapCache.prototype.clear = mapCacheClear;
MapCache.prototype['delete'] = mapCacheDelete;
MapCache.prototype.get = mapCacheGet;
MapCache.prototype.has = mapCacheHas;
MapCache.prototype.set = mapCacheSet;
|
|
|
|
/**
 * Creates an array cache object to store unique values.
 * Thin wrapper over MapCache where each value maps to a sentinel.
 *
 * @private
 * @constructor
 * @param {Array} [values] The values to cache.
 */
function SetCache(values) {
  this.__data__ = new MapCache;
  var length = values ? values.length : 0;
  for (var index = 0; index < length; index++) {
    this.add(values[index]);
  }
}

/**
 * Adds `value` to the array cache.
 *
 * @private
 * @name add
 * @memberOf SetCache
 * @alias push
 * @param {*} value The value to cache.
 * @returns {Object} Returns the cache instance.
 */
function setCacheAdd(value) {
  // only membership matters, so the stored value is just a sentinel
  this.__data__.set(value, HASH_UNDEFINED);
  return this;
}

/**
 * Checks if `value` is in the array cache.
 *
 * @private
 * @name has
 * @memberOf SetCache
 * @param {*} value The value to search for.
 * @returns {number} Returns `true` if `value` is found, else `false`.
 */
function setCacheHas(value) {
  return this.__data__.has(value);
}

// Wire the methods onto the prototype; `push` aliases `add`.
SetCache.prototype.add = SetCache.prototype.push = setCacheAdd;
SetCache.prototype.has = setCacheHas;
|
|
|
|
/**
 * Gets the index at which the `key` is found in `array` of key-value pairs.
 * Scans from the end, which favors recently-added entries.
 *
 * @private
 * @param {Array} array The array to inspect.
 * @param {*} key The key to search for.
 * @returns {number} Returns the index of the matched value, else `-1`.
 */
function assocIndexOf(array, key) {
  for (var index = array.length - 1; index >= 0; index--) {
    if (eq(array[index][0], key)) {
      return index;
    }
  }
  return -1;
}
|
|
|
|
/**
 * The base implementation of `_.isNative` without bad shim checks.
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a native function,
 *  else `false`.
 */
function baseIsNative(value) {
  if (!isObject(value) || isMasked(value)) {
    return false;
  }
  // real functions (and IE<9 host objects) must match the native-source
  // pattern; anything else only needs to look like a host constructor
  var pattern = reIsHostCtor;
  if (isFunction(value) || isHostObject(value)) {
    pattern = reIsNative;
  }
  return pattern.test(toSource(value));
}
|
|
|
|
/**
 * The base implementation of `_.uniqBy` without support for iteratee shorthands.
 *
 * @private
 * @param {Array} array The array to inspect.
 * @param {Function} [iteratee] The iteratee invoked per element.
 * @param {Function} [comparator] The comparator invoked per element.
 * @returns {Array} Returns the new duplicate free array.
 */
function baseUniq(array, iteratee, comparator) {
  var index = -1,
      includes = arrayIncludes,
      length = array.length,
      isCommon = true,
      result = [],
      seen = result;

  // Pick a membership strategy:
  //  - comparator given: linear scan using the custom comparator
  //  - large input: O(1) lookups via native Set (shortcut) or SetCache
  //  - otherwise: plain `===` scan over what has been kept so far
  if (comparator) {
    isCommon = false;
    includes = arrayIncludesWith;
  }
  else if (length >= LARGE_ARRAY_SIZE) {
    // createSet is `noop` when native Set can't be trusted (see its probe)
    var set = iteratee ? null : createSet(array);
    if (set) {
      // native Set preserves insertion order, so this is the whole answer
      return setToArray(set);
    }
    isCommon = false;
    includes = cacheHas;
    seen = new SetCache;
  }
  else {
    // with an iteratee, computed values are tracked separately from results
    seen = iteratee ? [] : result;
  }
  outer:
  while (++index < length) {
    var value = array[index],
        computed = iteratee ? iteratee(value) : value;

    // fold -0 into 0 unless a comparator is in charge of equality
    value = (comparator || value !== 0) ? value : 0;
    if (isCommon && computed === computed) {
      // fast path: manual reverse scan; the `computed === computed` guard
      // routes NaN (which `===` can never match) to the slow path below
      var seenIndex = seen.length;
      while (seenIndex--) {
        if (seen[seenIndex] === computed) {
          continue outer;
        }
      }
      if (iteratee) {
        seen.push(computed);
      }
      result.push(value);
    }
    else if (!includes(seen, computed, comparator)) {
      // `seen` only differs from `result` when tracking computed values
      if (seen !== result) {
        seen.push(computed);
      }
      result.push(value);
    }
  }
  return result;
}
|
|
|
|
/**
 * Creates a set object of `values`.
 *
 * @private
 * @param {Array} values The values to add to the set.
 * @returns {Object} Returns the new set.
 */
// Only uses a native `Set` when it is available and spec-compliant (adding
// `-0` must normalize it to `+0`, so `1 / element == Infinity`); otherwise
// this is `noop` and callers receive `undefined`, forcing the slow path.
var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) {
  return new Set(values);
};
|
|
|
|
/**
 * Resolves the internal store of `map` that is responsible for `key`:
 * the string/hash buckets for primitive-keyable keys, otherwise the
 * backing `Map`.
 *
 * @private
 * @param {Object} map The map to query.
 * @param {string} key The reference key.
 * @returns {*} Returns the map data.
 */
function getMapData(map, key) {
  var data = map.__data__;
  if (!isKeyable(key)) {
    return data.map;
  }
  return typeof key == 'string' ? data.string : data.hash;
}
|
|
|
|
/**
 * Gets the native function at `key` of `object`.
 *
 * @private
 * @param {Object} object The object to query.
 * @param {string} key The key of the method to get.
 * @returns {*} Returns the function if it's native, else `undefined`.
 */
function getNative(object, key) {
  var candidate = getValue(object, key);
  if (baseIsNative(candidate)) {
    return candidate;
  }
  return undefined;
}
|
|
|
|
/**
 * Checks if `value` is suitable for use as unique object key.
 *
 * Primitives other than `'__proto__'` qualify; among non-primitives only
 * `null` does.
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is suitable, else `false`.
 */
function isKeyable(value) {
  switch (typeof value) {
    case 'string':
    case 'number':
    case 'symbol':
    case 'boolean':
      return value !== '__proto__';
    default:
      return value === null;
  }
}
|
|
|
|
/**
 * Checks if `func` has its source masked.
 *
 * @private
 * @param {Function} func The function to check.
 * @returns {boolean} Returns `true` if `func` is masked, else `false`.
 */
function isMasked(func) {
  // `maskSrcKey` is only non-empty on runtimes that can mask function
  // sources; when it is empty no function is ever considered masked.
  return !!maskSrcKey && (maskSrcKey in func);
}
|
|
|
|
/**
 * Converts `func` to its source code.
 *
 * Tries `Function.prototype.toString` first, then plain string coercion;
 * yields the empty string when both throw or `func` is nullish.
 *
 * @private
 * @param {Function} func The function to process.
 * @returns {string} Returns the source code.
 */
function toSource(func) {
  if (func == null) {
    return '';
  }
  try {
    return funcToString.call(func);
  } catch (e) {}
  try {
    return '' + func;
  } catch (e) {}
  return '';
}
|
|
|
|
/**
 * Creates a duplicate-free version of an array, using
 * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
 * for equality comparisons, in which only the first occurrence of each
 * element is kept.
 *
 * @static
 * @memberOf _
 * @since 0.1.0
 * @category Array
 * @param {Array} array The array to inspect.
 * @returns {Array} Returns the new duplicate free array.
 * @example
 *
 * _.uniq([2, 1, 2]);
 * // => [2, 1]
 */
function uniq(array) {
  if (array == null || !array.length) {
    return [];
  }
  return baseUniq(array);
}
|
|
|
|
/**
 * Performs a
 * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
 * comparison between two values to determine if they are equivalent.
 *
 * @static
 * @memberOf _
 * @since 4.0.0
 * @category Lang
 * @param {*} value The value to compare.
 * @param {*} other The other value to compare.
 * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
 * @example
 *
 * var object = { 'a': 1 };
 * var other = { 'a': 1 };
 *
 * _.eq(object, object);
 * // => true
 *
 * _.eq(object, other);
 * // => false
 *
 * _.eq('a', 'a');
 * // => true
 *
 * _.eq('a', Object('a'));
 * // => false
 *
 * _.eq(NaN, NaN);
 * // => true
 */
function eq(value, other) {
  if (value === other) {
    return true;
  }
  // SameValueZero: NaN is the only value that is not strictly equal to
  // itself, so two self-unequal operands must both be NaN.
  return value !== value && other !== other;
}
|
|
|
|
/**
 * Checks if `value` is classified as a `Function` object.
 *
 * @static
 * @memberOf _
 * @since 0.1.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a function, else `false`.
 * @example
 *
 * _.isFunction(_);
 * // => true
 *
 * _.isFunction(/abc/);
 * // => false
 */
function isFunction(value) {
  // `typeof` misreports some constructors as 'object' in Safari 8-9, so
  // classification goes through `Object#toString` instead.
  if (!isObject(value)) {
    return false;
  }
  var tag = objectToString.call(value);
  return tag == funcTag || tag == genTag;
}
|
|
|
|
/**
 * Checks if `value` is the
 * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
 * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
 *
 * @static
 * @memberOf _
 * @since 0.1.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is an object, else `false`.
 * @example
 *
 * _.isObject({});
 * // => true
 *
 * _.isObject([1, 2, 3]);
 * // => true
 *
 * _.isObject(_.noop);
 * // => true
 *
 * _.isObject(null);
 * // => false
 */
function isObject(value) {
  if (value === null || value === undefined) {
    return false;
  }
  var type = typeof value;
  return type == 'object' || type == 'function';
}
|
|
|
|
/**
 * This method returns `undefined`.
 *
 * @static
 * @memberOf _
 * @since 2.3.0
 * @category Util
 * @example
 *
 * _.times(2, _.noop);
 * // => [undefined, undefined]
 */
function noop() {
  return undefined;
}
|
|
|
|
// Public API of this lodash build: only the `uniq` array helper.
module.exports = uniq;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 129:
/***/ (function(module) {

// Re-export Node's built-in child_process module for the bundle.
module.exports = require("child_process");

/***/ }),
|
|
|
|
/***/ 139:
/***/ (function(module, __unusedexports, __webpack_require__) {

// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.

var crypto = __webpack_require__(417);

// Returns 16 cryptographically strong random bytes (one UUID's worth).
module.exports = function nodeRNG() {
  return crypto.randomBytes(16);
};


/***/ }),
|
|
|
|
/***/ 141:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
var net = __webpack_require__(631);
|
|
var tls = __webpack_require__(16);
|
|
var http = __webpack_require__(605);
|
|
var https = __webpack_require__(34);
|
|
var events = __webpack_require__(614);
|
|
var assert = __webpack_require__(357);
|
|
var util = __webpack_require__(669);
|
|
|
|
|
|
exports.httpOverHttp = httpOverHttp;
|
|
exports.httpsOverHttp = httpsOverHttp;
|
|
exports.httpOverHttps = httpOverHttps;
|
|
exports.httpsOverHttps = httpsOverHttps;
|
|
|
|
|
|
// Factory helpers: each returns a TunnelingAgent wired for one combination
// of target protocol (http/https) and proxy protocol. HTTPS targets swap in
// `createSecureSocket` so the tunneled socket is upgraded to TLS.

// HTTP target reached through an HTTP proxy.
function httpOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  return agent;
}

// HTTPS target reached through an HTTP proxy.
function httpsOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  agent.createSocket = createSecureSocket;
  return agent;
}

// HTTP target reached through an HTTPS proxy.
function httpOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  return agent;
}

// HTTPS target reached through an HTTPS proxy.
function httpsOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  agent.createSocket = createSecureSocket;
  return agent;
}
|
|
|
|
|
|
/**
 * An http.Agent-style agent that tunnels requests through a proxy using
 * HTTP CONNECT. Tracks live sockets and queues requests beyond `maxSockets`.
 *
 * @param {Object} [options] Agent options; `options.proxy` configures the
 *   proxy connection, `options.maxSockets` caps concurrent tunnels.
 */
function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  self.requests = []; // queued requests waiting for a free socket
  self.sockets = [];  // sockets (or placeholders) currently in use

  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    // No queued request can reuse this socket; close it and free the slot.
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);
|
|
|
|
/**
 * Entry point called by http.ClientRequest: attaches `req` to a tunneled
 * socket, creating a new tunnel unless the `maxSockets` limit forces
 * queueing.
 */
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    function onFree() {
      self.emit('free', socket, options);
    }

    function onCloseOrRemove(err) {
      // Drop the socket from the pool and detach all listeners.
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};
|
|
|
|
/**
 * Opens a tunnel to the target by issuing an HTTP CONNECT request to the
 * proxy, then hands the established socket to `cb`.
 *
 * A placeholder object reserves a slot in `self.sockets` until the real
 * socket exists, keeping `maxSockets` accounting correct. On failure an
 * ECONNRESET error is emitted on the originating request.
 *
 * @param {Object} options Merged request options (host, port, request, ...).
 * @param {Function} cb Receives the connected socket on success.
 */
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false
  });
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated `new Buffer(string)` constructor;
    // output is identical for string input.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode === 200) {
      assert.equal(head.length, 0);
      debug('tunneling connection has established');
      // Replace the placeholder with the live socket.
      self.sockets[self.sockets.indexOf(placeholder)] = socket;
      cb(socket);
    } else {
      debug('tunneling socket could not be established, statusCode=%d',
            res.statusCode);
      var error = new Error('tunneling socket could not be established, ' +
                            'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
    }
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
                          'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
|
|
|
|
/**
 * Drops `socket` (or a placeholder) from the pool and, when a request is
 * queued, immediately creates a replacement socket for it.
 *
 * @param {Object} socket The socket or placeholder to remove.
 */
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var index = this.sockets.indexOf(socket);
  if (index < 0) {
    return;
  }
  this.sockets.splice(index, 1);

  var queued = this.requests.shift();
  if (!queued) {
    return;
  }
  // A socket slot just freed up; spin up a new connection so the oldest
  // queued request can proceed.
  this.createSocket(queued, function(newSocket) {
    queued.request.onSocket(newSocket);
  });
};
|
|
|
|
/**
 * createSocket variant for HTTPS targets: establishes the CONNECT tunnel,
 * then wraps the raw socket in TLS before handing it to `cb`.
 */
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    // SNI servername: the request's host header without any trailing :port.
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    // Swap the plain socket for the TLS-wrapped one in the pool.
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}
|
|
|
|
|
|
/**
 * Normalizes legacy positional arguments into an options object.
 *
 * Node < 0.11 passed (host, port, localAddress) separately; newer versions
 * pass a single options object, which is returned untouched.
 *
 * @param {string|Object} host Hostname string, or a full options object.
 * @param {number} [port] Target port (legacy form only).
 * @param {string} [localAddress] Local interface (legacy form only).
 * @returns {Object} Options with host/port/localAddress.
 */
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // for v0.11 or later
  }
  // since v0.10
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
|
|
|
|
/**
 * Copies own enumerable properties from every following argument onto
 * `target`, skipping `undefined` values; later sources win. Non-object
 * arguments are ignored. Mutates and returns `target`.
 *
 * @param {Object} target Destination object.
 * @returns {Object} The mutated `target`.
 */
function mergeOptions(target) {
  var sources = Array.prototype.slice.call(arguments, 1);
  sources.forEach(function(source) {
    if (typeof source !== 'object') {
      return;
    }
    Object.keys(source).forEach(function(key) {
      if (source[key] !== undefined) {
        target[key] = source[key];
      }
    });
  });
  return target;
}
|
|
|
|
|
|
// Debug logging is enabled by listing `tunnel` in NODE_DEBUG; otherwise
// `debug` is a no-op. Messages are prefixed with "TUNNEL:" and written to
// stderr.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}
exports.debug = debug; // for test
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 143:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = withAuthorizationPrefix;
|
|
|
|
const atob = __webpack_require__(368);
|
|
|
|
// Matches the decoded `user:password` shape of a Basic Auth credential.
const REGEX_IS_BASIC_AUTH = /^[\w-]+:/;

/**
 * Prefixes an authorization value with its scheme when the caller omitted
 * one: base64-encoded `user:pass` pairs get `basic`, three-segment JWTs get
 * `bearer`, everything else gets `token`. Values that already start with a
 * scheme are returned unchanged.
 *
 * @param {string} authorization The raw authorization value.
 * @returns {string} The value with an explicit scheme prefix.
 */
function withAuthorizationPrefix(authorization) {
  // Already carries an explicit scheme; leave as-is.
  if (/^(basic|bearer|token) /i.test(authorization)) {
    return authorization;
  }

  // Decodes cleanly to `user:password`? Then it's Basic Auth material.
  try {
    if (REGEX_IS_BASIC_AUTH.test(atob(authorization))) {
      return `basic ${authorization}`;
    }
  } catch (error) {
    // Not valid base64 - fall through to the remaining heuristics.
  }

  // JWTs are three dot-separated segments; otherwise assume a plain token.
  return authorization.split(/\./).length === 3
    ? `bearer ${authorization}`
    : `token ${authorization}`;
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 145:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const pump = __webpack_require__(453);
|
|
const bufferStream = __webpack_require__(966);
|
|
|
|
/**
 * Error raised when a stream buffers more data than `maxBuffer` allows.
 * `getStream` attaches the partially-read data as `bufferedData`.
 */
class MaxBufferError extends Error {
  constructor() {
    super('maxBuffer exceeded');
    this.name = 'MaxBufferError';
  }
}
|
|
|
|
/**
 * Buffers a readable stream and resolves with its accumulated contents.
 * Rejects with `MaxBufferError` (carrying `bufferedData`) once more than
 * `options.maxBuffer` has been read, or with the stream's own error.
 *
 * @param {stream.Readable} inputStream Stream to drain.
 * @param {Object} [options] `maxBuffer` (default Infinity); remaining
 *   options are forwarded to the internal buffer stream.
 * @returns {Promise<*>} The buffered value.
 */
function getStream(inputStream, options) {
  if (!inputStream) {
    return Promise.reject(new Error('Expected a stream'));
  }

  options = Object.assign({maxBuffer: Infinity}, options);

  const {maxBuffer} = options;

  let stream;
  return new Promise((resolve, reject) => {
    const rejectPromise = error => {
      if (error) { // A null check
        // Expose whatever was read before the failure.
        error.bufferedData = stream.getBufferedValue();
      }
      reject(error);
    };

    // `pump` pipes input into the buffer stream and reports completion or
    // the first error from either side.
    stream = pump(inputStream, bufferStream(options), error => {
      if (error) {
        rejectPromise(error);
        return;
      }

      resolve();
    });

    stream.on('data', () => {
      if (stream.getBufferedLength() > maxBuffer) {
        rejectPromise(new MaxBufferError());
      }
    });
  }).then(() => stream.getBufferedValue());
}
|
|
|
|
module.exports = getStream;
// Convenience variants: raw Buffer result / array-of-chunks result.
module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'}));
module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true}));
module.exports.MaxBufferError = MaxBufferError;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 148:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = paginatePlugin;
|
|
|
|
const { paginateRest } = __webpack_require__(299);
|
|
|
|
/**
 * Octokit plugin that mixes the REST pagination helpers onto the instance.
 */
function paginatePlugin(octokit) {
  const helpers = paginateRest(octokit);
  Object.assign(octokit, helpers);
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 153:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript-emitted helper: wraps a CommonJS module so it can be consumed
// as a namespace import (copies own properties and sets `default`).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
const coreCommand = __importStar(__webpack_require__(431));
// State shared between the MAIN and POST phases of the action via the
// Actions `save-state` command (surfaced back as STATE_* env vars).
/**
 * Indicates whether the POST action is running
 */
exports.IsPost = !!process.env['STATE_isPost'];
/**
 * The repository path for the POST action. The value is empty during the MAIN action.
 */
exports.RepositoryPath = process.env['STATE_repositoryPath'] || '';
/**
 * The SSH key path for the POST action. The value is empty during the MAIN action.
 */
exports.SshKeyPath = process.env['STATE_sshKeyPath'] || '';
/**
 * The SSH known hosts path for the POST action. The value is empty during the MAIN action.
 */
exports.SshKnownHostsPath = process.env['STATE_sshKnownHostsPath'] || '';
|
|
// The setters below persist values with the `save-state` command so the
// POST phase can read them back from the STATE_* variables above.
/**
 * Save the repository path so the POST action can retrieve the value.
 */
function setRepositoryPath(repositoryPath) {
    coreCommand.issueCommand('save-state', { name: 'repositoryPath' }, repositoryPath);
}
exports.setRepositoryPath = setRepositoryPath;
/**
 * Save the SSH key path so the POST action can retrieve the value.
 */
function setSshKeyPath(sshKeyPath) {
    coreCommand.issueCommand('save-state', { name: 'sshKeyPath' }, sshKeyPath);
}
exports.setSshKeyPath = setSshKeyPath;
/**
 * Save the SSH known hosts path so the POST action can retrieve the value.
 */
function setSshKnownHostsPath(sshKnownHostsPath) {
    coreCommand.issueCommand('save-state', { name: 'sshKnownHostsPath' }, sshKnownHostsPath);
}
exports.setSshKnownHostsPath = setSshKnownHostsPath;
// Publish a variable so that when the POST action runs, it can determine it should run the cleanup logic.
// This is necessary since we don't have a separate entry point.
if (!exports.IsPost) {
    coreCommand.issueCommand('save-state', { name: 'isPost' }, 'true');
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 168:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
const alias = ['stdin', 'stdout', 'stderr'];
|
|
|
|
const hasAlias = opts => alias.some(x => Boolean(opts[x]));
|
|
|
|
module.exports = opts => {
|
|
if (!opts) {
|
|
return null;
|
|
}
|
|
|
|
if (opts.stdio && hasAlias(opts)) {
|
|
throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`);
|
|
}
|
|
|
|
if (typeof opts.stdio === 'string') {
|
|
return opts.stdio;
|
|
}
|
|
|
|
const stdio = opts.stdio || [];
|
|
|
|
if (!Array.isArray(stdio)) {
|
|
throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
|
|
}
|
|
|
|
const result = [];
|
|
const len = Math.max(stdio.length, alias.length);
|
|
|
|
for (let i = 0; i < len; i++) {
|
|
let value = null;
|
|
|
|
if (stdio[i] !== undefined) {
|
|
value = stdio[i];
|
|
} else if (opts[alias[i]] !== undefined) {
|
|
value = opts[alias[i]];
|
|
}
|
|
|
|
result[i] = value;
|
|
}
|
|
|
|
return result;
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 179:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
var net = __webpack_require__(631);
|
|
var tls = __webpack_require__(16);
|
|
var http = __webpack_require__(605);
|
|
var https = __webpack_require__(34);
|
|
var events = __webpack_require__(614);
|
|
var assert = __webpack_require__(357);
|
|
var util = __webpack_require__(669);
|
|
|
|
|
|
exports.httpOverHttp = httpOverHttp;
|
|
exports.httpsOverHttp = httpsOverHttp;
|
|
exports.httpOverHttps = httpOverHttps;
|
|
exports.httpsOverHttps = httpsOverHttps;
|
|
|
|
|
|
// Factory helpers: one per target/proxy protocol combination. HTTPS targets
// additionally upgrade the tunneled socket to TLS and default to port 443.

// HTTP target reached through an HTTP proxy.
function httpOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  return agent;
}

// HTTPS target reached through an HTTP proxy.
function httpsOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}

// HTTP target reached through an HTTPS proxy.
function httpOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  return agent;
}

// HTTPS target reached through an HTTPS proxy.
function httpsOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}
|
|
|
|
|
|
/**
 * An http.Agent-style agent that tunnels requests through a proxy using
 * HTTP CONNECT. Tracks live sockets and queues requests beyond `maxSockets`.
 *
 * @param {Object} [options] Agent options; `options.proxy` configures the
 *   proxy connection, `options.maxSockets` caps concurrent tunnels.
 */
function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  self.requests = []; // queued requests waiting for a free socket
  self.sockets = [];  // sockets (or placeholders) currently in use

  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    // No queued request can reuse this socket; close it and free the slot.
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);
|
|
|
|
/**
 * Entry point called by http.ClientRequest: attaches `req` to a tunneled
 * socket, creating a new tunnel unless the `maxSockets` limit forces
 * queueing.
 */
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    function onFree() {
      self.emit('free', socket, options);
    }

    function onCloseOrRemove(err) {
      // Drop the socket from the pool and detach all listeners.
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};
|
|
|
|
/**
 * Opens a tunnel to the target by issuing an HTTP CONNECT request to the
 * proxy, then hands the established socket to `cb`.
 *
 * A placeholder object reserves a slot in `self.sockets` until the real
 * socket exists, keeping `maxSockets` accounting correct. On failure an
 * ECONNRESET error is emitted on the originating request.
 *
 * @param {Object} options Merged request options (host, port, request, ...).
 * @param {Function} cb Receives the connected socket on success.
 */
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated `new Buffer(string)` constructor;
    // output is identical for string input.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
        res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
        'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Replace the placeholder with the live socket.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
      cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
      'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
|
|
|
|
/**
 * Drops `socket` (or a placeholder) from the pool and, when a request is
 * queued, immediately creates a replacement socket for it.
 */
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var pos = this.sockets.indexOf(socket)
  if (pos === -1) {
    return;
  }
  this.sockets.splice(pos, 1);

  var pending = this.requests.shift();
  if (pending) {
    // If we have pending requests and a socket gets closed a new one
    // needs to be created to take over in the pool for the one that closed.
    this.createSocket(pending, function(socket) {
      pending.request.onSocket(socket);
    });
  }
};
|
|
|
|
/**
 * createSocket variant for HTTPS targets: establishes the CONNECT tunnel,
 * then wraps the raw socket in TLS before handing it to `cb`.
 */
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    // SNI servername: the request's host header without any trailing :port.
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    // Swap the plain socket for the TLS-wrapped one in the pool.
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}
|
|
|
|
|
|
/**
 * Normalizes legacy positional arguments into an options object.
 *
 * Node < 0.11 passed (host, port, localAddress) separately; newer versions
 * pass a single options object, which is returned untouched.
 *
 * @param {string|Object} host Hostname string, or a full options object.
 * @param {number} [port] Target port (legacy form only).
 * @param {string} [localAddress] Local interface (legacy form only).
 * @returns {Object} Options with host/port/localAddress.
 */
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // for v0.11 or later
  }
  // since v0.10
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
|
|
|
|
/**
 * Copies own enumerable properties from every following argument onto
 * `target`, skipping `undefined` values; later sources win. Non-object
 * arguments are ignored. Mutates and returns `target`.
 *
 * @param {Object} target Destination object.
 * @returns {Object} The mutated `target`.
 */
function mergeOptions(target) {
  var sources = Array.prototype.slice.call(arguments, 1);
  sources.forEach(function(source) {
    if (typeof source !== 'object') {
      return;
    }
    Object.keys(source).forEach(function(key) {
      if (source[key] !== undefined) {
        target[key] = source[key];
      }
    });
  });
  return target;
}
|
|
|
|
|
|
// Debug logging is enabled by listing `tunnel` in NODE_DEBUG; otherwise
// `debug` is a no-op. Messages are prefixed with "TUNNEL:" and written to
// stderr.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}
exports.debug = debug; // for test
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 190:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = authenticationPlugin;

const { createTokenAuth } = __webpack_require__(813);
const { Deprecation } = __webpack_require__(692);
const once = __webpack_require__(969);

const beforeRequest = __webpack_require__(863);
const requestError = __webpack_require__(991);
const validate = __webpack_require__(954);
const withAuthorizationPrefix = __webpack_require__(143);

// Wrapped in `once` so each deprecation is logged at most once per process.
const deprecateAuthBasic = once((log, deprecation) => log.warn(deprecation));
const deprecateAuthObject = once((log, deprecation) => log.warn(deprecation));
|
|
|
|
/**
 * Octokit core plugin that wires up authentication from constructor
 * options. Resolution order:
 *   1. `options.authStrategy` - custom strategy, receives `options.auth`;
 *   2. no `options.auth` - unauthenticated no-op;
 *   3. string `options.auth` that is not Basic Auth - default token auth;
 *   4. otherwise (auth objects / Basic strings) - deprecated legacy hooks
 *      plus a one-time deprecation warning.
 */
function authenticationPlugin(octokit, options) {
  // If `options.authStrategy` is set then use it and pass in `options.auth`
  if (options.authStrategy) {
    const auth = options.authStrategy(options.auth);
    octokit.hook.wrap("request", auth.hook);
    octokit.auth = auth;
    return;
  }

  // If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
  // is unauthenticated. The `octokit.auth()` method is a no-op and no request hook is registered.
  if (!options.auth) {
    octokit.auth = () =>
      Promise.resolve({
        type: "unauthenticated"
      });
    return;
  }

  const isBasicAuthString =
    typeof options.auth === "string" &&
    /^basic/.test(withAuthorizationPrefix(options.auth));

  // If only `options.auth` is set to a string, use the default token authentication strategy.
  if (typeof options.auth === "string" && !isBasicAuthString) {
    const auth = createTokenAuth(options.auth);
    octokit.hook.wrap("request", auth.hook);
    octokit.auth = auth;
    return;
  }

  // Otherwise log a deprecation message
  // NOTE(review): the local name "deprecationMessapge" is misspelled but
  // purely internal; kept as-is in this documentation-only pass.
  const [deprecationMethod, deprecationMessapge] = isBasicAuthString
    ? [
        deprecateAuthBasic,
        'Setting the "new Octokit({ auth })" option to a Basic Auth string is deprecated. Use https://github.com/octokit/auth-basic.js instead. See (https://octokit.github.io/rest.js/#authentication)'
      ]
    : [
        deprecateAuthObject,
        'Setting the "new Octokit({ auth })" option to an object without also setting the "authStrategy" option is deprecated and will be removed in v17. See (https://octokit.github.io/rest.js/#authentication)'
      ];
  deprecationMethod(
    octokit.log,
    new Deprecation("[@octokit/rest] " + deprecationMessapge)
  );

  octokit.auth = () =>
    Promise.resolve({
      type: "deprecated",
      message: deprecationMessapge
    });

  validate(options.auth);

  const state = {
    octokit,
    auth: options.auth
  };

  // Legacy auth: inject credentials before each request, handle auth errors.
  octokit.hook.before("request", beforeRequest.bind(null, state));
  octokit.hook.error("request", requestError.bind(null, state));
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 197:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
// isexe: checks whether a path points to an executable regular file.
module.exports = isexe
isexe.sync = sync

var fs = __webpack_require__(747)
|
|
|
|
/**
 * Asynchronously checks whether `path` is an executable regular file.
 *
 * @param {string} path File path to test.
 * @param {Object} options May carry `uid`/`gid` overrides.
 * @param {Function} cb Called with (error, isExecutable).
 */
function isexe (path, options, cb) {
  fs.stat(path, function (er, stat) {
    if (er) {
      // Stat failed (e.g. missing file): report the error, not executable.
      cb(er, false)
      return
    }
    cb(er, checkStat(stat, options))
  })
}
|
|
|
|
/**
 * Synchronous variant of `isexe`; throws if the path cannot be stat'ed.
 */
function sync (path, options) {
  return checkStat(fs.statSync(path), options)
}

/**
 * A stat qualifies when it is a regular file with a matching exec bit.
 */
function checkStat (stat, options) {
  return stat.isFile() && checkMode(stat, options)
}
|
|
|
|
/**
 * Tests whether the exec bits of `stat.mode` grant execution to the
 * current (or overridden) uid/gid: other-exec always counts, group-exec
 * when the gid matches, owner-exec when the uid matches, and any exec bit
 * when running as root.
 *
 * @param {fs.Stats} stat Stat result carrying mode/uid/gid.
 * @param {Object} options Optional `uid`/`gid` overrides; otherwise
 *   `process.getuid()`/`process.getgid()` are consulted.
 * @returns {number|boolean} Truthy when executable.
 */
function checkMode (stat, options) {
  var mode = stat.mode

  var myUid = options.uid !== undefined ?
    options.uid : process.getuid && process.getuid()
  var myGid = options.gid !== undefined ?
    options.gid : process.getgid && process.getgid()

  var otherExec = parseInt('001', 8)
  var groupExec = parseInt('010', 8)
  var ownerExec = parseInt('100', 8)
  var anyExec = ownerExec | groupExec

  return (mode & otherExec) ||
    ((mode & groupExec) && stat.gid === myGid) ||
    ((mode & ownerExec) && stat.uid === myUid) ||
    ((mode & anyExec) && myUid === 0)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 198:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript-emitted async helper: drives a generator of awaitables and
// exposes the run as a promise of class P (defaults to Promise).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Lift a plain value into the target promise class unless it already is one.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the settled value of the last await,
        // or throw the rejection back into it.
        function onFulfilled(value) { try { advance(generator.next(value)); } catch (e) { reject(e); } }
        function onRejected(value) { try { advance(generator["throw"](value)); } catch (e) { reject(e); } }
        function advance(result) {
            if (result.done) {
                resolve(result.value);
            } else {
                adopt(result.value).then(onFulfilled, onRejected);
            }
        }
        advance((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// TypeScript-emitted namespace-import helper for CommonJS interop.
var __importStar = (this && this.__importStar) || function (mod) {
    // Real ES modules pass straight through.
    if (mod && mod.__esModule) return mod;
    // CommonJS values get copied onto a fresh namespace object whose
    // `default` property is the original export.
    var ns = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) ns[key] = mod[key];
        }
    }
    ns["default"] = mod;
    return ns;
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
// Bundled dependencies; numeric ids are webpack module ids.
const core = __importStar(__webpack_require__(470)); // logging / setFailed / warning (looks like @actions/core)
const coreCommand = __importStar(__webpack_require__(431)); // issueCommand — workflow command emitter
const gitSourceProvider = __importStar(__webpack_require__(293)); // getSource / cleanup of the checkout
const inputHelper = __importStar(__webpack_require__(821)); // getInputs — action input parsing
const path = __importStar(__webpack_require__(622)); // Node "path"
const stateHelper = __importStar(__webpack_require__(153)); // IsPost / RepositoryPath saved between phases
|
|
/**
 * Main entry point: parse the action inputs, register the git problem
 * matcher, and fetch the repository sources. Any error fails the run.
 */
function run() {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            const settings = inputHelper.getInputs();
            try {
                // Surface git errors in the Actions UI via the problem matcher.
                coreCommand.issueCommand('add-matcher', {}, path.join(__dirname, 'problem-matcher.json'));
                // Fetch the repository sources.
                yield gitSourceProvider.getSource(settings);
            }
            finally {
                // Always remove the matcher, even when getSource fails.
                coreCommand.issueCommand('remove-matcher', { owner: 'checkout-git' }, '');
            }
        }
        catch (error) {
            core.setFailed(error.message);
        }
    });
}
|
|
/**
 * Post-job entry point: best-effort cleanup of the checked-out repository.
 * Failures are downgraded to warnings so the job outcome is unaffected.
 */
function cleanup() {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            yield gitSourceProvider.cleanup(stateHelper.RepositoryPath);
        }
        catch (err) {
            core.warning(err.message);
        }
    });
}
|
|
// Entry dispatch: this script runs twice per job — the "main" phase does
// the checkout and the "post" phase cleans up. stateHelper.IsPost
// distinguishes the two invocations.
if (stateHelper.IsPost) {
    cleanup();
}
else {
    run();
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 211:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
// Unwrap an interop'd ES-module default export; pass everything else through.
function _interopDefault (ex) {
    if (ex && (typeof ex === 'object') && 'default' in ex) {
        return ex['default'];
    }
    return ex;
}
|
|
|
|
// Human-readable OS name helper (webpack module id 2) — looks like the
// "os-name" package; verify against the module map.
var osName = _interopDefault(__webpack_require__(2));
|
|
|
|
/**
 * Build a user-agent string like "Node.js/12.0.0 (macOS 10.15; x64)".
 * Falls back to placeholder strings when the platform cannot be detected.
 */
function getUserAgent() {
  try {
    return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
  } catch (error) {
    // A "wmic os get Caption" failure still tells us we are on Windows,
    // just with an undetectable version.
    const onWindows = /wmic os get Caption/.test(error.message);
    return onWindows ? "Windows <version undetectable>" : "<environment undetectable>";
  }
}
|
|
|
|
// Public API of this bundled module.
exports.getUserAgent = getUserAgent;
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 215:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = {"name":"@octokit/rest","version":"16.43.1","publishConfig":{"access":"public"},"description":"GitHub REST API client for Node.js","keywords":["octokit","github","rest","api-client"],"author":"Gregor Martynus (https://github.com/gr2m)","contributors":[{"name":"Mike de Boer","email":"info@mikedeboer.nl"},{"name":"Fabian Jakobs","email":"fabian@c9.io"},{"name":"Joe Gallo","email":"joe@brassafrax.com"},{"name":"Gregor Martynus","url":"https://github.com/gr2m"}],"repository":"https://github.com/octokit/rest.js","dependencies":{"@octokit/auth-token":"^2.4.0","@octokit/plugin-paginate-rest":"^1.1.1","@octokit/plugin-request-log":"^1.0.0","@octokit/plugin-rest-endpoint-methods":"2.4.0","@octokit/request":"^5.2.0","@octokit/request-error":"^1.0.2","atob-lite":"^2.0.0","before-after-hook":"^2.0.0","btoa-lite":"^1.0.0","deprecation":"^2.0.0","lodash.get":"^4.4.2","lodash.set":"^4.3.2","lodash.uniq":"^4.5.0","octokit-pagination-methods":"^1.1.0","once":"^1.4.0","universal-user-agent":"^4.0.0"},"devDependencies":{"@gimenete/type-writer":"^0.1.3","@octokit/auth":"^1.1.1","@octokit/fixtures-server":"^5.0.6","@octokit/graphql":"^4.2.0","@types/node":"^13.1.0","bundlesize":"^0.18.0","chai":"^4.1.2","compression-webpack-plugin":"^3.1.0","cypress":"^3.0.0","glob":"^7.1.2","http-proxy-agent":"^4.0.0","lodash.camelcase":"^4.3.0","lodash.merge":"^4.6.1","lodash.upperfirst":"^4.3.1","lolex":"^5.1.2","mkdirp":"^1.0.0","mocha":"^7.0.1","mustache":"^4.0.0","nock":"^11.3.3","npm-run-all":"^4.1.2","nyc":"^15.0.0","prettier":"^1.14.2","proxy":"^1.0.0","semantic-release":"^17.0.0","sinon":"^8.0.0","sinon-chai":"^3.0.0","sort-keys":"^4.0.0","string-to-arraybuffer":"^1.0.0","string-to-jsdoc-comment":"^1.0.0","typescript":"^3.3.1","webpack":"^4.0.0","webpack-bundle-analyzer":"^3.0.0","webpack-cli":"^3.0.0"},"types":"index.d.ts","scripts":{"coverage":"nyc report --reporter=html && open coverage/index.html","lint":"prettier --check '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 
'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json","lint:fix":"prettier --write '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json","pretest":"npm run -s lint","test":"nyc mocha test/mocha-node-setup.js \"test/*/**/*-test.js\"","test:browser":"cypress run --browser chrome","build":"npm-run-all build:*","build:ts":"npm run -s update-endpoints:typescript","prebuild:browser":"mkdirp dist/","build:browser":"npm-run-all build:browser:*","build:browser:development":"webpack --mode development --entry . --output-library=Octokit --output=./dist/octokit-rest.js --profile --json > dist/bundle-stats.json","build:browser:production":"webpack --mode production --entry . --plugin=compression-webpack-plugin --output-library=Octokit --output-path=./dist --output-filename=octokit-rest.min.js --devtool source-map","generate-bundle-report":"webpack-bundle-analyzer dist/bundle-stats.json --mode=static --no-open --report dist/bundle-report.html","update-endpoints":"npm-run-all update-endpoints:*","update-endpoints:fetch-json":"node scripts/update-endpoints/fetch-json","update-endpoints:typescript":"node scripts/update-endpoints/typescript","prevalidate:ts":"npm run -s build:ts","validate:ts":"tsc --target es6 --noImplicitAny index.d.ts","postvalidate:ts":"tsc --noEmit --target es6 test/typescript-validate.ts","start-fixtures-server":"octokit-fixtures-server"},"license":"MIT","files":["index.js","index.d.ts","lib","plugins"],"nyc":{"ignore":["test"]},"release":{"publish":["@semantic-release/npm",{"path":"@semantic-release/github","assets":["dist/*","!dist/*.map.gz"]}]},"bundlesize":[{"path":"./dist/octokit-rest.min.js.gz","maxSize":"33 kB"}],"_resolved":"https://registry.npmjs.org/@octokit/rest/-/rest-16.43.1.tgz","_integrity":"sha512-gfFKwRT/wFxq5qlNjnW2dh+qh74XgTQ2B179UX5K1HYCluioWj8Ndbgqw2PVqa1NnVJkGHp2ovMpVn/DImlmkw==","_from":"@octokit/rest@16.43.1"};
|
|
|
|
/***/ }),
|
|
|
|
/***/ 227:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript-emitted async helper (duplicated per compiled module): runs a
// generator of awaitables as a promise of class P (defaults to Promise).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Lift a plain value into the promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function step(result) {
            // Finished: settle the outer promise; otherwise await the yield.
            if (result.done) {
                resolve(result.value);
            } else {
                adopt(result.value).then(onNext, onThrow);
            }
        }
        function onNext(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function onThrow(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// TypeScript-emitted namespace-import helper (duplicated per compiled module).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;  // already an ES module namespace
    var ns = {};
    if (mod != null) {
        // Copy own enumerable properties of the CommonJS export.
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) ns[key] = mod[key];
        }
    }
    ns["default"] = mod;
    return ns;
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
const url_1 = __webpack_require__(835); // Node "url" (WHATWG URL)
const core = __importStar(__webpack_require__(470)); // debug logging (looks like @actions/core)
const github = __importStar(__webpack_require__(469)); // workflow context + GitHub API client
// Refspec that mirrors every tag from the remote.
exports.tagsRefSpec = '+refs/tags/*:refs/tags/*';
|
|
/**
 * Resolve the ref/commit the action was given into the ref (and optional
 * start point) that `git checkout` should use.
 * @param git    git command wrapper (must not be empty)
 * @param ref    fully- or un-qualified ref; may be empty when commit is set
 * @param commit commit SHA; may be empty when ref is set
 * @returns object with `ref` and, for branches, `startPoint`
 */
function getCheckoutInfo(git, ref, commit) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!git) {
            throw new Error('Arg git cannot be empty');
        }
        if (!ref && !commit) {
            throw new Error('Args ref and commit cannot both be empty');
        }
        const checkoutInfo = {};
        const refUpper = (ref || '').toUpperCase();
        if (!ref) {
            // SHA only: detached checkout of the bare commit.
            checkoutInfo.ref = commit;
        }
        else if (refUpper.startsWith('REFS/HEADS/')) {
            // Branch: check out a local branch tracking the remote one.
            const branchName = ref.substring('refs/heads/'.length);
            checkoutInfo.ref = branchName;
            checkoutInfo.startPoint = `refs/remotes/origin/${branchName}`;
        }
        else if (refUpper.startsWith('REFS/PULL/')) {
            // Pull request ref fetched under refs/remotes/pull/.
            const prRef = ref.substring('refs/pull/'.length);
            checkoutInfo.ref = `refs/remotes/pull/${prRef}`;
        }
        else if (refUpper.startsWith('REFS/')) {
            // Any other fully qualified ref (e.g. refs/tags/...).
            checkoutInfo.ref = ref;
        }
        else if (yield git.branchExists(true, `origin/${ref}`)) {
            // Unqualified name matching a remote branch.
            checkoutInfo.ref = ref;
            checkoutInfo.startPoint = `refs/remotes/origin/${ref}`;
        }
        else if (yield git.tagExists(`${ref}`)) {
            // Unqualified name matching a tag.
            checkoutInfo.ref = `refs/tags/${ref}`;
        }
        else {
            throw new Error(`A branch or tag with the name '${ref}' could not be found`);
        }
        return checkoutInfo;
    });
}
exports.getCheckoutInfo = getCheckoutInfo;
|
|
/**
 * Refspecs for a full-history fetch: all branches and all tags, plus (for
 * pull request refs) the PR merge ref pinned to the expected commit.
 */
function getRefSpecForAllHistory(ref, commit) {
    const refSpecs = ['+refs/heads/*:refs/remotes/origin/*', exports.tagsRefSpec];
    if (ref && ref.toUpperCase().startsWith('REFS/PULL/')) {
        const prRef = ref.substring('refs/pull/'.length);
        // Prefer the exact commit when known; fall back to the ref itself.
        refSpecs.push(`+${commit || ref}:refs/remotes/pull/${prRef}`);
    }
    return refSpecs;
}
|
|
exports.getRefSpecForAllHistory = getRefSpecForAllHistory; // module export
|
|
/**
 * Refspecs for a targeted fetch of exactly the ref and/or commit being
 * checked out.
 * @param ref    fully- or un-qualified ref; may be empty when commit is set
 * @param commit commit SHA; may be empty when ref is set
 * @returns array of refspec strings to pass to `git fetch`
 */
function getRefSpec(ref, commit) {
    if (!ref && !commit) {
        throw new Error('Args ref and commit cannot both be empty');
    }
    const refUpper = (ref || '').toUpperCase();
    if (commit) {
        // Fetch the pinned SHA, mapped onto the matching destination ref.
        if (refUpper.startsWith('REFS/HEADS/')) {
            const branchName = ref.substring('refs/heads/'.length);
            return [`+${commit}:refs/remotes/origin/${branchName}`];
        }
        if (refUpper.startsWith('REFS/PULL/')) {
            const prRef = ref.substring('refs/pull/'.length);
            return [`+${commit}:refs/remotes/pull/${prRef}`];
        }
        if (refUpper.startsWith('REFS/TAGS/')) {
            return [`+${commit}:${ref}`];
        }
        // Otherwise no destination ref — fetch the bare SHA.
        return [commit];
    }
    if (!refUpper.startsWith('REFS/')) {
        // Unqualified name: fetch any matching branches and tags.
        return [
            `+refs/heads/${ref}*:refs/remotes/origin/${ref}*`,
            `+refs/tags/${ref}*:refs/tags/${ref}*`
        ];
    }
    if (refUpper.startsWith('REFS/HEADS/')) {
        const branchName = ref.substring('refs/heads/'.length);
        return [`+${ref}:refs/remotes/origin/${branchName}`];
    }
    if (refUpper.startsWith('REFS/PULL/')) {
        const prRef = ref.substring('refs/pull/'.length);
        return [`+${ref}:refs/remotes/pull/${prRef}`];
    }
    // refs/tags/ (or any other fully qualified ref).
    return [`+${ref}:${ref}`];
}
|
|
exports.getRefSpec = getRefSpec; // module export
|
|
/**
 * Tests whether the initial fetch created the ref at the expected commit
 * @param git    git command wrapper (must not be empty)
 * @param ref    ref that was fetched; may be empty when commit is set
 * @param commit expected commit SHA; may be empty when ref is set
 * @returns true when the fetched ref points at the expected commit
 */
function testRef(git, ref, commit) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!git) {
            throw new Error('Arg git cannot be empty');
        }
        if (!ref && !commit) {
            throw new Error('Args ref and commit cannot both be empty');
        }
        if (!commit) {
            // No SHA? Nothing to test
            return true;
        }
        if (!ref) {
            // SHA only: the object merely needs to exist.
            return yield git.shaExists(commit);
        }
        const refUpper = ref.toUpperCase();
        if (refUpper.startsWith('REFS/HEADS/')) {
            // Branch must exist and point at the expected commit.
            const branchName = ref.substring('refs/heads/'.length);
            return ((yield git.branchExists(true, `origin/${branchName}`)) &&
                commit === (yield git.revParse(`refs/remotes/origin/${branchName}`)));
        }
        if (refUpper.startsWith('REFS/PULL/')) {
            // Assume matches because fetched using the commit
            return true;
        }
        if (refUpper.startsWith('REFS/TAGS/')) {
            // Tag must exist and point at the expected commit.
            const tagName = ref.substring('refs/tags/'.length);
            return ((yield git.tagExists(tagName)) && commit === (yield git.revParse(ref)));
        }
        // Unexpected ref format — log it and assume the fetch was fine.
        core.debug(`Unexpected ref format '${ref}' when testing ref info`);
        return true;
    });
}
exports.testRef = testRef;
|
|
/**
 * Best-effort stale-merge detection: for a public pull_request "synchronize"
 * event on the workflow repo, compare the merge commit message against the
 * expected head/base SHAs from the payload and, when they differ, report a
 * tracepoint via a tagged API request (the user-agent string is the payload).
 * All failures are swallowed and debug-logged — this must never break checkout.
 */
function checkCommitInfo(token, commitInfo, repositoryOwner, repositoryName, ref, commit) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            // GHES? (no telemetry for GitHub Enterprise Server)
            if (isGhes()) {
                return;
            }
            // Auth token?
            if (!token) {
                return;
            }
            // Public PR synchronize, for workflow repo?
            if (fromPayload('repository.private') !== false ||
                github.context.eventName !== 'pull_request' ||
                fromPayload('action') !== 'synchronize' ||
                repositoryOwner !== github.context.repo.owner ||
                repositoryName !== github.context.repo.repo ||
                ref !== github.context.ref ||
                !ref.startsWith('refs/pull/') ||
                commit !== github.context.sha) {
                return;
            }
            // Head SHA
            const expectedHeadSha = fromPayload('after');
            if (!expectedHeadSha) {
                core.debug('Unable to determine head sha');
                return;
            }
            // Base SHA
            const expectedBaseSha = fromPayload('pull_request.base.sha');
            if (!expectedBaseSha) {
                core.debug('Unable to determine base sha');
                return;
            }
            // Expected message?
            const expectedMessage = `Merge ${expectedHeadSha} into ${expectedBaseSha}`;
            if (commitInfo.indexOf(expectedMessage) >= 0) {
                return;
            }
            // Extract details from message
            const match = commitInfo.match(/Merge ([0-9a-f]{40}) into ([0-9a-f]{40})/);
            if (!match) {
                core.debug('Unexpected message format');
                return;
            }
            // Post telemetry
            const actualHeadSha = match[1];
            if (actualHeadSha !== expectedHeadSha) {
                core.debug(`Expected head sha ${expectedHeadSha}; actual head sha ${actualHeadSha}`);
                // NOTE(review): the repos.get call appears to exist only to deliver
                // the tracepoint user-agent string — confirm before changing.
                const octokit = new github.GitHub(token, {
                    userAgent: `actions-checkout-tracepoint/1.0 (code=STALE_MERGE;owner=${repositoryOwner};repo=${repositoryName};pr=${fromPayload('number')};run_id=${process.env['GITHUB_RUN_ID']};expected_head_sha=${expectedHeadSha};actual_head_sha=${actualHeadSha})`
                });
                yield octokit.repos.get({ owner: repositoryOwner, repo: repositoryName });
            }
        }
        catch (err) {
            // Never fail checkout because of telemetry.
            core.debug(`Error when validating commit info: ${err.stack}`);
        }
    });
}
exports.checkCommitInfo = checkCommitInfo;
|
|
// Read a dot-separated path out of the webhook event payload.
function fromPayload(path) {
    const payload = github.context.payload;
    return select(payload, path);
}
|
|
// Walk `obj` along a dot-separated `path`, returning undefined as soon as
// an intermediate value is falsy.
function select(obj, path) {
    let current = obj;
    let remaining = path;
    for (;;) {
        if (!current) {
            return undefined;
        }
        const dot = remaining.indexOf('.');
        if (dot < 0) {
            return current[remaining];
        }
        current = current[remaining.substr(0, dot)];
        remaining = remaining.substr(dot + 1);
    }
}
|
|
// True when GITHUB_SERVER_URL points at a GitHub Enterprise Server host
// rather than github.com.
function isGhes() {
    const serverUrl = process.env['GITHUB_SERVER_URL'] || 'https://github.com';
    const parsed = new url_1.URL(serverUrl);
    return parsed.hostname.toUpperCase() !== 'GITHUB.COM';
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 260:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
// Note: since nyc uses this module to output coverage, any lines
// that are in the direct sync flow of nyc's outputCoverage are
// ignored, since we can never get coverage for them.
var assert = __webpack_require__(357)
var signals = __webpack_require__(654)

var EE = __webpack_require__(614)
/* istanbul ignore if */
if (typeof EE !== 'function') {
  EE = EE.EventEmitter
}

// Share one emitter across every copy of this module loaded in the process,
// stashed on the process object itself.
var emitter
if (process.__signal_exit_emitter__) {
  emitter = process.__signal_exit_emitter__
} else {
  emitter = process.__signal_exit_emitter__ = new EE()
  emitter.count = 0    // number of loaded signal-exit instances
  emitter.emitted = {} // events that have already fired (fire-once)
}

// Because this emitter is a global, we have to check to see if a
// previous version of this library failed to enable infinite listeners.
// I know what you're about to say. But literally everything about
// signal-exit is a compromise with evil. Get used to it.
if (!emitter.infinite) {
  emitter.setMaxListeners(Infinity)
  emitter.infinite = true
}
|
|
|
|
module.exports = function (cb, opts) {
|
|
assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler')
|
|
|
|
if (loaded === false) {
|
|
load()
|
|
}
|
|
|
|
var ev = 'exit'
|
|
if (opts && opts.alwaysLast) {
|
|
ev = 'afterexit'
|
|
}
|
|
|
|
var remove = function () {
|
|
emitter.removeListener(ev, cb)
|
|
if (emitter.listeners('exit').length === 0 &&
|
|
emitter.listeners('afterexit').length === 0) {
|
|
unload()
|
|
}
|
|
}
|
|
emitter.on(ev, cb)
|
|
|
|
return remove
|
|
}
|
|
|
|
module.exports.unload = unload
// Detach all signal listeners and restore the patched process methods.
function unload () {
  if (!loaded) {
    return
  }
  loaded = false

  for (var i = 0; i < signals.length; i++) {
    var sig = signals[i]
    try {
      process.removeListener(sig, sigListeners[sig])
    } catch (er) {
      // Listener may never have attached on this platform — ignore.
    }
  }
  process.emit = originalProcessEmit
  process.reallyExit = originalProcessReallyExit
  emitter.count -= 1
}
|
|
|
|
// Emit an event through the shared emitter at most once per process
// lifetime ('exit' and 'afterexit' must never fire twice).
function emit (eventName, exitCode, signal) {
  if (!emitter.emitted[eventName]) {
    emitter.emitted[eventName] = true
    emitter.emit(eventName, exitCode, signal)
  }
}
|
|
|
|
// { <signal>: <listener fn>, ... }
var sigListeners = {}
signals.forEach(function (sig) {
  sigListeners[sig] = function listener () {
    // Only act when we are the last listener standing for this signal:
    // that means an exit is imminent, so emit our events, detach, and
    // re-raise the signal to let the default disposition kill the process.
    var listeners = process.listeners(sig)
    if (listeners.length !== emitter.count) {
      return
    }
    unload()
    emit('exit', null, sig)
    /* istanbul ignore next */
    emit('afterexit', null, sig)
    /* istanbul ignore next */
    process.kill(process.pid, sig)
  }
})
|
|
|
|
// Expose the (possibly platform-filtered) list of hooked signals.
module.exports.signals = function () {
  return signals
}

module.exports.load = load

// Whether the signal/process hooks are currently installed.
var loaded = false
|
|
|
|
// Install listeners for every supported signal and patch process.emit /
// process.reallyExit so ordinary exits are observed too.
function load () {
  if (loaded) {
    return
  }
  loaded = true

  // This is the number of onSignalExit's that are in play.
  // It's important so that we can count the correct number of
  // listeners on signals, and don't wait for the other one to
  // handle it instead of us.
  emitter.count += 1

  // Some signals cannot be listened for on every platform; keep only
  // the ones that attach successfully.
  signals = signals.filter(function (sig) {
    try {
      process.on(sig, sigListeners[sig])
      return true
    } catch (er) {
      return false
    }
  })

  process.emit = processEmit
  process.reallyExit = processReallyExit
}
|
|
|
|
var originalProcessReallyExit = process.reallyExit
// Replacement for process.reallyExit: records the exit code and fires the
// 'exit'/'afterexit' events before delegating to the real implementation.
function processReallyExit (code) {
  process.exitCode = code || 0
  emit('exit', process.exitCode, null)
  /* istanbul ignore next */
  emit('afterexit', process.exitCode, null)
  /* istanbul ignore next */
  originalProcessReallyExit.call(process, process.exitCode)
}
|
|
|
|
var originalProcessEmit = process.emit
// Replacement for process.emit: intercepts 'exit' so our handlers run after
// any user 'exit' listeners, with the final exit code captured first.
function processEmit (ev, arg) {
  if (ev === 'exit') {
    if (arg !== undefined) {
      process.exitCode = arg
    }
    var ret = originalProcessEmit.apply(this, arguments)
    emit('exit', process.exitCode, null)
    /* istanbul ignore next */
    emit('afterexit', process.exitCode, null)
    return ret
  } else {
    return originalProcessEmit.apply(this, arguments)
  }
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 262:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const fs_1 = __webpack_require__(747);
|
|
const os_1 = __webpack_require__(87);
|
|
/**
 * Snapshot of the GitHub Actions execution context, hydrated from the
 * GITHUB_* environment variables and the webhook event payload file.
 */
class Context {
    /**
     * Hydrate the context from the environment
     */
    constructor() {
        this.payload = {};
        const eventPath = process.env.GITHUB_EVENT_PATH;
        if (eventPath) {
            if (fs_1.existsSync(eventPath)) {
                // Parse the webhook payload written by the runner.
                this.payload = JSON.parse(fs_1.readFileSync(eventPath, { encoding: 'utf8' }));
            }
            else {
                // A dangling path is reported but not fatal.
                process.stdout.write(`GITHUB_EVENT_PATH ${eventPath} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
    }
    // Repo coordinates plus the issue/PR number from the payload.
    get issue() {
        const payload = this.payload;
        const number = (payload.issue || payload.pull_request || payload).number;
        return Object.assign(Object.assign({}, this.repo), { number });
    }
    // Repository owner and name, from GITHUB_REPOSITORY or the payload.
    get repo() {
        if (process.env.GITHUB_REPOSITORY) {
            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
            return { owner, repo };
        }
        if (this.payload.repository) {
            return {
                owner: this.payload.repository.owner.login,
                repo: this.payload.repository.name
            };
        }
        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
    }
}
|
|
exports.Context = Context; // module export
//# sourceMappingURL=context.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 265:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = getPage

// Bundled helpers (webpack module ids in parentheses).
const deprecate = __webpack_require__(370) // deprecation warning logger
const getPageLinks = __webpack_require__(577) // Link-header parser
const HttpError = __webpack_require__(297) // HTTP error class
|
|
|
|
// Follow one of the Link-header relations ("next", "prev", "first", "last")
// of a previous response and request that page. Deprecated in favor of
// octokit.paginate / async iterators.
function getPage (octokit, link, which, headers) {
  deprecate(`octokit.get${which.charAt(0).toUpperCase() + which.slice(1)}Page() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
  const url = getPageLinks(link)[which]

  if (!url) {
    // The requested relation is absent from the Link header.
    return Promise.reject(new HttpError(`No ${which} page found`, 404))
  }

  return octokit.request({
    url,
    headers: applyAcceptHeader(link, headers)
  })
}
|
|
|
|
// Carry the previously negotiated media type forward unless the caller
// already set an explicit accept header. Mutates and returns `headers`
// (creating it when absent).
function applyAcceptHeader (res, headers) {
  const negotiated = res.headers && res.headers['x-github-media-type']
  const explicitAccept = headers && headers.accept

  if (!negotiated || explicitAccept) {
    return headers
  }

  // e.g. "github.v3; param=raw; format=json" -> "github.v3.raw+json"
  const mediaType = negotiated
    .replace('; param=', '.')
    .replace('; format=', '+')
  const result = headers || {}
  result.accept = 'application/vnd.' + mediaType
  return result
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 280:
|
|
/***/ (function(module, exports) {
|
|
|
|
exports = module.exports = SemVer

// Optional debug logging, enabled via NODE_DEBUG=semver.
var debug
/* istanbul ignore next */
if (typeof process === 'object' &&
    process.env &&
    process.env.NODE_DEBUG &&
    /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
  debug = function () {
    var args = Array.prototype.slice.call(arguments, 0)
    args.unshift('SEMVER')
    console.log.apply(console, args)
  }
} else {
  debug = function () {}
}

// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports.SEMVER_SPEC_VERSION = '2.0.0'
|
|
|
|
// Longest version string we are willing to parse.
var MAX_LENGTH = 256
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
  /* istanbul ignore next */ 9007199254740991

// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

// The actual regexps go on exports.re
var re = exports.re = []
var src = exports.src = []
var R = 0 // next free slot in re/src
|
|
|
|
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
// Pattern sources are built compositionally into `src` (strings), then
// compiled into `re` (RegExp objects) by the loop at the end; a few that
// need the /g flag are compiled inline.

// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.

var NUMERICIDENTIFIER = R++
src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
var NUMERICIDENTIFIERLOOSE = R++
src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

var NONNUMERICIDENTIFIER = R++
src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.

var MAINVERSION = R++
src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
  '(' + src[NUMERICIDENTIFIER] + ')\\.' +
  '(' + src[NUMERICIDENTIFIER] + ')'

var MAINVERSIONLOOSE = R++
src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
  '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
  '(' + src[NUMERICIDENTIFIERLOOSE] + ')'

// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.

var PRERELEASEIDENTIFIER = R++
src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
  '|' + src[NONNUMERICIDENTIFIER] + ')'

var PRERELEASEIDENTIFIERLOOSE = R++
src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
  '|' + src[NONNUMERICIDENTIFIER] + ')'

// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.

var PRERELEASE = R++
src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
  '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'

var PRERELEASELOOSE = R++
src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
  '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'

// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.

var BUILDIDENTIFIER = R++
src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.

var BUILD = R++
src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
  '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'

// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.

// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.

var FULL = R++
var FULLPLAIN = 'v?' + src[MAINVERSION] +
  src[PRERELEASE] + '?' +
  src[BUILD] + '?'

src[FULL] = '^' + FULLPLAIN + '$'

// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
  src[PRERELEASELOOSE] + '?' +
  src[BUILD] + '?'

var LOOSE = R++
src[LOOSE] = '^' + LOOSEPLAIN + '$'

var GTLT = R++
src[GTLT] = '((?:<|>)?=?)'

// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
var XRANGEIDENTIFIERLOOSE = R++
src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
var XRANGEIDENTIFIER = R++
src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'

var XRANGEPLAIN = R++
src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
  '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
  '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
  '(?:' + src[PRERELEASE] + ')?' +
  src[BUILD] + '?' +
  ')?)?'

var XRANGEPLAINLOOSE = R++
src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
  '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
  '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
  '(?:' + src[PRERELEASELOOSE] + ')?' +
  src[BUILD] + '?' +
  ')?)?'

var XRANGE = R++
src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
var XRANGELOOSE = R++
src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'

// Coercion.
// Extract anything that could conceivably be a part of a valid semver
var COERCE = R++
src[COERCE] = '(?:^|[^\\d])' +
  '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
  '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
  '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
  '(?:$|[^\\d])'

// Tilde ranges.
// Meaning is "reasonably at or greater than"
var LONETILDE = R++
src[LONETILDE] = '(?:~>?)'

var TILDETRIM = R++
src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
var tildeTrimReplace = '$1~'

var TILDE = R++
src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
var TILDELOOSE = R++
src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'

// Caret ranges.
// Meaning is "at least and backwards compatible with"
var LONECARET = R++
src[LONECARET] = '(?:\\^)'

var CARETTRIM = R++
src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
var caretTrimReplace = '$1^'

var CARET = R++
src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
var CARETLOOSE = R++
src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'

// A simple gt/lt/eq thing, or just "" to indicate "any version"
var COMPARATORLOOSE = R++
src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
var COMPARATOR = R++
src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'

// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
var COMPARATORTRIM = R++
src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
  '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'

// this one has to use the /g flag
re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
var HYPHENRANGE = R++
src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
  '\\s+-\\s+' +
  '(' + src[XRANGEPLAIN] + ')' +
  '\\s*$'

var HYPHENRANGELOOSE = R++
src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
  '\\s+-\\s+' +
  '(' + src[XRANGEPLAINLOOSE] + ')' +
  '\\s*$'

// Star ranges basically just allow anything at all.
var STAR = R++
src[STAR] = '(<|>)?=?\\s*\\*'

// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag.
for (var i = 0; i < R; i++) {
  debug(i, src[i])
  if (!re[i]) {
    re[i] = new RegExp(src[i])
  }
}
|
|
|
exports.parse = parse
/**
 * Parse `version` into a SemVer instance, or return null when it is not a
 * valid semver string under the given options.
 * @param version string (or SemVer instance, returned as-is)
 * @param options object with `loose`, or a legacy boolean `loose` flag
 */
function parse (version, options) {
  if (!options || typeof options !== 'object') {
    // Legacy call style: a bare boolean means { loose }.
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string' || version.length > MAX_LENGTH) {
    return null
  }

  var pattern = options.loose ? re[LOOSE] : re[FULL]
  if (!pattern.test(version)) {
    return null
  }

  try {
    return new SemVer(version, options)
  } catch (er) {
    return null
  }
}
|
|
|
|
exports.valid = valid
|
|
function valid (version, options) {
|
|
var v = parse(version, options)
|
|
return v ? v.version : null
|
|
}
|
|
|
|
exports.clean = clean
|
|
function clean (version, options) {
|
|
var s = parse(version.trim().replace(/^[=v]+/, ''), options)
|
|
return s ? s.version : null
|
|
}
|
|
|
|
exports.SemVer = SemVer

// SemVer value object. Parses `version` into numeric major/minor/patch,
// a prerelease identifier array, and a build identifier array.
// Throws TypeError for non-string input, over-long input, or a string
// that fails the (strict or loose) version regexp. Callable without `new`.
function SemVer (version, options) {
  if (!options || typeof options !== 'object') {
    // Legacy signature: a truthy non-object second argument means "loose".
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }
  if (version instanceof SemVer) {
    // Same looseness: reuse as-is; otherwise reparse its string form.
    if (version.loose === options.loose) {
      return version
    } else {
      version = version.version
    }
  } else if (typeof version !== 'string') {
    throw new TypeError('Invalid Version: ' + version)
  }

  if (version.length > MAX_LENGTH) {
    throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
  }

  if (!(this instanceof SemVer)) {
    return new SemVer(version, options)
  }

  debug('SemVer', version, options)
  this.options = options
  this.loose = !!options.loose

  var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])

  if (!m) {
    throw new TypeError('Invalid Version: ' + version)
  }

  // Keep the original (untrimmed) input for round-tripping.
  this.raw = version

  // these are actually numbers
  this.major = +m[1]
  this.minor = +m[2]
  this.patch = +m[3]

  if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
    throw new TypeError('Invalid major version')
  }

  if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
    throw new TypeError('Invalid minor version')
  }

  if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
    throw new TypeError('Invalid patch version')
  }

  // numberify any prerelease numeric ids, so '1' < '2' < '10' compares
  // numerically; non-numeric or unsafe-large ids stay strings.
  if (!m[4]) {
    this.prerelease = []
  } else {
    this.prerelease = m[4].split('.').map(function (id) {
      if (/^[0-9]+$/.test(id)) {
        var num = +id
        if (num >= 0 && num < MAX_SAFE_INTEGER) {
          return num
        }
      }
      return id
    })
  }

  // Build metadata is kept but (per the code below) never fed into format().
  this.build = m[5] ? m[5].split('.') : []
  this.format()
}
|
|
|
|
// Recompute this.version as the canonical "M.m.p[-pre.ids]" string
// (build metadata is intentionally omitted) and return it.
SemVer.prototype.format = function () {
  var rendered = this.major + '.' + this.minor + '.' + this.patch
  if (this.prerelease.length) {
    rendered += '-' + this.prerelease.join('.')
  }
  this.version = rendered
  return rendered
}

SemVer.prototype.toString = function () {
  return this.version
}

// Full comparison: main version first, prerelease identifiers as tiebreak.
// Returns a negative, zero, or positive number.
SemVer.prototype.compare = function (other) {
  debug('SemVer.compare', this.version, this.options, other)
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  return this.compareMain(other) || this.comparePre(other)
}

// Compare only major.minor.patch, ignoring prerelease identifiers.
SemVer.prototype.compareMain = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  var byMajor = compareIdentifiers(this.major, other.major)
  if (byMajor) {
    return byMajor
  }
  var byMinor = compareIdentifiers(this.minor, other.minor)
  if (byMinor) {
    return byMinor
  }
  return compareIdentifiers(this.patch, other.patch)
}
|
|
|
|
// Compare prerelease identifier lists, per semver precedence rules.
SemVer.prototype.comparePre = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  // NOT having a prerelease is > having one.
  var thisHasPre = this.prerelease.length > 0
  var otherHasPre = other.prerelease.length > 0
  if (thisHasPre && !otherHasPre) {
    return -1
  }
  if (!thisHasPre && otherHasPre) {
    return 1
  }
  if (!thisHasPre && !otherHasPre) {
    return 0
  }

  // Walk the identifier lists pairwise; a shorter list that is a prefix
  // of the other ranks lower.
  for (var i = 0; ; i++) {
    var a = this.prerelease[i]
    var b = other.prerelease[i]
    debug('prerelease compare', i, a, b)
    if (a === undefined && b === undefined) {
      return 0
    }
    if (b === undefined) {
      return 1
    }
    if (a === undefined) {
      return -1
    }
    if (a !== b) {
      return compareIdentifiers(a, b)
    }
  }
}
|
|
|
|
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
//
// Mutates this SemVer in place according to `release` and returns `this`.
// `identifier` (optional) names the prerelease series, e.g. 'beta'.
// Throws Error for an unrecognized `release` argument.
SemVer.prototype.inc = function (release, identifier) {
  switch (release) {
    case 'premajor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor = 0
      this.major++
      this.inc('pre', identifier)
      break
    case 'preminor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor++
      this.inc('pre', identifier)
      break
    case 'prepatch':
      // If this is already a prerelease, it will bump to the next version
      // drop any prereleases that might already exist, since they are not
      // relevant at this point.
      this.prerelease.length = 0
      this.inc('patch', identifier)
      this.inc('pre', identifier)
      break
    // If the input is a non-prerelease version, this acts the same as
    // prepatch.
    case 'prerelease':
      if (this.prerelease.length === 0) {
        this.inc('patch', identifier)
      }
      this.inc('pre', identifier)
      break

    case 'major':
      // If this is a pre-major version, bump up to the same major version.
      // Otherwise increment major.
      // 1.0.0-5 bumps to 1.0.0
      // 1.1.0 bumps to 2.0.0
      if (this.minor !== 0 ||
          this.patch !== 0 ||
          this.prerelease.length === 0) {
        this.major++
      }
      this.minor = 0
      this.patch = 0
      this.prerelease = []
      break
    case 'minor':
      // If this is a pre-minor version, bump up to the same minor version.
      // Otherwise increment minor.
      // 1.2.0-5 bumps to 1.2.0
      // 1.2.1 bumps to 1.3.0
      if (this.patch !== 0 || this.prerelease.length === 0) {
        this.minor++
      }
      this.patch = 0
      this.prerelease = []
      break
    case 'patch':
      // If this is not a pre-release version, it will increment the patch.
      // If it is a pre-release it will bump up to the same patch version.
      // 1.2.0-5 patches to 1.2.0
      // 1.2.0 patches to 1.2.1
      if (this.prerelease.length === 0) {
        this.patch++
      }
      this.prerelease = []
      break
    // This probably shouldn't be used publicly.
    // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
    case 'pre':
      if (this.prerelease.length === 0) {
        this.prerelease = [0]
      } else {
        // Increment the right-most numeric identifier, if any.
        var i = this.prerelease.length
        while (--i >= 0) {
          if (typeof this.prerelease[i] === 'number') {
            this.prerelease[i]++
            // Sentinel: -2 marks "incremented something" so the check
            // below (i === -1) only fires when no numeric id was found.
            i = -2
          }
        }
        if (i === -1) {
          // didn't increment anything
          this.prerelease.push(0)
        }
      }
      if (identifier) {
        // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
        // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
        if (this.prerelease[0] === identifier) {
          if (isNaN(this.prerelease[1])) {
            this.prerelease = [identifier, 0]
          }
        } else {
          this.prerelease = [identifier, 0]
        }
      }
      break

    default:
      throw new Error('invalid increment argument: ' + release)
  }
  this.format()
  // Keep raw in sync with the mutated value.
  this.raw = this.version
  return this
}
|
|
|
|
exports.inc = inc
// Bump `version` by `release` ('major', 'preminor', …) and return the new
// version string, or null when `version` is invalid.
function inc (version, release, loose, identifier) {
  // Allow inc(version, release, identifier) with the loose flag omitted.
  if (typeof loose === 'string') {
    identifier = loose
    loose = undefined
  }

  var bumped
  try {
    bumped = new SemVer(version, loose).inc(release, identifier).version
  } catch (er) {
    bumped = null
  }
  return bumped
}
|
|
|
|
exports.diff = diff
// Name the most significant part in which the two versions differ:
// 'major' | 'minor' | 'patch', prefixed with 'pre' when either version
// has prerelease identifiers; 'prerelease' when only prerelease ids differ;
// null when the versions are equal.
function diff (version1, version2) {
  if (eq(version1, version2)) {
    return null
  } else {
    var v1 = parse(version1)
    var v2 = parse(version2)
    var prefix = ''
    if (v1.prerelease.length || v2.prerelease.length) {
      prefix = 'pre'
      // NOTE: `var` hoists this to function scope on purpose — it must
      // still be visible at the final return below.
      var defaultResult = 'prerelease'
    }
    for (var key in v1) {
      if (key === 'major' || key === 'minor' || key === 'patch') {
        if (v1[key] !== v2[key]) {
          return prefix + key
        }
      }
    }
    return defaultResult // may be undefined
  }
}

exports.compareIdentifiers = compareIdentifiers
|
|
|
|
var numeric = /^[0-9]+$/
// Compare two dot-separated identifiers per semver precedence:
// numeric identifiers compare numerically and always rank below
// alphanumeric ones; otherwise plain </> ordering applies.
// Returns -1, 0, or 1.
function compareIdentifiers (a, b) {
  var aIsNum = numeric.test(a)
  var bIsNum = numeric.test(b)

  if (aIsNum && bIsNum) {
    a = +a
    b = +b
  }

  if (a === b) {
    return 0
  }
  if (aIsNum && !bIsNum) {
    return -1
  }
  if (bIsNum && !aIsNum) {
    return 1
  }
  return a < b ? -1 : 1
}
|
|
|
|
exports.rcompareIdentifiers = rcompareIdentifiers
// compareIdentifiers with the operands swapped (descending order).
function rcompareIdentifiers (a, b) {
  return compareIdentifiers(b, a)
}

exports.major = major
// Major component of `a` (SemVer throws when `a` is invalid).
function major (a, loose) {
  return new SemVer(a, loose).major
}

exports.minor = minor
// Minor component of `a`.
function minor (a, loose) {
  return new SemVer(a, loose).minor
}

exports.patch = patch
// Patch component of `a`.
function patch (a, loose) {
  return new SemVer(a, loose).patch
}

exports.compare = compare
// Total order over versions: negative, zero, or positive.
function compare (a, b, loose) {
  return new SemVer(a, loose).compare(new SemVer(b, loose))
}

exports.compareLoose = compareLoose
// compare() with loose parsing forced on.
function compareLoose (a, b) {
  return compare(a, b, true)
}

exports.rcompare = rcompare
// compare() with the operands swapped (descending order).
function rcompare (a, b, loose) {
  return compare(b, a, loose)
}

exports.sort = sort
// Sort `list` ascending; mutates in place (Array.prototype.sort semantics).
function sort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compare(a, b, loose)
  })
}

exports.rsort = rsort
// Sort `list` descending; mutates in place.
function rsort (list, loose) {
  return list.sort(function (a, b) {
    return exports.rcompare(a, b, loose)
  })
}
|
|
|
|
exports.gt = gt
// a > b
function gt (a, b, loose) {
  return compare(a, b, loose) > 0
}

exports.lt = lt
// a < b
function lt (a, b, loose) {
  return compare(a, b, loose) < 0
}

exports.eq = eq
// a == b in semver precedence terms (compare() looks only at
// major/minor/patch and prerelease identifiers).
function eq (a, b, loose) {
  return compare(a, b, loose) === 0
}

exports.neq = neq
// a != b
function neq (a, b, loose) {
  return compare(a, b, loose) !== 0
}

exports.gte = gte
// a >= b
function gte (a, b, loose) {
  return compare(a, b, loose) >= 0
}

exports.lte = lte
// a <= b
function lte (a, b, loose) {
  return compare(a, b, loose) <= 0
}
|
|
|
|
exports.cmp = cmp
// Evaluate `a <op> b`. '===' / '!==' compare the version strings literally
// (unwrapping SemVer objects); '', '=', '==', '!=', '>', '>=', '<', '<='
// delegate to the semver-precedence predicates. Any other operator throws.
function cmp (a, op, b, loose) {
  switch (op) {
    case '===':
      if (typeof a === 'object') { a = a.version }
      if (typeof b === 'object') { b = b.version }
      return a === b

    case '!==':
      if (typeof a === 'object') { a = a.version }
      if (typeof b === 'object') { b = b.version }
      return a !== b

    // '' and '=' are synonyms for '=='.
    case '':
    case '=':
    case '==':
      return eq(a, b, loose)

    case '!=':
      return neq(a, b, loose)

    case '>':
      return gt(a, b, loose)

    case '>=':
      return gte(a, b, loose)

    case '<':
      return lt(a, b, loose)

    case '<=':
      return lte(a, b, loose)

    default:
      throw new TypeError('Invalid operator: ' + op)
  }
}
|
|
|
|
exports.Comparator = Comparator
// A single comparator such as '>=1.2.3', '<2', or '' (matches anything).
// Callable with or without `new`. Passing an existing Comparator with the
// same looseness returns it unchanged.
function Comparator (comp, options) {
  if (!options || typeof options !== 'object') {
    // Legacy signature: a truthy non-object second argument means "loose".
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (comp instanceof Comparator) {
    if (comp.loose === !!options.loose) {
      return comp
    } else {
      // Different looseness: reparse from the string form.
      comp = comp.value
    }
  }

  if (!(this instanceof Comparator)) {
    return new Comparator(comp, options)
  }

  debug('comparator', comp, options)
  this.options = options
  this.loose = !!options.loose
  // Sets this.operator and this.semver (or throws on invalid input).
  this.parse(comp)

  if (this.semver === ANY) {
    this.value = ''
  } else {
    this.value = this.operator + this.semver.version
  }

  debug('comp', this)
}
|
|
|
|
// Sentinel semver meaning "any version"; always compared by identity.
var ANY = {}
// Split `comp` into this.operator ('', '<', '<=', '>', '>=') and
// this.semver (a SemVer instance, or ANY when no version part is present).
// Throws TypeError when `comp` does not match the comparator regexp.
Comparator.prototype.parse = function (comp) {
  var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
  var m = comp.match(r)

  if (!m) {
    throw new TypeError('Invalid comparator: ' + comp)
  }

  this.operator = m[1]
  // '=' is the implicit default operator, so normalize it away.
  if (this.operator === '=') {
    this.operator = ''
  }

  // if it literally is just '>' or '' then allow anything.
  if (!m[2]) {
    this.semver = ANY
  } else {
    this.semver = new SemVer(m[2], this.options.loose)
  }
}
|
|
|
|
Comparator.prototype.toString = function () {
  return this.value
}

// Does `version` (string or SemVer) satisfy this single comparator?
Comparator.prototype.test = function (version) {
  debug('Comparator.test', version, this.options.loose)

  // The empty/ANY comparator matches every version.
  if (this.semver === ANY) {
    return true
  }

  if (typeof version === 'string') {
    version = new SemVer(version, this.options)
  }

  return cmp(version, this.operator, this.semver, this.options)
}
|
|
|
|
// Do this comparator and `comp` admit at least one common version?
// Throws TypeError unless `comp` is a Comparator.
Comparator.prototype.intersects = function (comp, options) {
  if (!(comp instanceof Comparator)) {
    throw new TypeError('a Comparator is required')
  }

  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  var rangeTmp

  // An exact (operator-less) comparator intersects iff its pinned version
  // satisfies the other comparator, treated as a one-comparator range.
  if (this.operator === '') {
    rangeTmp = new Range(comp.value, options)
    return satisfies(this.value, rangeTmp, options)
  } else if (comp.operator === '') {
    rangeTmp = new Range(this.value, options)
    return satisfies(comp.semver, rangeTmp, options)
  }

  // Otherwise, enumerate the cases in which two half-bounded intervals
  // overlap: same direction always overlaps; opposite directions overlap
  // when the bounds cross (or touch with inclusive operators).
  var sameDirectionIncreasing =
    (this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '>=' || comp.operator === '>')
  var sameDirectionDecreasing =
    (this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '<=' || comp.operator === '<')
  var sameSemVer = this.semver.version === comp.semver.version
  var differentDirectionsInclusive =
    (this.operator === '>=' || this.operator === '<=') &&
    (comp.operator === '>=' || comp.operator === '<=')
  var oppositeDirectionsLessThan =
    cmp(this.semver, '<', comp.semver, options) &&
    ((this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '<=' || comp.operator === '<'))
  var oppositeDirectionsGreaterThan =
    cmp(this.semver, '>', comp.semver, options) &&
    ((this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '>=' || comp.operator === '>'))

  return sameDirectionIncreasing || sameDirectionDecreasing ||
    (sameSemVer && differentDirectionsInclusive) ||
    oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
|
|
|
|
exports.Range = Range
// A semver range: '||'-separated alternatives, each a space-separated set
// of comparators that must all hold. Callable with or without `new`.
// Throws TypeError when no alternative yields any comparators.
function Range (range, options) {
  if (!options || typeof options !== 'object') {
    // Legacy signature: a truthy non-object second argument means "loose".
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (range instanceof Range) {
    // Reuse only when both option flags match; otherwise reparse the raw
    // text under the new options.
    if (range.loose === !!options.loose &&
        range.includePrerelease === !!options.includePrerelease) {
      return range
    } else {
      return new Range(range.raw, options)
    }
  }

  if (range instanceof Comparator) {
    return new Range(range.value, options)
  }

  if (!(this instanceof Range)) {
    return new Range(range, options)
  }

  this.options = options
  this.loose = !!options.loose
  this.includePrerelease = !!options.includePrerelease

  // First, split based on boolean or ||
  this.raw = range
  this.set = range.split(/\s*\|\|\s*/).map(function (range) {
    return this.parseRange(range.trim())
  }, this).filter(function (c) {
    // throw out any that are not relevant for whatever reason
    return c.length
  })

  if (!this.set.length) {
    throw new TypeError('Invalid SemVer Range: ' + range)
  }

  this.format()
}
|
|
|
|
// Recompute this.range: each comparator set joined with spaces, the
// alternatives joined with '||'. Returns the cached string.
Range.prototype.format = function () {
  var alternatives = this.set.map(function (comps) {
    return comps.join(' ').trim()
  })
  this.range = alternatives.join('||').trim()
  return this.range
}

Range.prototype.toString = function () {
  return this.range
}
|
|
|
|
// Desugar one '||'-free range string into an array of Comparator objects.
// The rewrite pipeline order matters: hyphen ranges first, then whitespace
// trimming around operators, then (via parseComparator) carets, tildes,
// x-ranges, and stars.
Range.prototype.parseRange = function (range) {
  var loose = this.options.loose
  range = range.trim()
  // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
  var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
  range = range.replace(hr, hyphenReplace)
  debug('hyphen replace', range)
  // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
  range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
  debug('comparator trim', range, re[COMPARATORTRIM])

  // `~ 1.2.3` => `~1.2.3`
  range = range.replace(re[TILDETRIM], tildeTrimReplace)

  // `^ 1.2.3` => `^1.2.3`
  range = range.replace(re[CARETTRIM], caretTrimReplace)

  // normalize spaces
  range = range.split(/\s+/).join(' ')

  // At this point, the range is completely trimmed and
  // ready to be split into comparators.

  var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
  // Desugaring one token may yield several comparators (e.g. '^1.2.3'
  // becomes '>=1.2.3 <2.0.0'), hence the join/re-split.
  var set = range.split(' ').map(function (comp) {
    return parseComparator(comp, this.options)
  }, this).join(' ').split(/\s+/)
  if (this.options.loose) {
    // in loose mode, throw out any that are not valid comparators
    set = set.filter(function (comp) {
      return !!comp.match(compRe)
    })
  }
  set = set.map(function (comp) {
    return new Comparator(comp, this.options)
  }, this)

  return set
}
|
|
|
|
// Do the two ranges admit at least one common version? True when some
// alternative of this range and some alternative of `range` are pairwise
// compatible: every comparator of one must intersect every comparator of
// the other. Throws TypeError unless `range` is a Range.
Range.prototype.intersects = function (range, options) {
  if (!(range instanceof Range)) {
    throw new TypeError('a Range is required')
  }

  return this.set.some(function (mySet) {
    return mySet.every(function (mine) {
      return range.set.some(function (theirSet) {
        return theirSet.every(function (theirs) {
          return mine.intersects(theirs, options)
        })
      })
    })
  })
}

// Mostly just for testing and legacy API reasons
exports.toComparators = toComparators
// Flatten `range` into arrays of comparator strings, one array per
// '||'-separated alternative.
function toComparators (range, options) {
  return new Range(range, options).set.map(function (set) {
    return set.map(function (c) {
      return c.value
    }).join(' ').trim().split(' ')
  })
}
|
|
|
|
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
function parseComparator (comp, options) {
  debug('comp', comp, options)
  // Apply each desugaring stage in order, logging after every step.
  var stages = [
    ['caret', replaceCarets],
    ['tildes', replaceTildes],
    ['xrange', replaceXRanges],
    ['stars', replaceStars]
  ]
  stages.forEach(function (stage) {
    comp = stage[1](comp, options)
    debug(stage[0], comp)
  })
  return comp
}
|
|
|
|
// True when a version component is a wildcard: missing/empty, 'x' or 'X',
// or '*'.
function isX (id) {
  if (!id) {
    return true
  }
  return id.toLowerCase() === 'x' || id === '*'
}
|
|
|
|
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
//
// Desugar every whitespace-separated tilde token in `comp`.
function replaceTildes (comp, options) {
  return comp.trim().split(/\s+/).map(function (comp) {
    return replaceTilde(comp, options)
  }).join(' ')
}

// Desugar a single tilde comparator into plain >=/< comparators.
// The replacer receives major (M), minor (m), patch (p) and prerelease (pr)
// capture groups from the TILDE regexp.
function replaceTilde (comp, options) {
  var r = options.loose ? re[TILDELOOSE] : re[TILDE]
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('tilde', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      ret = ''
    } else if (isX(m)) {
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      // ~1.2 == >=1.2.0 <1.3.0
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    } else if (pr) {
      debug('replaceTilde pr', pr)
      ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
        ' <' + M + '.' + (+m + 1) + '.0'
    } else {
      // ~1.2.3 == >=1.2.3 <1.3.0
      ret = '>=' + M + '.' + m + '.' + p +
        ' <' + M + '.' + (+m + 1) + '.0'
    }

    debug('tilde return', ret)
    return ret
  })
}
|
|
|
|
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
//
// Desugar every whitespace-separated caret token in `comp`.
function replaceCarets (comp, options) {
  return comp.trim().split(/\s+/).map(function (comp) {
    return replaceCaret(comp, options)
  }).join(' ')
}

// Desugar one caret comparator. A caret pins the left-most non-zero
// component, so 0.x and 0.0.x ranges are much narrower than 1.x ones.
function replaceCaret (comp, options) {
  debug('caret', comp, options)
  var r = options.loose ? re[CARETLOOSE] : re[CARET]
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('caret', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      ret = ''
    } else if (isX(m)) {
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      if (M === '0') {
        // ^0.2 == >=0.2.0 <0.3.0
        ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
      } else {
        ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
      }
    } else if (pr) {
      debug('replaceCaret pr', pr)
      if (M === '0') {
        if (m === '0') {
          // ^0.0.3-pr == >=0.0.3-pr <0.0.4
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
            ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
            ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
          ' <' + (+M + 1) + '.0.0'
      }
    } else {
      debug('no pr')
      if (M === '0') {
        if (m === '0') {
          // ^0.0.3 == >=0.0.3 <0.0.4
          ret = '>=' + M + '.' + m + '.' + p +
            ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          ret = '>=' + M + '.' + m + '.' + p +
            ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        ret = '>=' + M + '.' + m + '.' + p +
          ' <' + (+M + 1) + '.0.0'
      }
    }

    debug('caret return', ret)
    return ret
  })
}
|
|
|
|
// Desugar every whitespace-separated x-range token (1.x, 1.2.*, >=1.x, …).
function replaceXRanges (comp, options) {
  debug('replaceXRanges', comp, options)
  return comp.split(/\s+/).map(function (comp) {
    return replaceXRange(comp, options)
  }).join(' ')
}

// Desugar one x-range comparator. The replacer's capture groups are the
// operator (gtlt), major (M), minor (m), patch (p), prerelease (pr); the
// M/m/p variables are rewritten in place as the bound is normalized.
function replaceXRange (comp, options) {
  comp = comp.trim()
  var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
  return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
    debug('xRange', comp, ret, gtlt, M, m, p, pr)
    var xM = isX(M)
    var xm = xM || isX(m)
    var xp = xm || isX(p)
    var anyX = xp

    // '=1.x' behaves the same as '1.x'.
    if (gtlt === '=' && anyX) {
      gtlt = ''
    }

    if (xM) {
      if (gtlt === '>' || gtlt === '<') {
        // nothing is allowed
        ret = '<0.0.0'
      } else {
        // nothing is forbidden
        ret = '*'
      }
    } else if (gtlt && anyX) {
      // we know patch is an x, because we have any x at all.
      // replace X with 0
      if (xm) {
        m = 0
      }
      p = 0

      if (gtlt === '>') {
        // >1 => >=2.0.0
        // >1.2 => >=1.3.0
        // >1.2.3 => >= 1.2.4
        gtlt = '>='
        if (xm) {
          M = +M + 1
          m = 0
          p = 0
        } else {
          m = +m + 1
          p = 0
        }
      } else if (gtlt === '<=') {
        // <=0.7.x is actually <0.8.0, since any 0.7.x should
        // pass. Similarly, <=7.x is actually <8.0.0, etc.
        gtlt = '<'
        if (xm) {
          M = +M + 1
        } else {
          m = +m + 1
        }
      }

      ret = gtlt + M + '.' + m + '.' + p
    } else if (xm) {
      // Bare '1' / '1.x' => the whole major series.
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (xp) {
      // Bare '1.2' / '1.2.x' => the whole minor series.
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    }

    debug('xRange return', ret)

    return ret
  })
}
|
|
|
|
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
// (The STAR regexp also swallows an optional leading <, > and/or =.)
function replaceStars (comp, options) {
  debug('replaceStars', comp, options)
  // Looseness is ignored here. star is always as loose as it gets!
  return comp.trim().replace(re[STAR], '')
}
|
|
|
|
// This function is passed to string.replace(re[HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) {
  // Lower bound: x-parts widen to the earliest matching release.
  var lower
  if (isX(fM)) {
    lower = ''
  } else if (isX(fm)) {
    lower = '>=' + fM + '.0.0'
  } else if (isX(fp)) {
    lower = '>=' + fM + '.' + fm + '.0'
  } else {
    lower = '>=' + from
  }

  // Upper bound: x-parts turn into an exclusive '<' on the next component;
  // a fully specified upper version stays inclusive ('<=').
  var upper
  if (isX(tM)) {
    upper = ''
  } else if (isX(tm)) {
    upper = '<' + (+tM + 1) + '.0.0'
  } else if (isX(tp)) {
    upper = '<' + tM + '.' + (+tm + 1) + '.0'
  } else if (tpr) {
    upper = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
  } else {
    upper = '<=' + to
  }

  return (lower + ' ' + upper).trim()
}
|
|
|
|
// if ANY of the sets match ALL of its comparators, then pass
Range.prototype.test = function (version) {
  if (!version) {
    return false
  }

  if (typeof version === 'string') {
    version = new SemVer(version, this.options)
  }

  // Alternatives are OR-ed: one fully satisfied comparator set suffices.
  var options = this.options
  return this.set.some(function (comparators) {
    return testSet(comparators, version, options)
  })
}
|
|
|
|
// Does `version` satisfy every comparator in `set`? Additionally enforces
// the semver rule that a prerelease version only matches when some
// comparator in the set explicitly mentions a prerelease of the same
// major.minor.patch tuple (unless options.includePrerelease is set).
function testSet (set, version, options) {
  for (var i = 0; i < set.length; i++) {
    if (!set[i].test(version)) {
      return false
    }
  }

  if (version.prerelease.length && !options.includePrerelease) {
    // Find the set of versions that are allowed to have prereleases
    // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
    // That should allow `1.2.3-pr.2` to pass.
    // However, `1.2.4-alpha.notready` should NOT be allowed,
    // even though it's within the range set by the comparators.
    for (i = 0; i < set.length; i++) {
      debug(set[i].semver)
      if (set[i].semver === ANY) {
        continue
      }

      if (set[i].semver.prerelease.length > 0) {
        var allowed = set[i].semver
        if (allowed.major === version.major &&
            allowed.minor === version.minor &&
            allowed.patch === version.patch) {
          return true
        }
      }
    }

    // Version has a -pre, but it's not one of the ones we like.
    return false
  }

  return true
}
|
|
|
|
exports.satisfies = satisfies
// True when `version` is inside `range`. An unparseable range matches
// nothing rather than throwing.
function satisfies (version, range, options) {
  var parsedRange
  try {
    parsedRange = new Range(range, options)
  } catch (er) {
    return false
  }
  return parsedRange.test(version)
}
|
|
|
|
exports.maxSatisfying = maxSatisfying
// Highest entry of `versions` that satisfies `range`, or null (also null
// when the range itself is invalid). Returns the original array element,
// not a SemVer instance.
function maxSatisfying (versions, range, options) {
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }

  var best = null
  var bestSV = null
  versions.forEach(function (v) {
    if (!rangeObj.test(v)) {
      return
    }
    // First hit, or strictly greater than the best so far.
    if (!best || bestSV.compare(v) === -1) {
      best = v
      bestSV = new SemVer(best, options)
    }
  })
  return best
}

exports.minSatisfying = minSatisfying
// Lowest entry of `versions` that satisfies `range`, or null (also null
// when the range itself is invalid).
function minSatisfying (versions, range, options) {
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }

  var best = null
  var bestSV = null
  versions.forEach(function (v) {
    if (!rangeObj.test(v)) {
      return
    }
    // First hit, or strictly less than the best so far.
    if (!best || bestSV.compare(v) === 1) {
      best = v
      bestSV = new SemVer(best, options)
    }
  })
  return best
}
|
|
|
|
exports.minVersion = minVersion
// Lowest SemVer that can possibly satisfy `range`, or null when the range
// admits no version found by this procedure.
function minVersion (range, loose) {
  range = new Range(range, loose)

  // Fast paths: the absolute floor, with and without a prerelease tag.
  var minver = new SemVer('0.0.0')
  if (range.test(minver)) {
    return minver
  }

  minver = new SemVer('0.0.0-0')
  if (range.test(minver)) {
    return minver
  }

  // Otherwise take the smallest lower bound across all comparators.
  minver = null
  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    comparators.forEach(function (comparator) {
      // Clone to avoid manipulating the comparator's semver object.
      var compver = new SemVer(comparator.semver.version)
      switch (comparator.operator) {
        case '>':
          // Exclusive lower bound: nudge to the next possible version,
          // then fall through and treat it like '>='.
          if (compver.prerelease.length === 0) {
            compver.patch++
          } else {
            compver.prerelease.push(0)
          }
          compver.raw = compver.format()
          /* fallthrough */
        case '':
        case '>=':
          if (!minver || gt(minver, compver)) {
            minver = compver
          }
          break
        case '<':
        case '<=':
          /* Ignore maximum versions */
          break
        /* istanbul ignore next */
        default:
          throw new Error('Unexpected operation: ' + comparator.operator)
      }
    })
  }

  // The candidate must still satisfy the whole range (upper bounds too).
  if (minver && range.test(minver)) {
    return minver
  }

  return null
}
|
|
|
|
exports.validRange = validRange
// Canonical form of `range` ('*' for the match-anything range, so the
// result is always truthy when valid), or null when invalid.
function validRange (range, options) {
  try {
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    return new Range(range, options).range || '*'
  } catch (er) {
    return null
  }
}

// Determine if version is less than all the versions possible in the range
exports.ltr = ltr
function ltr (version, range, options) {
  return outside(version, range, '<', options)
}

// Determine if version is greater than all the versions possible in the range.
exports.gtr = gtr
function gtr (version, range, options) {
  return outside(version, range, '>', options)
}
|
|
|
|
exports.outside = outside
// True when `version` lies entirely outside `range` on the `hilo` side:
// hilo '>' asks "is version above every version in range?" (gtr),
// hilo '<' asks "is it below every version in range?" (ltr).
// Throws TypeError for any other hilo value.
function outside (version, range, hilo, options) {
  version = new SemVer(version, options)
  range = new Range(range, options)

  // Select direction-specific comparison functions so the loop below can
  // be written once, in "gtr" terms; for '<' everything is mirrored.
  var gtfn, ltefn, ltfn, comp, ecomp
  switch (hilo) {
    case '>':
      gtfn = gt
      ltefn = lte
      ltfn = lt
      comp = '>'
      ecomp = '>='
      break
    case '<':
      gtfn = lt
      ltefn = gte
      ltfn = gt
      comp = '<'
      ecomp = '<='
      break
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"')
  }

  // If it satisifes the range it is not outside
  if (satisfies(version, range, options)) {
    return false
  }

  // From now on, variable terms are as if we're in "gtr" mode.
  // but note that everything is flipped for the "ltr" function.

  // `version` must be outside EVERY alternative set of the range.
  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    var high = null
    var low = null

    // Find the extreme comparators of this set.
    comparators.forEach(function (comparator) {
      if (comparator.semver === ANY) {
        comparator = new Comparator('>=0.0.0')
      }
      high = high || comparator
      low = low || comparator
      if (gtfn(comparator.semver, high.semver, options)) {
        high = comparator
      } else if (ltfn(comparator.semver, low.semver, options)) {
        low = comparator
      }
    })

    // If the edge version comparator has a operator then our version
    // isn't outside it
    if (high.operator === comp || high.operator === ecomp) {
      return false
    }

    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false
    }
  }
  return true
}
|
|
|
|
exports.prerelease = prerelease
// Prerelease identifier array of `version`, or null when the version has
// none or cannot be parsed.
function prerelease (version, options) {
  var parsed = parse(version, options)
  if (!parsed || !parsed.prerelease.length) {
    return null
  }
  return parsed.prerelease
}

exports.intersects = intersects
// True when ranges `r1` and `r2` share at least one satisfiable version.
function intersects (r1, r2, options) {
  r1 = new Range(r1, options)
  r2 = new Range(r2, options)
  return r1.intersects(r2)
}

exports.coerce = coerce
// Pull the first "major[.minor[.patch]]"-looking substring out of
// `version` (via re[COERCE]) and parse it, with missing components
// defaulting to 0. Returns null for non-strings with no match; an
// existing SemVer passes through.
function coerce (version) {
  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  var match = version.match(re[COERCE])

  if (match == null) {
    return null
  }

  var minorPart = match[2] || '0'
  var patchPart = match[3] || '0'
  return parse(match[1] + '.' + minorPart + '.' + patchPart)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 287:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript downlevel helper for async/await: drives a generator emitted in
// place of an async function body, resolving the returned promise with the
// generator's final value and rejecting on any thrown error.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a plain yielded value in the promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator one step, chaining each yielded promise.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// TypeScript helper emulating `import * as ns` against CommonJS modules.
// A genuine ES module (marked __esModule) is returned unchanged; otherwise
// the module's own enumerable properties are copied onto a fresh namespace
// object whose "default" property is the module itself.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var ns = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                ns[key] = mod[key];
            }
        }
    }
    ns["default"] = mod;
    return ns;
};
|
|
// TypeScript helper emulating `import x from ...` against CommonJS modules:
// non-ES modules are wrapped in a { default: ... } shell.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const assert = __importStar(__webpack_require__(357));
|
|
const core = __importStar(__webpack_require__(470));
|
|
const exec = __importStar(__webpack_require__(986));
|
|
const fs = __importStar(__webpack_require__(747));
|
|
const io = __importStar(__webpack_require__(1));
|
|
const os = __importStar(__webpack_require__(87));
|
|
const path = __importStar(__webpack_require__(622));
|
|
const regexpHelper = __importStar(__webpack_require__(528));
|
|
const stateHelper = __importStar(__webpack_require__(153));
|
|
const urlHelper = __importStar(__webpack_require__(81));
|
|
const v4_1 = __importDefault(__webpack_require__(826));
|
|
// True when running on a Windows runner (drives the icacls permission fixup
// applied to the written SSH key file).
const IS_WINDOWS = process.platform === 'win32';
// Git config key used to persist the generated SSH command for later steps.
const SSH_COMMAND_KEY = 'core.sshCommand';
|
|
/**
 * Factory for GitAuthHelper instances.
 * @param git the GitCommandManager used to run git config commands
 * @param settings the action's source settings (token, SSH options, ...)
 * @returns a new GitAuthHelper bound to the given manager and settings
 */
function createAuthHelper(git, settings) {
    return new GitAuthHelper(git, settings);
}
exports.createAuthHelper = createAuthHelper;
|
|
/**
 * Configures and removes the authentication git needs during checkout:
 *  - an `http.<origin>/.extraheader` AUTHORIZATION header carrying the token
 *  - optional SSH key + known_hosts files and a GIT_SSH_COMMAND override
 *  - `url.<origin>/.insteadOf` rewrites so SSH URLs are fetched over HTTPS
 * The token is first written as a `***` placeholder via `git config` and then
 * substituted directly inside the config file, so the real credential never
 * appears on a process command line (which may be audit-logged).
 */
class GitAuthHelper {
    /**
     * @param gitCommandManager wrapper used to execute git commands
     * @param gitSourceSettings action settings (authToken, sshKey, sshKnownHosts,
     *   sshStrict, persistCredentials, nestedSubmodules); may be undefined
     */
    constructor(gitCommandManager, gitSourceSettings) {
        this.sshCommand = '';
        this.sshKeyPath = '';
        this.sshKnownHostsPath = '';
        this.temporaryHomePath = '';
        this.git = gitCommandManager;
        this.settings = gitSourceSettings || {};
        // Token auth header
        const serverUrl = urlHelper.getServerUrl();
        this.tokenConfigKey = `http.${serverUrl.origin}/.extraheader`; // "origin" is SCHEME://HOSTNAME[:PORT]
        const basicCredential = Buffer.from(`x-access-token:${this.settings.authToken}`, 'utf8').toString('base64');
        // Register the encoded credential with the runner so it is masked in logs.
        core.setSecret(basicCredential);
        this.tokenPlaceholderConfigValue = `AUTHORIZATION: basic ***`;
        this.tokenConfigValue = `AUTHORIZATION: basic ${basicCredential}`;
        // Instead of SSH URL
        this.insteadOfKey = `url.${serverUrl.origin}/.insteadOf`; // "origin" is SCHEME://HOSTNAME[:PORT]
        this.insteadOfValue = `git@${serverUrl.hostname}:`;
    }
    // Removes any previous auth config, then configures SSH and the token header.
    configureAuth() {
        return __awaiter(this, void 0, void 0, function* () {
            // Remove possible previous values
            yield this.removeAuth();
            // Configure new values
            yield this.configureSsh();
            yield this.configureToken();
        });
    }
    // Points HOME at a temp directory holding a copy of the global .gitconfig,
    // then writes the token (and insteadOf rewrite) there so "global" config
    // changes never touch the user's real config.
    configureGlobalAuth() {
        return __awaiter(this, void 0, void 0, function* () {
            // Create a temp home directory
            const runnerTemp = process.env['RUNNER_TEMP'] || '';
            assert.ok(runnerTemp, 'RUNNER_TEMP is not defined');
            const uniqueId = v4_1.default();
            this.temporaryHomePath = path.join(runnerTemp, uniqueId);
            yield fs.promises.mkdir(this.temporaryHomePath, { recursive: true });
            // Copy the global git config
            const gitConfigPath = path.join(process.env['HOME'] || os.homedir(), '.gitconfig');
            const newGitConfigPath = path.join(this.temporaryHomePath, '.gitconfig');
            let configExists = false;
            try {
                yield fs.promises.stat(gitConfigPath);
                configExists = true;
            }
            catch (err) {
                // A missing global config is fine; anything else is fatal.
                if (err.code !== 'ENOENT') {
                    throw err;
                }
            }
            if (configExists) {
                core.info(`Copying '${gitConfigPath}' to '${newGitConfigPath}'`);
                yield io.cp(gitConfigPath, newGitConfigPath);
            }
            else {
                yield fs.promises.writeFile(newGitConfigPath, '');
            }
            try {
                // Override HOME
                core.info(`Temporarily overriding HOME='${this.temporaryHomePath}' before making global git config changes`);
                this.git.setEnvironmentVariable('HOME', this.temporaryHomePath);
                // Configure the token
                yield this.configureToken(newGitConfigPath, true);
                // Configure HTTPS instead of SSH
                yield this.git.tryConfigUnset(this.insteadOfKey, true);
                if (!this.settings.sshKey) {
                    yield this.git.config(this.insteadOfKey, this.insteadOfValue, true);
                }
            }
            catch (err) {
                // Unset in case somehow written to the real global config
                core.info('Encountered an error when attempting to configure token. Attempting unconfigure.');
                yield this.git.tryConfigUnset(this.tokenConfigKey, true);
                throw err;
            }
        });
    }
    // Configures auth for each submodule (recursively when nestedSubmodules is
    // set): token header plus either core.sshCommand or an insteadOf rewrite.
    configureSubmoduleAuth() {
        return __awaiter(this, void 0, void 0, function* () {
            // Remove possible previous HTTPS instead of SSH
            yield this.removeGitConfig(this.insteadOfKey, true);
            if (this.settings.persistCredentials) {
                // Configure a placeholder value. This approach avoids the credential being captured
                // by process creation audit events, which are commonly logged. For more information,
                // refer to https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/manage/component-updates/command-line-process-auditing
                const output = yield this.git.submoduleForeach(`git config --local '${this.tokenConfigKey}' '${this.tokenPlaceholderConfigValue}' && git config --local --show-origin --name-only --get-regexp remote.origin.url`, this.settings.nestedSubmodules);
                // Replace the placeholder
                const configPaths = output.match(/(?<=(^|\n)file:)[^\t]+(?=\tremote\.origin\.url)/g) || [];
                for (const configPath of configPaths) {
                    core.debug(`Replacing token placeholder in '${configPath}'`);
                    // Fix: yield the promise. Previously this call was not yielded, so a
                    // failed file rewrite became an unhandled rejection and the later
                    // submodule config steps could run before the credential was written.
                    yield this.replaceTokenPlaceholder(configPath);
                }
                if (this.settings.sshKey) {
                    // Configure core.sshCommand
                    yield this.git.submoduleForeach(`git config --local '${SSH_COMMAND_KEY}' '${this.sshCommand}'`, this.settings.nestedSubmodules);
                }
                else {
                    // Configure HTTPS instead of SSH
                    yield this.git.submoduleForeach(`git config --local '${this.insteadOfKey}' '${this.insteadOfValue}'`, this.settings.nestedSubmodules);
                }
            }
        });
    }
    // Removes the SSH and token configuration created by configureAuth().
    removeAuth() {
        return __awaiter(this, void 0, void 0, function* () {
            yield this.removeSsh();
            yield this.removeToken();
        });
    }
    // Restores HOME and deletes the temporary home directory created by
    // configureGlobalAuth().
    removeGlobalAuth() {
        return __awaiter(this, void 0, void 0, function* () {
            core.debug(`Unsetting HOME override`);
            this.git.removeEnvironmentVariable('HOME');
            yield io.rmRF(this.temporaryHomePath);
        });
    }
    // Writes the SSH key and known_hosts files under RUNNER_TEMP and points
    // GIT_SSH_COMMAND (and optionally core.sshCommand) at them. No-op when
    // no sshKey setting was provided.
    configureSsh() {
        return __awaiter(this, void 0, void 0, function* () {
            if (!this.settings.sshKey) {
                return;
            }
            // Write key
            const runnerTemp = process.env['RUNNER_TEMP'] || '';
            assert.ok(runnerTemp, 'RUNNER_TEMP is not defined');
            const uniqueId = v4_1.default();
            this.sshKeyPath = path.join(runnerTemp, uniqueId);
            // Record the path so the POST action can clean it up.
            stateHelper.setSshKeyPath(this.sshKeyPath);
            yield fs.promises.mkdir(runnerTemp, { recursive: true });
            yield fs.promises.writeFile(this.sshKeyPath, this.settings.sshKey.trim() + '\n', { mode: 0o600 });
            // Remove inherited permissions on Windows
            if (IS_WINDOWS) {
                const icacls = yield io.which('icacls.exe');
                yield exec.exec(`"${icacls}" "${this.sshKeyPath}" /grant:r "${process.env['USERDOMAIN']}\\${process.env['USERNAME']}:F"`);
                yield exec.exec(`"${icacls}" "${this.sshKeyPath}" /inheritance:r`);
            }
            // Write known hosts
            const userKnownHostsPath = path.join(os.homedir(), '.ssh', 'known_hosts');
            let userKnownHosts = '';
            try {
                userKnownHosts = (yield fs.promises.readFile(userKnownHostsPath)).toString();
            }
            catch (err) {
                // A missing user known_hosts file is fine; anything else is fatal.
                if (err.code !== 'ENOENT') {
                    throw err;
                }
            }
            let knownHosts = '';
            if (userKnownHosts) {
                knownHosts += `# Begin from ${userKnownHostsPath}\n${userKnownHosts}\n# End from ${userKnownHostsPath}\n`;
            }
            if (this.settings.sshKnownHosts) {
                knownHosts += `# Begin from input known hosts\n${this.settings.sshKnownHosts}\n# end from input known hosts\n`;
            }
            knownHosts += `# Begin implicitly added github.com\ngithub.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==\n# End implicitly added github.com\n`;
            this.sshKnownHostsPath = path.join(runnerTemp, `${uniqueId}_known_hosts`);
            stateHelper.setSshKnownHostsPath(this.sshKnownHostsPath);
            yield fs.promises.writeFile(this.sshKnownHostsPath, knownHosts);
            // Configure GIT_SSH_COMMAND
            const sshPath = yield io.which('ssh', true);
            // $RUNNER_TEMP is left for the shell to expand so the persisted command
            // keeps working even if the temp directory path differs per step.
            this.sshCommand = `"${sshPath}" -i "$RUNNER_TEMP/${path.basename(this.sshKeyPath)}"`;
            if (this.settings.sshStrict) {
                this.sshCommand += ' -o StrictHostKeyChecking=yes -o CheckHostIP=no';
            }
            this.sshCommand += ` -o "UserKnownHostsFile=$RUNNER_TEMP/${path.basename(this.sshKnownHostsPath)}"`;
            core.info(`Temporarily overriding GIT_SSH_COMMAND=${this.sshCommand}`);
            this.git.setEnvironmentVariable('GIT_SSH_COMMAND', this.sshCommand);
            // Configure core.sshCommand
            if (this.settings.persistCredentials) {
                yield this.git.config(SSH_COMMAND_KEY, this.sshCommand);
            }
        });
    }
    // Writes the AUTHORIZATION header into the local (or supplied) git config.
    // Either both configPath and globalConfig are supplied, or neither is.
    configureToken(configPath, globalConfig) {
        return __awaiter(this, void 0, void 0, function* () {
            // Validate args
            assert.ok((configPath && globalConfig) || (!configPath && !globalConfig), 'Unexpected configureToken parameter combinations');
            // Default config path
            if (!configPath && !globalConfig) {
                configPath = path.join(this.git.getWorkingDirectory(), '.git', 'config');
            }
            // Configure a placeholder value. This approach avoids the credential being captured
            // by process creation audit events, which are commonly logged. For more information,
            // refer to https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/manage/component-updates/command-line-process-auditing
            yield this.git.config(this.tokenConfigKey, this.tokenPlaceholderConfigValue, globalConfig);
            // Replace the placeholder
            yield this.replaceTokenPlaceholder(configPath || '');
        });
    }
    // Replaces the single `***` placeholder inside the config file with the
    // real base64 credential. Throws if the placeholder is absent or not unique.
    replaceTokenPlaceholder(configPath) {
        return __awaiter(this, void 0, void 0, function* () {
            assert.ok(configPath, 'configPath is not defined');
            let content = (yield fs.promises.readFile(configPath)).toString();
            const placeholderIndex = content.indexOf(this.tokenPlaceholderConfigValue);
            if (placeholderIndex < 0 ||
                placeholderIndex != content.lastIndexOf(this.tokenPlaceholderConfigValue)) {
                throw new Error(`Unable to replace auth placeholder in ${configPath}`);
            }
            assert.ok(this.tokenConfigValue, 'tokenConfigValue is not defined');
            content = content.replace(this.tokenPlaceholderConfigValue, this.tokenConfigValue);
            yield fs.promises.writeFile(configPath, content);
        });
    }
    // Deletes the SSH key / known_hosts files (best effort) and unsets the
    // persisted SSH command everywhere, including submodules.
    removeSsh() {
        return __awaiter(this, void 0, void 0, function* () {
            // SSH key
            const keyPath = this.sshKeyPath || stateHelper.SshKeyPath;
            if (keyPath) {
                try {
                    yield io.rmRF(keyPath);
                }
                catch (err) {
                    // Best effort: warn but do not fail the POST cleanup.
                    core.debug(err.message);
                    core.warning(`Failed to remove SSH key '${keyPath}'`);
                }
            }
            // SSH known hosts
            const knownHostsPath = this.sshKnownHostsPath || stateHelper.SshKnownHostsPath;
            if (knownHostsPath) {
                try {
                    yield io.rmRF(knownHostsPath);
                }
                catch (_a) {
                    // Intentionally empty
                }
            }
            // SSH command
            yield this.removeGitConfig(SSH_COMMAND_KEY);
        });
    }
    // Unsets the AUTHORIZATION extraheader everywhere, including submodules.
    removeToken() {
        return __awaiter(this, void 0, void 0, function* () {
            // HTTP extra header
            yield this.removeGitConfig(this.tokenConfigKey);
        });
    }
    // Unsets a config key from the local config (unless submoduleOnly) and from
    // every submodule's local config.
    removeGitConfig(configKey, submoduleOnly = false) {
        return __awaiter(this, void 0, void 0, function* () {
            if (!submoduleOnly) {
                if ((yield this.git.configExists(configKey)) &&
                    !(yield this.git.tryConfigUnset(configKey))) {
                    // Load the config contents
                    core.warning(`Failed to remove '${configKey}' from the git config`);
                }
            }
            const pattern = regexpHelper.escape(configKey);
            // `|| :` keeps the foreach loop succeeding for submodules without the key.
            yield this.git.submoduleForeach(`git config --local --name-only --get-regexp '${pattern}' && git config --local --unset-all '${configKey}' || :`, true);
        });
    }
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 289:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript downlevel helper for async/await: drives a generator emitted in
// place of an async function body, resolving the returned promise with the
// generator's final value and rejecting on any thrown error.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a plain yielded value in the promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator one step, chaining each yielded promise.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// TypeScript helper emulating `import * as ns` against CommonJS modules.
// A genuine ES module (marked __esModule) is returned unchanged; otherwise
// the module's own enumerable properties are copied onto a fresh namespace
// object whose "default" property is the module itself.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var ns = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                ns[key] = mod[key];
            }
        }
    }
    ns["default"] = mod;
    return ns;
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const core = __importStar(__webpack_require__(470));
|
|
const exec = __importStar(__webpack_require__(986));
|
|
const fshelper = __importStar(__webpack_require__(618));
|
|
const io = __importStar(__webpack_require__(1));
|
|
const path = __importStar(__webpack_require__(622));
|
|
const refHelper = __importStar(__webpack_require__(227));
|
|
const regexpHelper = __importStar(__webpack_require__(528));
|
|
const retryHelper = __importStar(__webpack_require__(587));
|
|
const git_version_1 = __webpack_require__(559);
|
|
// Minimum git version this action supports for a real local checkout.
// Auth header not supported before 2.9
// Wire protocol v2 not supported before 2.18
exports.MinimumGitVersion = new git_version_1.GitVersion('2.18');
|
|
/**
 * Creates and asynchronously initializes a GitCommandManager.
 * @param workingDirectory directory in which git commands will run
 * @param lfs whether git-lfs support is required (validated during init)
 * @returns Promise resolving to the initialized manager
 */
function createCommandManager(workingDirectory, lfs) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield GitCommandManager.createCommandManager(workingDirectory, lfs);
    });
}
exports.createCommandManager = createCommandManager;
|
|
/**
 * Thin wrapper around the `git` (and `git-lfs`) command line. Every operation
 * shells out via execGit(); instance state is limited to the working
 * directory, the resolved git path, and extra environment variables applied
 * to each child process.
 */
class GitCommandManager {
    // Private constructor; use createCommandManager()
    constructor() {
        // Extra environment for every git invocation.
        this.gitEnv = {
            GIT_TERMINAL_PROMPT: '0',
            GCM_INTERACTIVE: 'Never' // Disable prompting for git credential manager
        };
        this.gitPath = '';
        this.lfs = false;
        this.workingDirectory = '';
    }
    // Force-deletes a local branch, or a remote-tracking ref when `remote` is set.
    branchDelete(remote, branch) {
        return __awaiter(this, void 0, void 0, function* () {
            const args = ['branch', '--delete', '--force'];
            if (remote) {
                args.push('--remote');
            }
            args.push(branch);
            yield this.execGit(args);
        });
    }
    // True when `git branch --list [--remote] <pattern>` produces any output.
    branchExists(remote, pattern) {
        return __awaiter(this, void 0, void 0, function* () {
            const args = ['branch', '--list'];
            if (remote) {
                args.push('--remote');
            }
            args.push(pattern);
            const output = yield this.execGit(args);
            return !!output.stdout.trim();
        });
    }
    // Lists branch names (origin remote-tracking when `remote` is set), with
    // the refs/heads/ or refs/remotes/ prefix stripped.
    branchList(remote) {
        return __awaiter(this, void 0, void 0, function* () {
            const result = [];
            // Note, this implementation uses "rev-parse --symbolic-full-name" because the output from
            // "branch --list" is more difficult when in a detached HEAD state.
            // Note, this implementation uses "rev-parse --symbolic-full-name" because there is a bug
            // in Git 2.18 that causes "rev-parse --symbolic" to output symbolic full names.
            const args = ['rev-parse', '--symbolic-full-name'];
            if (remote) {
                args.push('--remotes=origin');
            }
            else {
                args.push('--branches');
            }
            const output = yield this.execGit(args);
            for (let branch of output.stdout.trim().split('\n')) {
                branch = branch.trim();
                if (branch) {
                    if (branch.startsWith('refs/heads/')) {
                        branch = branch.substr('refs/heads/'.length);
                    }
                    else if (branch.startsWith('refs/remotes/')) {
                        branch = branch.substr('refs/remotes/'.length);
                    }
                    result.push(branch);
                }
            }
            return result;
        });
    }
    // Checks out `ref`; when `startPoint` is given, (re)creates the branch at
    // that start point via `checkout -B`.
    checkout(ref, startPoint) {
        return __awaiter(this, void 0, void 0, function* () {
            const args = ['checkout', '--progress', '--force'];
            if (startPoint) {
                args.push('-B', ref, startPoint);
            }
            else {
                args.push(ref);
            }
            yield this.execGit(args);
        });
    }
    // Detaches HEAD at the current commit.
    checkoutDetach() {
        return __awaiter(this, void 0, void 0, function* () {
            const args = ['checkout', '--detach'];
            yield this.execGit(args);
        });
    }
    // Sets a git config value in the local (or global) scope.
    config(configKey, configValue, globalConfig) {
        return __awaiter(this, void 0, void 0, function* () {
            yield this.execGit([
                'config',
                globalConfig ? '--global' : '--local',
                configKey,
                configValue
            ]);
        });
    }
    // True when the key exists in the chosen scope (exit code 0 from
    // `git config --name-only --get-regexp`).
    configExists(configKey, globalConfig) {
        return __awaiter(this, void 0, void 0, function* () {
            const pattern = regexpHelper.escape(configKey);
            const output = yield this.execGit([
                'config',
                globalConfig ? '--global' : '--local',
                '--name-only',
                '--get-regexp',
                pattern
            ], true);
            return output.exitCode === 0;
        });
    }
    // Fetches the given refspecs from origin (wire protocol v2), pruning and
    // skipping tags unless the tags refspec is requested; retried on failure.
    fetch(refSpec, fetchDepth) {
        return __awaiter(this, void 0, void 0, function* () {
            const args = ['-c', 'protocol.version=2', 'fetch'];
            if (!refSpec.some(x => x === refHelper.tagsRefSpec)) {
                args.push('--no-tags');
            }
            args.push('--prune', '--progress', '--no-recurse-submodules');
            if (fetchDepth && fetchDepth > 0) {
                args.push(`--depth=${fetchDepth}`);
            }
            else if (fshelper.fileExistsSync(path.join(this.workingDirectory, '.git', 'shallow'))) {
                // Convert an existing shallow clone to a full one.
                args.push('--unshallow');
            }
            args.push('origin');
            for (const arg of refSpec) {
                args.push(arg);
            }
            const that = this;
            yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
                yield that.execGit(args);
            }));
        });
    }
    // Resolves the remote's default branch from `ls-remote --symref ... HEAD`;
    // retried on failure. Throws when the symref line cannot be found.
    getDefaultBranch(repositoryUrl) {
        return __awaiter(this, void 0, void 0, function* () {
            let output;
            yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
                output = yield this.execGit([
                    'ls-remote',
                    '--quiet',
                    '--exit-code',
                    '--symref',
                    repositoryUrl,
                    'HEAD'
                ]);
            }));
            if (output) {
                // Satisfy compiler, will always be set
                for (let line of output.stdout.trim().split('\n')) {
                    line = line.trim();
                    if (line.startsWith('ref:') || line.endsWith('HEAD')) {
                        // Strip the leading "ref:" and trailing "HEAD" tokens.
                        return line
                            .substr('ref:'.length, line.length - 'ref:'.length - 'HEAD'.length)
                            .trim();
                    }
                }
            }
            throw new Error('Unexpected output when retrieving default branch');
        });
    }
    // Returns the working directory git commands run in.
    getWorkingDirectory() {
        return this.workingDirectory;
    }
    // Initializes a new repository in the working directory.
    init() {
        return __awaiter(this, void 0, void 0, function* () {
            yield this.execGit(['init', this.workingDirectory]);
        });
    }
    // True when HEAD is not on a branch (detached).
    isDetached() {
        return __awaiter(this, void 0, void 0, function* () {
            // Note, "branch --show-current" would be simpler but isn't available until Git 2.22
            const output = yield this.execGit(['rev-parse', '--symbolic-full-name', '--verify', '--quiet', 'HEAD'], true);
            return !output.stdout.trim().startsWith('refs/heads/');
        });
    }
    // Fetches LFS objects for the given ref from origin; retried on failure.
    lfsFetch(ref) {
        return __awaiter(this, void 0, void 0, function* () {
            const args = ['lfs', 'fetch', 'origin', ref];
            const that = this;
            yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
                yield that.execGit(args);
            }));
        });
    }
    // Installs git-lfs hooks in the local repository.
    lfsInstall() {
        return __awaiter(this, void 0, void 0, function* () {
            yield this.execGit(['lfs', 'install', '--local']);
        });
    }
    // Returns the `git log -1` output for the current HEAD.
    log1() {
        return __awaiter(this, void 0, void 0, function* () {
            const output = yield this.execGit(['log', '-1']);
            return output.stdout;
        });
    }
    // Adds a named remote pointing at the given URL.
    remoteAdd(remoteName, remoteUrl) {
        return __awaiter(this, void 0, void 0, function* () {
            yield this.execGit(['remote', 'add', remoteName, remoteUrl]);
        });
    }
    // Removes an environment variable from future git invocations.
    removeEnvironmentVariable(name) {
        delete this.gitEnv[name];
    }
    /**
     * Resolves a ref to a SHA. For a branch or lightweight tag, the commit SHA is returned.
     * For an annotated tag, the tag SHA is returned.
     * @param {string} ref  For example: 'refs/heads/main' or '/refs/tags/v1'
     * @returns {Promise<string>}
     */
    revParse(ref) {
        return __awaiter(this, void 0, void 0, function* () {
            const output = yield this.execGit(['rev-parse', ref]);
            return output.stdout.trim();
        });
    }
    // Sets an environment variable for future git invocations.
    setEnvironmentVariable(name, value) {
        this.gitEnv[name] = value;
    }
    // True when the SHA names an existing object in the repository.
    shaExists(sha) {
        return __awaiter(this, void 0, void 0, function* () {
            const args = ['rev-parse', '--verify', '--quiet', `${sha}^{object}`];
            const output = yield this.execGit(args, true);
            return output.exitCode === 0;
        });
    }
    // Runs a shell command in each submodule via `git submodule foreach`,
    // optionally recursing into nested submodules; returns combined stdout.
    submoduleForeach(command, recursive) {
        return __awaiter(this, void 0, void 0, function* () {
            const args = ['submodule', 'foreach'];
            if (recursive) {
                args.push('--recursive');
            }
            args.push(command);
            const output = yield this.execGit(args);
            return output.stdout;
        });
    }
    // Synchronizes submodule URLs into the local config.
    submoduleSync(recursive) {
        return __awaiter(this, void 0, void 0, function* () {
            const args = ['submodule', 'sync'];
            if (recursive) {
                args.push('--recursive');
            }
            yield this.execGit(args);
        });
    }
    // Initializes/updates submodules (wire protocol v2), optionally shallow
    // and/or recursive.
    submoduleUpdate(fetchDepth, recursive) {
        return __awaiter(this, void 0, void 0, function* () {
            const args = ['-c', 'protocol.version=2'];
            args.push('submodule', 'update', '--init', '--force');
            if (fetchDepth > 0) {
                args.push(`--depth=${fetchDepth}`);
            }
            if (recursive) {
                args.push('--recursive');
            }
            yield this.execGit(args);
        });
    }
    // True when `git tag --list <pattern>` produces any output.
    tagExists(pattern) {
        return __awaiter(this, void 0, void 0, function* () {
            const output = yield this.execGit(['tag', '--list', pattern]);
            return !!output.stdout.trim();
        });
    }
    // Removes untracked/ignored files and directories; true on success.
    tryClean() {
        return __awaiter(this, void 0, void 0, function* () {
            const output = yield this.execGit(['clean', '-ffdx'], true);
            return output.exitCode === 0;
        });
    }
    // Unsets all values of a config key in the chosen scope; true on success.
    tryConfigUnset(configKey, globalConfig) {
        return __awaiter(this, void 0, void 0, function* () {
            const output = yield this.execGit([
                'config',
                globalConfig ? '--global' : '--local',
                '--unset-all',
                configKey
            ], true);
            return output.exitCode === 0;
        });
    }
    // Sets gc.auto=0 so fetches don't trigger garbage collection; true on success.
    tryDisableAutomaticGarbageCollection() {
        return __awaiter(this, void 0, void 0, function* () {
            const output = yield this.execGit(['config', '--local', 'gc.auto', '0'], true);
            return output.exitCode === 0;
        });
    }
    // Returns the configured origin fetch URL, or '' when missing/ambiguous.
    tryGetFetchUrl() {
        return __awaiter(this, void 0, void 0, function* () {
            const output = yield this.execGit(['config', '--local', '--get', 'remote.origin.url'], true);
            if (output.exitCode !== 0) {
                return '';
            }
            const stdout = output.stdout.trim();
            if (stdout.includes('\n')) {
                // Multiple values configured; treat as unknown.
                return '';
            }
            return stdout;
        });
    }
    // Hard-resets the working tree to HEAD; true on success.
    tryReset() {
        return __awaiter(this, void 0, void 0, function* () {
            const output = yield this.execGit(['reset', '--hard', 'HEAD'], true);
            return output.exitCode === 0;
        });
    }
    // Async factory: constructs and initializes a manager in one step.
    static createCommandManager(workingDirectory, lfs) {
        return __awaiter(this, void 0, void 0, function* () {
            const result = new GitCommandManager();
            yield result.initializeCommandManager(workingDirectory, lfs);
            return result;
        });
    }
    // Runs git with the given args in the working directory, capturing stdout.
    // When allowAllExitCodes is false (default), a non-zero exit throws.
    execGit(args, allowAllExitCodes = false) {
        return __awaiter(this, void 0, void 0, function* () {
            fshelper.directoryExistsSync(this.workingDirectory, true);
            const result = new GitOutput();
            // Merge the process environment with the git-specific overrides.
            const env = {};
            for (const key of Object.keys(process.env)) {
                env[key] = process.env[key];
            }
            for (const key of Object.keys(this.gitEnv)) {
                env[key] = this.gitEnv[key];
            }
            const stdout = [];
            const options = {
                cwd: this.workingDirectory,
                env,
                ignoreReturnCode: allowAllExitCodes,
                listeners: {
                    stdout: (data) => {
                        stdout.push(data.toString());
                    }
                }
            };
            result.exitCode = yield exec.exec(`"${this.gitPath}"`, args, options);
            result.stdout = stdout.join('');
            return result;
        });
    }
    // Locates git (and git-lfs when requested), validates minimum versions,
    // and prepares the per-process git environment.
    initializeCommandManager(workingDirectory, lfs) {
        return __awaiter(this, void 0, void 0, function* () {
            this.workingDirectory = workingDirectory;
            // Git-lfs will try to pull down assets if any of the local/user/system setting exist.
            // If the user didn't enable `LFS` in their pipeline definition, disable LFS fetch/checkout.
            this.lfs = lfs;
            if (!this.lfs) {
                this.gitEnv['GIT_LFS_SKIP_SMUDGE'] = '1';
            }
            this.gitPath = yield io.which('git', true);
            // Git version
            core.debug('Getting git version');
            let gitVersion = new git_version_1.GitVersion();
            let gitOutput = yield this.execGit(['version']);
            let stdout = gitOutput.stdout.trim();
            if (!stdout.includes('\n')) {
                const match = stdout.match(/\d+\.\d+(\.\d+)?/);
                if (match) {
                    gitVersion = new git_version_1.GitVersion(match[0]);
                }
            }
            if (!gitVersion.isValid()) {
                throw new Error('Unable to determine git version');
            }
            // Minimum git version
            if (!gitVersion.checkMinimum(exports.MinimumGitVersion)) {
                throw new Error(`Minimum required git version is ${exports.MinimumGitVersion}. Your git ('${this.gitPath}') is ${gitVersion}`);
            }
            if (this.lfs) {
                // Git-lfs version
                core.debug('Getting git-lfs version');
                let gitLfsVersion = new git_version_1.GitVersion();
                const gitLfsPath = yield io.which('git-lfs', true);
                gitOutput = yield this.execGit(['lfs', 'version']);
                stdout = gitOutput.stdout.trim();
                if (!stdout.includes('\n')) {
                    const match = stdout.match(/\d+\.\d+(\.\d+)?/);
                    if (match) {
                        gitLfsVersion = new git_version_1.GitVersion(match[0]);
                    }
                }
                if (!gitLfsVersion.isValid()) {
                    throw new Error('Unable to determine git-lfs version');
                }
                // Minimum git-lfs version
                // Note:
                // - Auth header not supported before 2.1
                const minimumGitLfsVersion = new git_version_1.GitVersion('2.1');
                if (!gitLfsVersion.checkMinimum(minimumGitLfsVersion)) {
                    throw new Error(`Minimum required git-lfs version is ${minimumGitLfsVersion}. Your git-lfs ('${gitLfsPath}') is ${gitLfsVersion}`);
                }
            }
            // Set the user agent
            const gitHttpUserAgent = `git/${gitVersion} (github-actions-checkout)`;
            core.debug(`Set git useragent to: ${gitHttpUserAgent}`);
            this.gitEnv['GIT_HTTP_USER_AGENT'] = gitHttpUserAgent;
        });
    }
}
|
|
// Simple value object holding the result of a single git invocation:
// the captured standard output and the process exit code (0 on success).
class GitOutput {
    constructor() {
        Object.assign(this, { stdout: '', exitCode: 0 });
    }
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 293:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript downlevel helper for async/await: drives a generator emitted in
// place of an async function body, resolving the returned promise with the
// generator's final value and rejecting on any thrown error.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a plain yielded value in the promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator one step, chaining each yielded promise.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// TypeScript helper emulating `import * as ns` against CommonJS modules.
// A genuine ES module (marked __esModule) is returned unchanged; otherwise
// the module's own enumerable properties are copied onto a fresh namespace
// object whose "default" property is the module itself.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var ns = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                ns[key] = mod[key];
            }
        }
    }
    ns["default"] = mod;
    return ns;
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const core = __importStar(__webpack_require__(470));
|
|
const fsHelper = __importStar(__webpack_require__(618));
|
|
const gitAuthHelper = __importStar(__webpack_require__(287));
|
|
const gitCommandManager = __importStar(__webpack_require__(289));
|
|
const gitDirectoryHelper = __importStar(__webpack_require__(438));
|
|
const githubApiHelper = __importStar(__webpack_require__(464));
|
|
const io = __importStar(__webpack_require__(1));
|
|
const path = __importStar(__webpack_require__(622));
|
|
const refHelper = __importStar(__webpack_require__(227));
|
|
const stateHelper = __importStar(__webpack_require__(153));
|
|
const urlHelper = __importStar(__webpack_require__(81));
|
|
function getSource(settings) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
// Repository URL
|
|
core.info(`Syncing repository: ${settings.repositoryOwner}/${settings.repositoryName}`);
|
|
const repositoryUrl = urlHelper.getFetchUrl(settings);
|
|
// Remove conflicting file path
|
|
if (fsHelper.fileExistsSync(settings.repositoryPath)) {
|
|
yield io.rmRF(settings.repositoryPath);
|
|
}
|
|
// Create directory
|
|
let isExisting = true;
|
|
if (!fsHelper.directoryExistsSync(settings.repositoryPath)) {
|
|
isExisting = false;
|
|
yield io.mkdirP(settings.repositoryPath);
|
|
}
|
|
// Git command manager
|
|
core.startGroup('Getting Git version info');
|
|
const git = yield getGitCommandManager(settings);
|
|
core.endGroup();
|
|
// Prepare existing directory, otherwise recreate
|
|
if (isExisting) {
|
|
yield gitDirectoryHelper.prepareExistingDirectory(git, settings.repositoryPath, repositoryUrl, settings.clean, settings.ref);
|
|
}
|
|
if (!git) {
|
|
// Downloading using REST API
|
|
core.info(`The repository will be downloaded using the GitHub REST API`);
|
|
core.info(`To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH`);
|
|
if (settings.submodules) {
|
|
throw new Error(`Input 'submodules' not supported when falling back to download using the GitHub REST API. To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH.`);
|
|
}
|
|
else if (settings.sshKey) {
|
|
throw new Error(`Input 'ssh-key' not supported when falling back to download using the GitHub REST API. To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH.`);
|
|
}
|
|
yield githubApiHelper.downloadRepository(settings.authToken, settings.repositoryOwner, settings.repositoryName, settings.ref, settings.commit, settings.repositoryPath);
|
|
return;
|
|
}
|
|
// Save state for POST action
|
|
stateHelper.setRepositoryPath(settings.repositoryPath);
|
|
// Initialize the repository
|
|
if (!fsHelper.directoryExistsSync(path.join(settings.repositoryPath, '.git'))) {
|
|
core.startGroup('Initializing the repository');
|
|
yield git.init();
|
|
yield git.remoteAdd('origin', repositoryUrl);
|
|
core.endGroup();
|
|
}
|
|
// Disable automatic garbage collection
|
|
core.startGroup('Disabling automatic garbage collection');
|
|
if (!(yield git.tryDisableAutomaticGarbageCollection())) {
|
|
core.warning(`Unable to turn off git automatic garbage collection. The git fetch operation may trigger garbage collection and cause a delay.`);
|
|
}
|
|
core.endGroup();
|
|
const authHelper = gitAuthHelper.createAuthHelper(git, settings);
|
|
try {
|
|
// Configure auth
|
|
core.startGroup('Setting up auth');
|
|
yield authHelper.configureAuth();
|
|
core.endGroup();
|
|
// Determine the default branch
|
|
if (!settings.ref && !settings.commit) {
|
|
core.startGroup('Determining the default branch');
|
|
if (settings.sshKey) {
|
|
settings.ref = yield git.getDefaultBranch(repositoryUrl);
|
|
}
|
|
else {
|
|
settings.ref = yield githubApiHelper.getDefaultBranch(settings.authToken, settings.repositoryOwner, settings.repositoryName);
|
|
}
|
|
core.endGroup();
|
|
}
|
|
// LFS install
|
|
if (settings.lfs) {
|
|
yield git.lfsInstall();
|
|
}
|
|
// Fetch
|
|
core.startGroup('Fetching the repository');
|
|
if (settings.fetchDepth <= 0) {
|
|
// Fetch all branches and tags
|
|
let refSpec = refHelper.getRefSpecForAllHistory(settings.ref, settings.commit);
|
|
yield git.fetch(refSpec);
|
|
// When all history is fetched, the ref we're interested in may have moved to a different
|
|
// commit (push or force push). If so, fetch again with a targeted refspec.
|
|
if (!(yield refHelper.testRef(git, settings.ref, settings.commit))) {
|
|
refSpec = refHelper.getRefSpec(settings.ref, settings.commit);
|
|
yield git.fetch(refSpec);
|
|
}
|
|
}
|
|
else {
|
|
const refSpec = refHelper.getRefSpec(settings.ref, settings.commit);
|
|
yield git.fetch(refSpec, settings.fetchDepth);
|
|
}
|
|
core.endGroup();
|
|
// Checkout info
|
|
core.startGroup('Determining the checkout info');
|
|
const checkoutInfo = yield refHelper.getCheckoutInfo(git, settings.ref, settings.commit);
|
|
core.endGroup();
|
|
// LFS fetch
|
|
// Explicit lfs-fetch to avoid slow checkout (fetches one lfs object at a time).
|
|
// Explicit lfs fetch will fetch lfs objects in parallel.
|
|
if (settings.lfs) {
|
|
core.startGroup('Fetching LFS objects');
|
|
yield git.lfsFetch(checkoutInfo.startPoint || checkoutInfo.ref);
|
|
core.endGroup();
|
|
}
|
|
// Checkout
|
|
core.startGroup('Checking out the ref');
|
|
yield git.checkout(checkoutInfo.ref, checkoutInfo.startPoint);
|
|
core.endGroup();
|
|
// Submodules
|
|
if (settings.submodules) {
|
|
try {
|
|
// Temporarily override global config
|
|
core.startGroup('Setting up auth for fetching submodules');
|
|
yield authHelper.configureGlobalAuth();
|
|
core.endGroup();
|
|
// Checkout submodules
|
|
core.startGroup('Fetching submodules');
|
|
yield git.submoduleSync(settings.nestedSubmodules);
|
|
yield git.submoduleUpdate(settings.fetchDepth, settings.nestedSubmodules);
|
|
yield git.submoduleForeach('git config --local gc.auto 0', settings.nestedSubmodules);
|
|
core.endGroup();
|
|
// Persist credentials
|
|
if (settings.persistCredentials) {
|
|
core.startGroup('Persisting credentials for submodules');
|
|
yield authHelper.configureSubmoduleAuth();
|
|
core.endGroup();
|
|
}
|
|
}
|
|
finally {
|
|
// Remove temporary global config override
|
|
yield authHelper.removeGlobalAuth();
|
|
}
|
|
}
|
|
// Dump some info about the checked out commit
|
|
const commitInfo = yield git.log1();
|
|
// Check for incorrect pull request merge commit
|
|
yield refHelper.checkCommitInfo(settings.authToken, commitInfo, settings.repositoryOwner, settings.repositoryName, settings.ref, settings.commit);
|
|
}
|
|
finally {
|
|
// Remove auth
|
|
if (!settings.persistCredentials) {
|
|
core.startGroup('Removing auth');
|
|
yield authHelper.removeAuth();
|
|
core.endGroup();
|
|
}
|
|
}
|
|
});
|
|
}
|
|
exports.getSource = getSource;
|
|
// Best-effort POST-action cleanup: removes the auth configuration that the
// checkout step installed in the local repository. Never throws for the
// expected failure modes — missing path, missing .git/config, or git not
// being locatable all result in a silent no-op.
function cleanup(repositoryPath) {
    return __awaiter(this, void 0, void 0, function* () {
        // Repo exists?
        if (!repositoryPath ||
            !fsHelper.fileExistsSync(path.join(repositoryPath, '.git', 'config'))) {
            return;
        }
        let git;
        try {
            // lfs=false: LFS support is not required just to scrub auth config
            git = yield gitCommandManager.createCommandManager(repositoryPath, false);
        }
        catch (_a) {
            // git could not be located — nothing we can clean up
            return;
        }
        // Remove auth
        const authHelper = gitAuthHelper.createAuthHelper(git);
        yield authHelper.removeAuth();
    });
}
exports.cleanup = cleanup;
|
|
/**
 * Attempts to create a git command manager for the configured working
 * directory. Resolves to undefined when git cannot be located — unless LFS
 * was requested, since the LFS path cannot fall back to the REST API
 * download; in that case the creation error is rethrown.
 */
function getGitCommandManager(settings) {
    return __awaiter(this, void 0, void 0, function* () {
        core.info(`Working directory is '${settings.repositoryPath}'`);
        try {
            return yield gitCommandManager.createCommandManager(settings.repositoryPath, settings.lfs);
        }
        catch (creationError) {
            // Git is required for LFS; otherwise fall back to the REST API
            if (!settings.lfs) {
                return undefined;
            }
            throw creationError;
        }
    });
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 294:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = parseOptions;
|
|
|
|
const { Deprecation } = __webpack_require__(692);
|
|
const { getUserAgent } = __webpack_require__(796);
|
|
const once = __webpack_require__(969);
|
|
|
|
const pkg = __webpack_require__(215);
|
|
|
|
// Each constructor-option deprecation below is logged at most once per
// process — `once` memoizes the first invocation.
const deprecateOptionsTimeout = once((log, deprecation) =>
  log.warn(deprecation)
);
const deprecateOptionsAgent = once((log, deprecation) => log.warn(deprecation));
const deprecateOptionsHeaders = once((log, deprecation) =>
  log.warn(deprecation)
);
|
|
|
|
// Normalizes the options passed to the Octokit constructor into client-wide
// request defaults: lowercases header names, maps deprecated top-level
// options (timeout, agent, headers) into their `request` equivalents (with a
// one-time warning each), and composes the user-agent string. Returns the
// computed defaults object.
function parseOptions(options, log, hook) {
  if (options.headers) {
    // lowercase header names to avoid duplicate keys when merging later
    options.headers = Object.keys(options.headers).reduce((newObj, key) => {
      newObj[key.toLowerCase()] = options.headers[key];
      return newObj;
    }, {});
  }

  const clientDefaults = {
    headers: options.headers || {},
    request: options.request || {},
    mediaType: {
      previews: [],
      format: ""
    }
  };

  if (options.baseUrl) {
    clientDefaults.baseUrl = options.baseUrl;
  }

  if (options.userAgent) {
    clientDefaults.headers["user-agent"] = options.userAgent;
  }

  if (options.previews) {
    clientDefaults.mediaType.previews = options.previews;
  }

  if (options.timeZone) {
    clientDefaults.headers["time-zone"] = options.timeZone;
  }

  // deprecated: {timeout} — still honored, forwarded to request.timeout
  if (options.timeout) {
    deprecateOptionsTimeout(
      log,
      new Deprecation(
        "[@octokit/rest] new Octokit({timeout}) is deprecated. Use {request: {timeout}} instead. See https://github.com/octokit/request.js#request"
      )
    );
    clientDefaults.request.timeout = options.timeout;
  }

  // deprecated: {agent} — still honored, forwarded to request.agent
  if (options.agent) {
    deprecateOptionsAgent(
      log,
      new Deprecation(
        "[@octokit/rest] new Octokit({agent}) is deprecated. Use {request: {agent}} instead. See https://github.com/octokit/request.js#request"
      )
    );
    clientDefaults.request.agent = options.agent;
  }

  // deprecated: {headers} — already applied above, warn once
  if (options.headers) {
    deprecateOptionsHeaders(
      log,
      new Deprecation(
        "[@octokit/rest] new Octokit({headers}) is deprecated. Use {userAgent, previews} instead. See https://github.com/octokit/request.js#request"
      )
    );
  }

  // Append the default octokit.js user agent after any caller-provided one
  const userAgentOption = clientDefaults.headers["user-agent"];
  const defaultUserAgent = `octokit.js/${pkg.version} ${getUserAgent()}`;

  clientDefaults.headers["user-agent"] = [userAgentOption, defaultUserAgent]
    .filter(Boolean)
    .join(" ");

  // route every request through the plugin hook chain
  clientDefaults.request.hook = hook.bind(null, "request");

  return clientDefaults;
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 297:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = class HttpError extends Error {
|
|
constructor (message, code, headers) {
|
|
super(message)
|
|
|
|
// Maintains proper stack trace (only available on V8)
|
|
/* istanbul ignore next */
|
|
if (Error.captureStackTrace) {
|
|
Error.captureStackTrace(this, this.constructor)
|
|
}
|
|
|
|
this.name = 'HttpError'
|
|
this.code = code
|
|
this.headers = headers
|
|
}
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 299:
|
|
/***/ (function(__unusedmodule, exports) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
const VERSION = "1.1.2";
|
|
|
|
/**
|
|
* Some “list” response that can be paginated have a different response structure
|
|
*
|
|
* They have a `total_count` key in the response (search also has `incomplete_results`,
|
|
* /installation/repositories also has `repository_selection`), as well as a key with
|
|
* the list of the items which name varies from endpoint to endpoint:
|
|
*
|
|
* - https://developer.github.com/v3/search/#example (key `items`)
|
|
* - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`)
|
|
* - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`)
|
|
* - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`)
|
|
* - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`)
|
|
*
|
|
* Octokit normalizes these responses so that paginated results are always returned following
|
|
* the same structure. One challenge is that if the list response has only one page, no Link
|
|
 * header is provided, so this header alone is not sufficient to check whether a response is
|
|
* paginated or not. For the exceptions with the namespace, a fallback check for the route
|
|
* paths has to be added in order to normalize the response. We cannot check for the total_count
|
|
* property because it also exists in the response of Get the combined status for a specific ref.
|
|
*/
|
|
const REGEX = [/^\/search\//, /^\/repos\/[^/]+\/[^/]+\/commits\/[^/]+\/(check-runs|check-suites)([^/]|$)/, /^\/installation\/repositories([^/]|$)/, /^\/user\/installations([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/secrets([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/workflows(\/[^/]+\/runs)?([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/runs(\/[^/]+\/(artifacts|jobs))?([^/]|$)/];
|
|
// Rewrites the "irregular" list responses matched by REGEX so that
// `response.data` becomes the array of items itself, with the metadata keys
// (`incomplete_results`, `repository_selection`, `total_count`) re-attached
// onto that array. The original namespace key (e.g. `items`, `repositories`)
// remains accessible through a deprecated getter that warns on access.
function normalizePaginatedListResponse(octokit, url, response) {
  const path = url.replace(octokit.request.endpoint.DEFAULTS.baseUrl, "");
  const responseNeedsNormalization = REGEX.find(regex => regex.test(path));
  if (!responseNeedsNormalization) return; // keep the additional properties intact as there is currently no other way
  // to retrieve the same information.

  // Pull the known metadata keys off first so that, after the deletes below,
  // the single remaining key of response.data is the namespaced item list.
  const incompleteResults = response.data.incomplete_results;
  const repositorySelection = response.data.repository_selection;
  const totalCount = response.data.total_count;
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;
  // assumes exactly one key (the item list) remains at this point
  const namespaceKey = Object.keys(response.data)[0];
  const data = response.data[namespaceKey];
  response.data = data;

  if (typeof incompleteResults !== "undefined") {
    response.data.incomplete_results = incompleteResults;
  }

  if (typeof repositorySelection !== "undefined") {
    response.data.repository_selection = repositorySelection;
  }

  response.data.total_count = totalCount;
  // Deprecated access path: response.data.<namespaceKey> still works but
  // logs a warning and returns a copy of the items.
  Object.defineProperty(response.data, namespaceKey, {
    get() {
      octokit.log.warn(`[@octokit/paginate-rest] "response.data.${namespaceKey}" is deprecated for "GET ${path}". Get the results directly from "response.data"`);
      return Array.from(data);
    }

  });
}
|
|
|
|
// Builds an async iterator over the pages of a paginated endpoint. Each
// next() call issues one request; iteration ends once a response carries no
// rel="next" link header.
function iterator(octokit, route, parameters) {
  const requestOptions = octokit.request.endpoint(route, parameters);
  const method = requestOptions.method;
  const headers = requestOptions.headers;
  let url = requestOptions.url;
  return {
    [Symbol.asyncIterator]: () => ({
      next() {
        if (!url) {
          // previous page had no rel="next" link — iteration is complete
          return Promise.resolve({
            done: true
          });
        }

        return octokit.request({ method, url, headers }).then(response => {
          normalizePaginatedListResponse(octokit, url, response);
          // `response.headers.link` format:
          // '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
          // sets `url` to undefined if "next" URL is not present or `link` header is not set
          const linkHeader = response.headers.link || "";
          url = (linkHeader.match(/<([^>]+)>;\s*rel="next"/) || [])[1];
          return { value: response };
        });
      }
    })
  };
}
|
|
|
|
// Collects every page of `route` into a single array. The optional `mapFn`
// receives (response, done) and may call done() to stop pagination early.
function paginate(octokit, route, parameters, mapFn) {
  // Support omitting `parameters`: paginate(octokit, route, mapFn)
  if (typeof parameters === "function") {
    mapFn = parameters;
    parameters = undefined;
  }

  const pageIterator = iterator(octokit, route, parameters)[Symbol.asyncIterator]();
  return gather(octokit, [], pageIterator, mapFn);
}
|
|
|
|
// Recursively drains `iterator`, concatenating each page's items (or mapFn's
// view of them) onto `results`. Stops early when mapFn invokes its `done`
// callback.
function gather(octokit, results, iterator, mapFn) {
  return iterator.next().then(result => {
    if (result.done) {
      return results;
    }

    let stopRequested = false;
    const done = () => {
      stopRequested = true;
    };

    const pageItems = mapFn ? mapFn(result.value, done) : result.value.data;
    results = results.concat(pageItems);

    return stopRequested ? results : gather(octokit, results, iterator, mapFn);
  });
}
|
|
|
|
/**
|
|
* @param octokit Octokit instance
|
|
* @param options Options passed to Octokit constructor
|
|
*/
|
|
|
|
// Plugin entry point: returns a `paginate` function (with an attached
// `paginate.iterator` variant), both pre-bound to this Octokit instance.
function paginateRest(octokit) {
  const boundPaginate = Object.assign(paginate.bind(null, octokit), {
    iterator: iterator.bind(null, octokit)
  });
  return {
    paginate: boundPaginate
  };
}
paginateRest.VERSION = VERSION;
|
|
|
|
exports.paginateRest = paginateRest;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 323:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
|
|
// A stream is any non-null object exposing a pipe() method.
var isStream = module.exports = function (stream) {
	if (stream === null || typeof stream !== 'object') {
		return false;
	}
	return typeof stream.pipe === 'function';
};

// Writable: has _write() and _writableState, and `writable` not explicitly false.
isStream.writable = function (stream) {
	return isStream(stream) &&
		stream.writable !== false &&
		typeof stream._write === 'function' &&
		typeof stream._writableState === 'object';
};

// Readable: has _read() and _readableState, and `readable` not explicitly false.
isStream.readable = function (stream) {
	return isStream(stream) &&
		stream.readable !== false &&
		typeof stream._read === 'function' &&
		typeof stream._readableState === 'object';
};

// Duplex: simultaneously writable and readable.
isStream.duplex = function (stream) {
	return isStream.writable(stream) && isStream.readable(stream);
};

// Transform: a duplex stream with _transform() and _transformState.
isStream.transform = function (stream) {
	return isStream.duplex(stream) &&
		typeof stream._transform === 'function' &&
		typeof stream._transformState === 'object';
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 335:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = __webpack_require__(179);
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 336:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = hasLastPage

const deprecate = __webpack_require__(370)
const getPageLinks = __webpack_require__(577)

// Deprecated helper: returns the "last" page URL parsed from a Link header
// (undefined when absent), warning once per process about the deprecation.
function hasLastPage (link) {
  deprecate(`octokit.hasLastPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
  return getPageLinks(link).last
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 348:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
module.exports = validate;
|
|
|
|
const { RequestError } = __webpack_require__(497);
|
|
const get = __webpack_require__(854);
|
|
const set = __webpack_require__(883);
|
|
|
|
// Validates (and normalizes in place) request parameters against the
// declarative rules in options.request.validate: presence, null-ability,
// enum membership, regex validation, and coercion for "integer" and
// (JSON-encoded) "object" parameters. Supports dotted paths ("a.b") and
// "parent[].child" array notation. Throws RequestError (status 400) on the
// first violation; otherwise returns the mutated options.
function validate(octokit, options) {
  if (!options.request.validate) {
    return;
  }
  const { validate: params } = options.request;

  Object.keys(params).forEach(parameterName => {
    const parameter = get(params, parameterName);

    const expectedType = parameter.type;
    let parentParameterName;
    let parentValue;
    let parentParamIsPresent = true;
    let parentParameterIsArray = false;

    if (/\./.test(parameterName)) {
      // dotted path: resolve the parent container ("a.b" or "a[].b")
      parentParameterName = parameterName.replace(/\.[^.]+$/, "");
      parentParameterIsArray = parentParameterName.slice(-2) === "[]";
      if (parentParameterIsArray) {
        parentParameterName = parentParameterName.slice(0, -2);
      }
      parentValue = get(options, parentParameterName);
      parentParamIsPresent =
        parentParameterName === "headers" ||
        (typeof parentValue === "object" && parentValue !== null);
    }

    // for "parent[].child" validate the child of every array element;
    // otherwise validate the single resolved value
    const values = parentParameterIsArray
      ? (get(options, parentParameterName) || []).map(
          value => value[parameterName.split(/\./).pop()]
        )
      : [get(options, parameterName)];

    values.forEach((value, i) => {
      const valueIsPresent = typeof value !== "undefined";
      const valueIsNull = value === null;
      // "[]" is replaced with the concrete index for error messages / mapTo
      const currentParameterName = parentParameterIsArray
        ? parameterName.replace(/\[\]/, `[${i}]`)
        : parameterName;

      if (!parameter.required && !valueIsPresent) {
        return;
      }

      // if the parent parameter is of type object but allows null
      // then the child parameters can be ignored
      if (!parentParamIsPresent) {
        return;
      }

      if (parameter.allowNull && valueIsNull) {
        return;
      }

      if (!parameter.allowNull && valueIsNull) {
        throw new RequestError(
          `'${currentParameterName}' cannot be null`,
          400,
          {
            request: options
          }
        );
      }

      if (parameter.required && !valueIsPresent) {
        throw new RequestError(
          `Empty value for parameter '${currentParameterName}': ${JSON.stringify(
            value
          )}`,
          400,
          {
            request: options
          }
        );
      }

      // parse to integer before checking for enum
      // so that string "1" will match enum with number 1
      if (expectedType === "integer") {
        const unparsedValue = value;
        value = parseInt(value, 10);
        if (isNaN(value)) {
          throw new RequestError(
            `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
              unparsedValue
            )} is NaN`,
            400,
            {
              request: options
            }
          );
        }
      }

      if (parameter.enum && parameter.enum.indexOf(String(value)) === -1) {
        throw new RequestError(
          `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
            value
          )}`,
          400,
          {
            request: options
          }
        );
      }

      // optional custom regex validation
      if (parameter.validation) {
        const regex = new RegExp(parameter.validation);
        if (!regex.test(value)) {
          throw new RequestError(
            `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
              value
            )}`,
            400,
            {
              request: options
            }
          );
        }
      }

      // JSON-encoded object parameters are parsed in place
      if (expectedType === "object" && typeof value === "string") {
        try {
          value = JSON.parse(value);
        } catch (exception) {
          throw new RequestError(
            `JSON parse error of value for parameter '${currentParameterName}': ${JSON.stringify(
              value
            )}`,
            400,
            {
              request: options
            }
          );
        }
      }

      // write the (possibly coerced) value back, honoring mapTo aliases
      set(options, parameter.mapTo || currentParameterName, value);
    });
  });

  return options;
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 349:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = authenticationRequestError;
|
|
|
|
const { RequestError } = __webpack_require__(497);
|
|
|
|
// Error hook for legacy (username/password) authentication: intercepts
// "2FA required" responses (status 401 with an "x-github-otp: required..."
// header), obtains a one-time password via state.auth.on2fa(), and retries
// the request with the x-github-otp header attached. Any other error is
// rethrown unchanged.
function authenticationRequestError(state, error, options) {
  /* istanbul ignore next */
  if (!error.headers) throw error;

  const otpRequired = /required/.test(error.headers["x-github-otp"] || "");
  // handle "2FA required" error only
  if (error.status !== 401 || !otpRequired) {
    throw error;
  }

  // The failed request already carried an OTP header, so the supplied OTP
  // was wrong. (The status/otpRequired terms here are redundant — they are
  // guaranteed true by the guard above — but preserved as upstream wrote it.)
  if (
    error.status === 401 &&
    otpRequired &&
    error.request &&
    error.request.headers["x-github-otp"]
  ) {
    throw new RequestError(
      "Invalid one-time password for two-factor authentication",
      401,
      {
        headers: error.headers,
        request: options
      }
    );
  }

  if (typeof state.auth.on2fa !== "function") {
    throw new RequestError(
      "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication",
      401,
      {
        headers: error.headers,
        request: options
      }
    );
  }

  // Ask the caller for a one-time password, then retry the request with it
  return Promise.resolve()
    .then(() => {
      return state.auth.on2fa();
    })
    .then(oneTimePassword => {
      const newOptions = Object.assign(options, {
        headers: Object.assign(
          { "x-github-otp": oneTimePassword },
          options.headers
        )
      });
      return state.octokit.request(newOptions);
    });
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 357:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("assert");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 363:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = register
|
|
|
|
// Runs `method(options)` wrapped by every hook registered under `name`.
// An array of names composes nested register() calls (outermost name first);
// names with no registered hooks run the method directly. Throws
// synchronously when `method` is not a function.
function register (state, name, method, options) {
  if (typeof method !== 'function') {
    throw new Error('method for before hook must be a function')
  }

  if (!options) {
    options = {}
  }

  if (Array.isArray(name)) {
    // fold the names (right to left) into nested register() invocations
    return name.reverse().reduce(function (callback, name) {
      return register.bind(null, state, name, callback, options)
    }, method)()
  }

  return Promise.resolve().then(function () {
    const registered = state.registry[name]
    if (!registered) {
      return method(options)
    }

    // wrap the method with each registered hook in order, then invoke
    let wrapped = method
    for (const registration of registered) {
      wrapped = registration.hook.bind(null, wrapped, options)
    }
    return wrapped()
  })
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 368:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = function atob(str) {
|
|
return Buffer.from(str, 'base64').toString('binary')
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 370:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = deprecate
|
|
|
|
// Messages already emitted in this process; each message warns only once.
const loggedMessages = {}

// Logs a deprecation warning for `message`, deduplicated per process.
function deprecate (message) {
  if (loggedMessages[message]) return

  console.warn(`DEPRECATED (@octokit/rest): ${message}`)
  loggedMessages[message] = 1
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 385:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
// Unwraps a transpiled ES-module namespace: returns `ex.default` when `ex`
// is an object carrying one, otherwise returns `ex` unchanged.
function _interopDefault (ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
|
|
|
|
var isPlainObject = _interopDefault(__webpack_require__(696));
|
|
var universalUserAgent = __webpack_require__(562);
|
|
|
|
// Returns a copy of `object` with every key lowercased ({} for falsy input).
function lowercaseKeys(object) {
  if (!object) {
    return {};
  }

  const normalized = {};
  for (const key of Object.keys(object)) {
    normalized[key.toLowerCase()] = object[key];
  }
  return normalized;
}
|
|
|
|
// Recursively merges `options` into a shallow copy of `defaults`: plain
// objects present on both sides are merged key-by-key, anything else from
// `options` wins outright.
function mergeDeep(defaults, options) {
  const result = Object.assign({}, defaults);
  for (const key of Object.keys(options)) {
    const incoming = options[key];
    if (!isPlainObject(incoming)) {
      result[key] = incoming;
    } else if (key in defaults) {
      result[key] = mergeDeep(defaults[key], incoming);
    } else {
      result[key] = incoming;
    }
  }
  return result;
}
|
|
|
|
// Normalizes (route, options) into a single options object and deep-merges
// it over `defaults`. `route` may be a "METHOD /path" string or an options
// object. mediaType.previews arrays are unioned rather than overwritten, and
// the "-preview" suffix is stripped from preview names.
function merge(defaults, route, options) {
  if (typeof route === "string") {
    const [method, url] = route.split(" ");
    // a bare "/path" (no verb) means the whole string is the URL
    options = Object.assign(url ? { method, url } : { url: method }, options);
  } else {
    options = Object.assign({}, route);
  }

  // lowercase header names before merging with defaults to avoid duplicates
  options.headers = lowercaseKeys(options.headers);
  const mergedOptions = mergeDeep(defaults || {}, options);

  // mediaType.previews arrays are merged, instead of overwritten
  if (defaults && defaults.mediaType.previews.length) {
    const inheritedPreviews = defaults.mediaType.previews.filter(
      preview => !mergedOptions.mediaType.previews.includes(preview)
    );
    mergedOptions.mediaType.previews = inheritedPreviews.concat(mergedOptions.mediaType.previews);
  }

  mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(
    preview => preview.replace(/-preview/, "")
  );
  return mergedOptions;
}
|
|
|
|
// Appends `parameters` to `url` as a query string, using "&" when the URL
// already carries a query. The "q" parameter keeps its "+" separators.
function addQueryParameters(url, parameters) {
  const names = Object.keys(parameters);
  if (names.length === 0) {
    return url;
  }

  const separator = /\?/.test(url) ? "&" : "?";
  const pairs = names.map(name => {
    if (name === "q") {
      // search syntax: encode each term but preserve "+" separators
      return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
    }

    return `${name}=${encodeURIComponent(parameters[name])}`;
  });
  return url + separator + pairs.join("&");
}
|
|
|
|
// Matches RFC 6570 expressions such as "{owner}" or "{?page,per_page}".
const urlVariableRegex = /\{[^}]+\}/g;

// Strips braces/operator characters from a matched expression and splits
// the comma-separated variable list.
function removeNonChars(variableName) {
  return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
}

// Returns every template variable name appearing in `url`, in order.
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);

  if (!matches) {
    return [];
  }

  return matches.flatMap(removeNonChars);
}
|
|
|
|
// Returns a copy of `object` without the keys listed in `keysToOmit`.
function omit(object, keysToOmit) {
  const kept = {};
  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      kept[key] = object[key];
    }
  }
  return kept;
}
|
|
|
|
// Based on https://github.com/bramstein/url-template, licensed under BSD
|
|
// TODO: create separate package.
|
|
//
|
|
// Copyright (c) 2012-2014, Bram Stein
|
|
// All rights reserved.
|
|
// Redistribution and use in source and binary forms, with or without
|
|
// modification, are permitted provided that the following conditions
|
|
// are met:
|
|
// 1. Redistributions of source code must retain the above copyright
|
|
// notice, this list of conditions and the following disclaimer.
|
|
// 2. Redistributions in binary form must reproduce the above copyright
|
|
// notice, this list of conditions and the following disclaimer in the
|
|
// documentation and/or other materials provided with the distribution.
|
|
// 3. The name of the author may not be used to endorse or promote products
|
|
// derived from this software without specific prior written permission.
|
|
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
|
|
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
|
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
|
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
|
|
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
|
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
|
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
|
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
|
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
|
/* istanbul ignore file */
|
|
// Percent-encodes `str` while leaving already-encoded %XX sequences and the
// reserved characters "[" / "]" untouched.
function encodeReserved(str) {
  const parts = str.split(/(%[0-9A-Fa-f]{2})/g);
  const encoded = parts.map(function (part) {
    // leave pre-encoded escape sequences as-is
    if (/%[0-9A-Fa-f]/.test(part)) {
      return part;
    }

    return encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
  });
  return encoded.join("");
}
|
|
|
|
// Strict RFC 3986 encoding: like encodeURIComponent, but also escapes
// ! ' ( ) and *.
function encodeUnreserved(str) {
  return encodeURIComponent(str).replace(/[!'()*]/g, c =>
    "%" + c.charCodeAt(0).toString(16).toUpperCase()
  );
}
|
|
|
|
// Encodes a single template value — the "+" and "#" operators permit
// reserved characters — and prefixes "key=" when a key is given.
function encodeValue(operator, value, key) {
  const allowReserved = operator === "+" || operator === "#";
  value = allowReserved ? encodeReserved(value) : encodeUnreserved(value);

  if (!key) {
    return value;
  }
  return encodeUnreserved(key) + "=" + value;
}
|
|
|
|
// True for every value except null and undefined.
function isDefined(value) {
  // loose != null matches exactly null and undefined
  return value != null;
}
|
|
|
|
// Operators whose expansions are rendered as key=value pairs.
function isKeyOperator(operator) {
  return [";", "&", "?"].includes(operator);
}
|
|
|
|
// Expands one URI-template variable per RFC 6570: returns the list of
// encoded strings contributed by `key` under `operator`, honoring the ":n"
// prefix and "*" explode modifiers. Handles strings/numbers/booleans, arrays
// and objects; absent or empty values yield operator-specific empty forms.
function getValues(context, operator, key, modifier) {
  var value = context[key],
      result = [];

  if (isDefined(value) && value !== "") {
    if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
      value = value.toString();

      // ":n" prefix modifier truncates the value to n characters
      if (modifier && modifier !== "*") {
        value = value.substring(0, parseInt(modifier, 10));
      }

      result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
    } else {
      if (modifier === "*") {
        // explode: every array item / object entry becomes its own pair
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              result.push(encodeValue(operator, value[k], k));
            }
          });
        }
      } else {
        // non-explode: items are joined with "," into a single value
        const tmp = [];

        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            tmp.push(encodeValue(operator, value));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              tmp.push(encodeUnreserved(k));
              tmp.push(encodeValue(operator, value[k].toString()));
            }
          });
        }

        if (isKeyOperator(operator)) {
          result.push(encodeUnreserved(key) + "=" + tmp.join(","));
        } else if (tmp.length !== 0) {
          result.push(tmp.join(","));
        }
      }
    }
  } else {
    // undefined/empty handling differs per operator
    if (operator === ";") {
      if (isDefined(value)) {
        result.push(encodeUnreserved(key));
      }
    } else if (value === "" && (operator === "&" || operator === "?")) {
      result.push(encodeUnreserved(key) + "=");
    } else if (value === "") {
      result.push("");
    }
  }

  return result;
}
|
|
|
|
// Wraps an RFC 6570 template in an object exposing expand(context).
function parseUrl(template) {
  return { expand: expand.bind(null, template) };
}
|
|
|
|
// Expands an RFC 6570 URI template against `context`: each "{...}"
// expression is replaced by its encoded value(s); literal text between
// expressions is percent-encoded with encodeReserved.
function expand(template, context) {
  var operators = ["+", "#", ".", "/", ";", "?", "&"];
  return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
    if (expression) {
      let operator = "";
      const values = [];

      // a leading operator character changes encoding and separators
      if (operators.indexOf(expression.charAt(0)) !== -1) {
        operator = expression.charAt(0);
        expression = expression.substr(1);
      }

      // each comma-separated variable may carry a ":n" or "*" modifier
      expression.split(/,/g).forEach(function (variable) {
        var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
        values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
      });

      if (operator && operator !== "+") {
        // "?" joins with "&", "#" with ",", the rest with the operator itself
        var separator = ",";

        if (operator === "?") {
          separator = "&";
        } else if (operator !== "#") {
          separator = operator;
        }

        return (values.length !== 0 ? operator : "") + values.join(separator);
      } else {
        return values.join(",");
      }
    } else {
      return encodeReserved(literal);
    }
  });
}
|
|
|
|
// Turns fully-merged endpoint options into a concrete request description:
// expands the URL template, distributes leftover parameters into the query
// string (GET/HEAD) or request body (other verbs), and derives the accept
// header from mediaType format/previews. Returns {method, url, headers}
// plus optional `body` and `request` keys.
function parse(options) {
  // https://fetch.spec.whatwg.org/#methods
  let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible

  let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}");
  let headers = Object.assign({}, options.headers);
  let body;
  let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later

  const urlVariableNames = extractUrlVariableNames(url);
  url = parseUrl(url).expand(parameters);

  // relative URLs are resolved against baseUrl
  if (!/^http/.test(url)) {
    url = options.baseUrl + url;
  }

  const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  // (sic: "Requset" typo in the upstream variable name is preserved)
  const isBinaryRequset = /application\/octet-stream/i.test(headers.accept);

  if (!isBinaryRequset) {
    if (options.mediaType.format) {
      // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
      headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
    }

    if (options.mediaType.previews.length) {
      // merge previews already named in the accept header with the requested ones
      const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
      headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
        const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
        return `application/vnd.github.${preview}-preview${format}`;
      }).join(",");
    }
  } // for GET/HEAD requests, set URL query parameters from remaining parameters
  // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters


  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      } else {
        headers["content-length"] = 0;
      }
    }
  } // default content-type for JSON if body is set


  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
  // fetch does not allow to set `content-length` header, but we can set body to an empty string


  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  } // Only return body/request keys if present


  return Object.assign({
    method,
    url,
    headers
  }, typeof body !== "undefined" ? {
    body
  } : null, options.request ? {
    request: options.request
  } : null);
}
|
|
|
|
// Merge route/options on top of the given defaults, then turn the result
// into a concrete { method, url, headers, ... } request object.
function endpointWithDefaults(defaults, route, options) {
  const mergedOptions = merge(defaults, route, options);
  return parse(mergedOptions);
}
|
|
|
|
/**
 * Build an `endpoint` function pre-bound to the merge of `oldDefaults`
 * and `newDefaults`. The returned function also exposes DEFAULTS,
 * defaults(), merge() and parse() as properties.
 */
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS = merge(oldDefaults, newDefaults);
  const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
  // Attach the public helper surface directly instead of via Object.assign.
  endpoint.DEFAULTS = DEFAULTS;
  endpoint.defaults = withDefaults.bind(null, DEFAULTS);
  endpoint.merge = merge.bind(null, DEFAULTS);
  endpoint.parse = parse;
  return endpoint;
}
|
|
|
|
// Library version baked in at build time.
const VERSION = "6.0.1";

// Sent with every request, e.g. "octokit-endpoint.js/6.0.1 Node.js/12 (...)".
const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.

// Baseline options every endpoint starts from; callers override via .defaults().
const DEFAULTS = {
  method: "GET",
  baseUrl: "https://api.github.com",
  headers: {
    accept: "application/vnd.github.v3+json",
    "user-agent": userAgent
  },
  mediaType: {
    format: "",
    previews: []
  }
};

// The public, pre-configured endpoint function.
const endpoint = withDefaults(null, DEFAULTS);

exports.endpoint = endpoint;
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 389:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __webpack_require__(747);
|
|
const shebangCommand = __webpack_require__(866);
|
|
|
|
/**
 * Read the shebang line (if any) from the executable at `command`.
 * Returns the parsed shebang command, or null when the file has no
 * shebang or cannot be read.
 */
function readShebang(command) {
  // A shebang line fits comfortably within the first 150 bytes.
  const size = 150;

  let buffer;
  if (Buffer.alloc) {
    // Node.js v4.5+ / v5.10+: zero-filled by default.
    buffer = Buffer.alloc(size);
  } else {
    // Legacy Node.js API: allocate and zero-fill manually.
    buffer = new Buffer(size);
    buffer.fill(0);
  }

  // Best effort: unreadable or missing files simply yield no shebang.
  try {
    const fd = fs.openSync(command, 'r');
    fs.readSync(fd, buffer, 0, size, 0);
    fs.closeSync(fd);
  } catch (e) { /* Empty */ }

  // Attempt to extract shebang (null is returned if not a shebang)
  return shebangCommand(buffer.toString());
}

module.exports = readShebang;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 402:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = Octokit;
|
|
|
|
const { request } = __webpack_require__(753);
|
|
const Hook = __webpack_require__(523);
|
|
|
|
const parseClientOptions = __webpack_require__(294);
|
|
|
|
/**
 * Octokit client factory: wires up the hook collection, a logger and a
 * preconfigured `request`, then lets every registered plugin extend the
 * resulting API object.
 */
function Octokit(plugins, options) {
  options = options || {};

  const hook = new Hook.Collection();

  // Default logger: silence debug/info, route warn/error to the console;
  // callers may override any of the four methods via options.log.
  const log = Object.assign(
    {
      debug: () => {},
      info: () => {},
      warn: console.warn,
      error: console.error
    },
    options && options.log
  );

  const api = {
    hook,
    log,
    request: request.defaults(parseClientOptions(options, log, hook))
  };

  // Each plugin may attach additional methods/properties to `api`.
  for (const pluginFunction of plugins) {
    pluginFunction(api, options);
  }

  return api;
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 413:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("stream");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 417:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("crypto");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 427:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";

// Older versions of Node.js might not have `util.getSystemErrorName()`.
// In that case, fall back to a deprecated internal.
const util = __webpack_require__(669);

let uv;

if (typeof util.getSystemErrorName === 'function') {
  // Modern Node.js: use the public API directly.
  module.exports = util.getSystemErrorName;
} else {
  try {
    // Deprecated internal binding; may be unavailable in some environments.
    uv = process.binding('uv');

    if (typeof uv.errname !== 'function') {
      throw new TypeError('uv.errname is not a function');
    }
  } catch (err) {
    // Binding unavailable: fall back to the "Unknown system error" path.
    console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err);
    uv = null;
  }

  module.exports = code => errname(uv, code);
}

// Used for testing the fallback behavior
module.exports.__test__ = errname;
|
|
|
|
/**
 * Map a negative libuv error code to its symbolic name.
 *
 * @param {object|null} uv  process.binding('uv') or null when unavailable
 * @param {number} code     negative system error code
 * @returns {string} the symbolic name, or "Unknown system error <code>"
 * @throws {Error} when code is not negative
 */
function errname(uv, code) {
  // Prefer the real lookup table when the uv binding is available.
  if (uv) {
    return uv.errname(code);
  }

  // Error codes are negative by convention; anything else is a caller bug.
  // (Deliberately `!(code < 0)` rather than `code >= 0` so NaN also throws.)
  if (!(code < 0)) {
    throw new Error('err >= 0');
  }

  return `Unknown system error ${code}`;
}
|
|
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 430:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = octokitValidate;
|
|
|
|
const validate = __webpack_require__(348);
|
|
|
|
/**
 * Plugin: validate request options against the endpoint's parameter
 * specification before every request is sent.
 */
function octokitValidate(octokit) {
  const validateRequest = validate.bind(null, octokit);
  octokit.hook.before("request", validateRequest);
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 431:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const os = __webpack_require__(87);
|
|
/**
|
|
* Commands
|
|
*
|
|
* Command Format:
|
|
* ##[name key=value;key=value]message
|
|
*
|
|
* Examples:
|
|
* ##[warning]This is the user warning message
|
|
* ##[set-secret name=mypassword]definitelyNotAPassword!
|
|
*/
|
|
/**
 * Emit a workflow command by writing `::name key=value,…::message` plus a
 * newline to stdout, where the Actions runner intercepts and interprets it.
 */
function issueCommand(command, properties, message) {
  const cmd = new Command(command, properties, message);
  process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
// Convenience wrapper for commands without properties, e.g. issue('group', name).
function issue(name, message = '') {
  issueCommand(name, {}, message);
}
exports.issue = issue;
|
|
// Marker that both opens and closes the command portion of the line.
const CMD_STRING = '::';

/**
 * One workflow-command line of the shape
 * `::name key=value,key=value::message`.
 */
class Command {
  constructor(command, properties, message) {
    // An empty command would render as "::::…"; use a sentinel instead.
    this.command = command || 'missing.command';
    this.properties = properties;
    this.message = message;
  }

  toString() {
    let result = CMD_STRING + this.command;

    const props = this.properties;
    if (props && Object.keys(props).length > 0) {
      result += ' ';
      for (const key of Object.keys(props)) {
        const val = props[key];
        // Falsy values are skipped entirely (no "key=," fragment).
        if (val) {
          // safely append the val - avoid blowing up when attempting to
          // call .replace() if message is not a string for some reason
          result += `${key}=${escape(`${val || ''}`)},`;
        }
      }
    }

    result += CMD_STRING;

    // safely append the message - avoid blowing up when attempting to
    // call .replace() if message is not a string for some reason
    result += escapeData(`${this.message || ''}`);
    return result;
  }
}

// Percent-encode CR/LF so the message stays on a single line.
function escapeData(s) {
  return s.replace(/[\r\n]/g, (ch) => (ch === '\r' ? '%0D' : '%0A'));
}

// Percent-encode CR/LF plus the property delimiters `]` and `;`.
function escape(s) {
  return s.replace(/[\r\n\];]/g, (ch) => {
    switch (ch) {
      case '\r':
        return '%0D';
      case '\n':
        return '%0A';
      case ']':
        return '%5D';
      default:
        return '%3B';
    }
  });
}
|
|
//# sourceMappingURL=command.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 438:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript __awaiter helper: drives a generator-based "async" function,
// resolving/rejecting a Promise as the generator yields awaited values.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript __importStar helper: emulate `import * as ns` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const assert = __importStar(__webpack_require__(357));
const core = __importStar(__webpack_require__(470));
const fs = __importStar(__webpack_require__(747));
const fsHelper = __importStar(__webpack_require__(618));
const io = __importStar(__webpack_require__(1));
const path = __importStar(__webpack_require__(622));
|
|
/**
 * Prepare an already-existing directory for use as the checkout target.
 * The existing repository is reused when a git wrapper is available and
 * the .git fetch URL matches; in that case stale lock files, local and
 * conflicting remote refs are removed and (optionally) the worktree is
 * cleaned. In every other case — or when cleaning fails — the directory
 * CONTENTS are deleted so the checkout starts fresh.
 *
 * @param git git command wrapper, or falsy when downloading via the REST API
 * @param repositoryPath path of the existing directory
 * @param repositoryUrl expected fetch URL of the repository
 * @param clean whether to run git clean/reset on the reused repository
 * @param ref ref that will be checked out (used to find conflicting remote refs)
 */
function prepareExistingDirectory(git, repositoryPath, repositoryUrl, clean, ref) {
    return __awaiter(this, void 0, void 0, function* () {
        assert.ok(repositoryPath, 'Expected repositoryPath to be defined');
        assert.ok(repositoryUrl, 'Expected repositoryUrl to be defined');
        // Indicates whether to delete the directory contents
        let remove = false;
        // Check whether using git or REST API
        if (!git) {
            remove = true;
        }
        // Fetch URL does not match
        else if (!fsHelper.directoryExistsSync(path.join(repositoryPath, '.git')) ||
            repositoryUrl !== (yield git.tryGetFetchUrl())) {
            remove = true;
        }
        else {
            // Delete any index.lock and shallow.lock left by a previously canceled run or crashed git process
            const lockPaths = [
                path.join(repositoryPath, '.git', 'index.lock'),
                path.join(repositoryPath, '.git', 'shallow.lock')
            ];
            for (const lockPath of lockPaths) {
                try {
                    yield io.rmRF(lockPath);
                }
                catch (error) {
                    // Best effort: a leftover lock that cannot be removed is only logged.
                    core.debug(`Unable to delete '${lockPath}'. ${error.message}`);
                }
            }
            try {
                core.startGroup('Removing previously created refs, to avoid conflicts');
                // Checkout detached HEAD
                if (!(yield git.isDetached())) {
                    yield git.checkoutDetach();
                }
                // Remove all refs/heads/*
                let branches = yield git.branchList(false);
                for (const branch of branches) {
                    yield git.branchDelete(false, branch);
                }
                // Remove any conflicting refs/remotes/origin/*
                // Example 1: Consider ref is refs/heads/foo and previously fetched refs/remotes/origin/foo/bar
                // Example 2: Consider ref is refs/heads/foo/bar and previously fetched refs/remotes/origin/foo
                if (ref) {
                    ref = ref.startsWith('refs/') ? ref : `refs/heads/${ref}`;
                    if (ref.startsWith('refs/heads/')) {
                        // Compare case-insensitively with trailing slashes so that a
                        // prefix relationship in either direction counts as a conflict.
                        const upperName1 = ref.toUpperCase().substr('REFS/HEADS/'.length);
                        const upperName1Slash = `${upperName1}/`;
                        branches = yield git.branchList(true);
                        for (const branch of branches) {
                            const upperName2 = branch.substr('origin/'.length).toUpperCase();
                            const upperName2Slash = `${upperName2}/`;
                            if (upperName1.startsWith(upperName2Slash) ||
                                upperName2.startsWith(upperName1Slash)) {
                                yield git.branchDelete(true, branch);
                            }
                        }
                    }
                }
                core.endGroup();
                // Clean
                if (clean) {
                    core.startGroup('Cleaning the repository');
                    if (!(yield git.tryClean())) {
                        core.debug(`The clean command failed. This might be caused by: 1) path too long, 2) permission issue, or 3) file in use. For futher investigation, manually run 'git clean -ffdx' on the directory '${repositoryPath}'.`);
                        remove = true;
                    }
                    else if (!(yield git.tryReset())) {
                        remove = true;
                    }
                    core.endGroup();
                    if (remove) {
                        core.warning(`Unable to clean or reset the repository. The repository will be recreated instead.`);
                    }
                }
            }
            catch (error) {
                // Any failure while reusing the repository falls back to a full recreate.
                core.warning(`Unable to prepare the existing repository. The repository will be recreated instead.`);
                remove = true;
            }
        }
        if (remove) {
            // Delete the contents of the directory. Don't delete the directory itself
            // since it might be the current working directory.
            core.info(`Deleting the contents of '${repositoryPath}'`);
            for (const file of yield fs.promises.readdir(repositoryPath)) {
                yield io.rmRF(path.join(repositoryPath, file));
            }
        }
    });
}
exports.prepareExistingDirectory = prepareExistingDirectory;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 453:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
var once = __webpack_require__(969)
var eos = __webpack_require__(3)
var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes

var noop = function () {}
// Detect Node.js 0.x, whose fs streams need an explicit close() (see isFS).
var ancient = /^v?\.0/.test(process.version)
|
|
|
|
// True when `fn` is callable.
var isFn = function (fn) {
  var type = typeof fn
  return type === 'function'
}
|
|
|
|
// Is `stream` an fs read/write stream that needs close() on legacy Node?
var isFS = function (stream) {
  if (!ancient) return false // newer node version do not need to care about fs is a special way
  if (!fs) return false // browser
  var isFsStream = stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)
  return isFsStream && isFn(stream.close)
}
|
|
|
|
// Duck-type an http.ClientRequest: has setHeader() and a callable abort().
var isRequest = function (stream) {
  var hasSetHeader = stream.setHeader
  return hasSetHeader && isFn(stream.abort)
}
|
|
|
|
/**
 * Wire close/end/error tracking for one stream of the pipeline and
 * return a function that tears the stream down exactly once.
 * `callback` fires at most once: with an error, or empty on clean end.
 */
var destroyer = function (stream, reading, writing, callback) {
  callback = once(callback)

  var closed = false
  stream.on('close', function () {
    closed = true
  })

  // end-of-stream: report when the stream ends/finishes or errors.
  eos(stream, {readable: reading, writable: writing}, function (err) {
    if (err) return callback(err)
    closed = true
    callback()
  })

  var destroyed = false
  return function (err) {
    // Already finished or already torn down: nothing to do.
    if (closed) return
    if (destroyed) return
    destroyed = true

    if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
    if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want

    if (isFn(stream.destroy)) return stream.destroy()

    // No way to destroy this stream: surface the failure instead.
    callback(err || new Error('stream was destroyed'))
  }
}
|
|
|
|
// Invoke one queued destroyer; used with Array#forEach below.
var call = function (fn) {
  fn()
}

// Pipe `from` into `to` and return `to`, so reduce() can chain the pipeline.
var pipe = function (from, to) {
  return from.pipe(to)
}
|
|
|
|
/**
 * pump(stream1, stream2, ..., [cb]) — pipe the streams together and
 * destroy all of them if one errors or closes early. The optional
 * trailing callback fires once with the first error (if any) after the
 * destination finishes. Returns the last stream for chaining.
 */
var pump = function () {
  var streams = Array.prototype.slice.call(arguments)
  // Optional trailing callback; defaults to a no-op.
  var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop

  // Also accept a single array of streams.
  if (Array.isArray(streams[0])) streams = streams[0]
  if (streams.length < 2) throw new Error('pump requires two streams per minimum')

  var error
  var destroys = streams.map(function (stream, i) {
    // Every stream but the last is read from; every stream but the first is written to.
    var reading = i < streams.length - 1
    var writing = i > 0
    return destroyer(stream, reading, writing, function (err) {
      if (!error) error = err
      // On error, tear the whole pipeline down.
      if (err) destroys.forEach(call)
      // Only the destination (reading === false) completes the pipeline.
      if (reading) return
      destroys.forEach(call)
      callback(error)
    })
  })

  return streams.reduce(pipe)
}

module.exports = pump
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 454:
|
|
/***/ (function(module, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
|
|
|
|
var Stream = _interopDefault(__webpack_require__(413));
|
|
var http = _interopDefault(__webpack_require__(605));
|
|
var Url = _interopDefault(__webpack_require__(835));
|
|
var https = _interopDefault(__webpack_require__(34));
|
|
var zlib = _interopDefault(__webpack_require__(761));
|
|
|
|
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
|
|
|
|
// fix for "Readable" isn't a named export issue
|
|
const Readable = Stream.Readable;
|
|
|
|
// Symbols keeping Blob internals off the public property surface.
const BUFFER = Symbol('buffer');
const TYPE = Symbol('type');

/**
 * Minimal W3C Blob implementation backed by a single Node Buffer.
 * Accepts an array-like of Buffer/TypedArray/ArrayBuffer/Blob/string
 * parts plus an optional `{ type }` option (lower-cased; ignored unless
 * it is printable ASCII).
 */
class Blob {
  constructor(blobParts, options) {
    this[TYPE] = '';

    const buffers = [];

    if (blobParts) {
      const parts = blobParts;
      const count = Number(parts.length);
      for (let i = 0; i < count; i++) {
        const part = parts[i];
        let chunk;
        if (part instanceof Buffer) {
          chunk = part;
        } else if (ArrayBuffer.isView(part)) {
          // Typed array / DataView: wrap the same underlying memory.
          chunk = Buffer.from(part.buffer, part.byteOffset, part.byteLength);
        } else if (part instanceof ArrayBuffer) {
          chunk = Buffer.from(part);
        } else if (part instanceof Blob) {
          chunk = part[BUFFER];
        } else {
          // Anything else is coerced to a string.
          chunk = Buffer.from(typeof part === 'string' ? part : String(part));
        }
        buffers.push(chunk);
      }
    }

    this[BUFFER] = Buffer.concat(buffers);

    // Only keep a printable-ASCII MIME type; anything else is ignored.
    const type = options && options.type !== undefined && String(options.type).toLowerCase();
    if (type && !/[^\u0020-\u007E]/.test(type)) {
      this[TYPE] = type;
    }
  }

  get size() {
    return this[BUFFER].length;
  }

  get type() {
    return this[TYPE];
  }

  text() {
    return Promise.resolve(this[BUFFER].toString());
  }

  arrayBuffer() {
    const buf = this[BUFFER];
    // Copy just the occupied region of the (possibly pooled) backing store.
    const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    return Promise.resolve(ab);
  }

  stream() {
    // One-shot readable that emits the whole buffer, then EOF.
    const readable = new Readable();
    readable._read = function () {};
    readable.push(this[BUFFER]);
    readable.push(null);
    return readable;
  }

  toString() {
    return '[object Blob]';
  }

  slice(start, end, contentType) {
    const size = this.size;

    // Clamp negative/overflowing offsets the way Array#slice does.
    let from;
    if (start === undefined) {
      from = 0;
    } else if (start < 0) {
      from = Math.max(size + start, 0);
    } else {
      from = Math.min(start, size);
    }

    let to;
    if (end === undefined) {
      to = size;
    } else if (end < 0) {
      to = Math.max(size + end, 0);
    } else {
      to = Math.min(end, size);
    }

    const span = Math.max(to - from, 0);
    const slicedBuffer = this[BUFFER].slice(from, from + span);

    const blob = new Blob([], { type: contentType });
    blob[BUFFER] = slicedBuffer;
    return blob;
  }
}

// Match the browser Blob surface: these members are enumerable.
Object.defineProperties(Blob.prototype, {
  size: { enumerable: true },
  type: { enumerable: true },
  slice: { enumerable: true }
});

Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
  value: 'Blob',
  writable: false,
  enumerable: false,
  configurable: true
});
|
|
|
|
/**
|
|
* fetch-error.js
|
|
*
|
|
* FetchError interface for operational errors
|
|
*/
|
|
|
|
/**
 * Create FetchError instance
 *
 * @param String message Error message for human
 * @param String type Error type for machine
 * @param String systemError For Node.js system error
 * @return FetchError
 */
function FetchError(message, type, systemError) {
  Error.call(this, message);

  this.message = message;
  this.type = type;

  // when err.type is `system`, err.code contains system error code
  if (systemError) {
    this.errno = systemError.code;
    this.code = this.errno;
  }

  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}

// Inherit from Error the ES5 way so `instanceof Error` holds.
FetchError.prototype = Object.create(Error.prototype);
FetchError.prototype.constructor = FetchError;
FetchError.prototype.name = 'FetchError';
|
|
|
|
// Optional `encoding` dependency: only required by textConverted().
let convert;
try {
  convert = __webpack_require__(18).convert;
} catch (e) {}

// Symbol hiding Body's internal state (body, disturbed, error).
const INTERNALS = Symbol('Body internals');

// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream.PassThrough;
|
|
|
|
/**
 * Body mixin
 *
 * Ref: https://fetch.spec.whatwg.org/#body
 *
 * Normalizes the given body to null, a Buffer, a Blob, or a Stream and
 * stores it (plus disturbed/error state) behind the INTERNALS symbol.
 *
 * @param Stream body Readable stream
 * @param Object opts Response options ({ size, timeout })
 * @return Void
 */
function Body(body) {
  var _this = this;

  // Transpiled form of `{ size = 0, timeout = 0 } = arguments[1] || {}`.
  var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
      _ref$size = _ref.size;

  let size = _ref$size === undefined ? 0 : _ref$size;
  var _ref$timeout = _ref.timeout;
  let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;

  if (body == null) {
    // body is undefined or null
    body = null;
  } else if (isURLSearchParams(body)) {
    // body is a URLSearchParams
    body = Buffer.from(body.toString());
  } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
    // body is ArrayBuffer
    // (the lone `;` statements above are transpiler output meaning
    // "Blob/Buffer bodies are kept as-is" — not a bug)
    body = Buffer.from(body);
  } else if (ArrayBuffer.isView(body)) {
    // body is ArrayBufferView
    body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
  } else if (body instanceof Stream) ; else {
    // none of the above
    // coerce to string then buffer
    body = Buffer.from(String(body));
  }
  this[INTERNALS] = {
    body,
    disturbed: false,
    error: null
  };
  this.size = size;
  this.timeout = timeout;

  // Record stream errors now so a later consumeBody() call can reject with them.
  if (body instanceof Stream) {
    body.on('error', function (err) {
      const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
      _this[INTERNALS].error = error;
    });
  }
}
|
|
|
|
// Reader methods mixed into Request/Response; all delegate to consumeBody().
Body.prototype = {
  get body() {
    return this[INTERNALS].body;
  },

  // True once the body has been read (bodies are single-use).
  get bodyUsed() {
    return this[INTERNALS].disturbed;
  },

  /**
   * Decode response as ArrayBuffer
   *
   * @return Promise
   */
  arrayBuffer() {
    return consumeBody.call(this).then(function (buf) {
      // Copy only the occupied region of the (possibly pooled) backing store.
      return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    });
  },

  /**
   * Return raw response as Blob
   *
   * @return Promise
   */
  blob() {
    let ct = this.headers && this.headers.get('content-type') || '';
    return consumeBody.call(this).then(function (buf) {
      return Object.assign(
      // Prevent copying
      new Blob([], {
        type: ct.toLowerCase()
      }), {
        [BUFFER]: buf
      });
    });
  },

  /**
   * Decode response as json
   *
   * @return Promise
   */
  json() {
    var _this2 = this;

    return consumeBody.call(this).then(function (buffer) {
      try {
        return JSON.parse(buffer.toString());
      } catch (err) {
        // Reject (rather than throw) so callers get a FetchError, not a SyntaxError.
        return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
      }
    });
  },

  /**
   * Decode response as text
   *
   * @return Promise
   */
  text() {
    return consumeBody.call(this).then(function (buffer) {
      return buffer.toString();
    });
  },

  /**
   * Decode response as buffer (non-spec api)
   *
   * @return Promise
   */
  buffer() {
    return consumeBody.call(this);
  },

  /**
   * Decode response as text, while automatically detecting the encoding and
   * trying to decode to UTF-8 (non-spec api)
   *
   * @return Promise
   */
  textConverted() {
    var _this3 = this;

    return consumeBody.call(this).then(function (buffer) {
      return convertBody(buffer, _this3.headers);
    });
  }
};
|
|
|
|
// In browsers, all properties are enumerable.
// (buffer/textConverted are deliberately left non-enumerable: non-spec APIs.)
Object.defineProperties(Body.prototype, {
  body: { enumerable: true },
  bodyUsed: { enumerable: true },
  arrayBuffer: { enumerable: true },
  blob: { enumerable: true },
  json: { enumerable: true },
  text: { enumerable: true }
});
|
|
|
|
/**
 * Copy every Body.prototype member (including getters, via property
 * descriptors) onto `proto`, skipping members the target already has.
 */
Body.mixIn = function (proto) {
  for (const name of Object.getOwnPropertyNames(Body.prototype)) {
    // istanbul ignore else: future proof
    if (name in proto) continue;
    const descriptor = Object.getOwnPropertyDescriptor(Body.prototype, name);
    Object.defineProperty(proto, name, descriptor);
  }
};
|
|
|
|
/**
 * Consume and convert an entire Body to a Buffer.
 *
 * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
 *
 * Marks the body as disturbed, then resolves with the full content:
 * an empty Buffer for null bodies, the Buffer itself for Buffer bodies,
 * and an accumulated Buffer for Blob/stream bodies (honoring the
 * instance's `size` limit and `timeout`).
 *
 * @return Promise
 */
function consumeBody() {
  var _this4 = this;

  // A body may only be read once.
  if (this[INTERNALS].disturbed) {
    return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
  }

  this[INTERNALS].disturbed = true;

  // Surface a stream error captured earlier by the Body constructor.
  if (this[INTERNALS].error) {
    return Body.Promise.reject(this[INTERNALS].error);
  }

  let body = this.body;

  // body is null
  if (body === null) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }

  // body is blob
  if (isBlob(body)) {
    body = body.stream();
  }

  // body is buffer
  if (Buffer.isBuffer(body)) {
    return Body.Promise.resolve(body);
  }

  // istanbul ignore if: should never happen
  if (!(body instanceof Stream)) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }

  // body is stream
  // get ready to actually consume the body
  let accum = [];
  let accumBytes = 0;
  let abort = false;

  return new Body.Promise(function (resolve, reject) {
    let resTimeout;

    // allow timeout on slow response body
    if (_this4.timeout) {
      resTimeout = setTimeout(function () {
        abort = true;
        reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
      }, _this4.timeout);
    }

    // handle stream errors
    body.on('error', function (err) {
      if (err.name === 'AbortError') {
        // if the request was aborted, reject with this Error
        abort = true;
        reject(err);
      } else {
        // other errors, such as incorrect content-encoding
        reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
      }
    });

    body.on('data', function (chunk) {
      // Ignore data after an abort/timeout/oversize rejection.
      if (abort || chunk === null) {
        return;
      }

      // Enforce the optional maximum body size.
      if (_this4.size && accumBytes + chunk.length > _this4.size) {
        abort = true;
        reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
        return;
      }

      accumBytes += chunk.length;
      accum.push(chunk);
    });

    body.on('end', function () {
      if (abort) {
        return;
      }

      clearTimeout(resTimeout);

      try {
        resolve(Buffer.concat(accum, accumBytes));
      } catch (err) {
        // handle streams that have accumulated too much data (issue #414)
        reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
      }
    });
  });
}
|
|
|
|
/**
 * Detect buffer encoding and convert to target encoding
 * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
 *
 * The charset is taken from (in priority order): the content-type header,
 * an html5 <meta charset>, an html4 http-equiv meta tag, or an XML
 * declaration — peeking at most at the first 1024 bytes of the body.
 *
 * @param Buffer buffer Incoming buffer
 * @param Headers headers Response headers (only content-type is read)
 * @return String
 * @throws Error when the optional `encoding` package is not installed
 */
function convertBody(buffer, headers) {
  if (typeof convert !== 'function') {
    throw new Error('The package `encoding` must be installed to use the textConverted() function');
  }

  const ct = headers.get('content-type');
  let charset = 'utf-8';
  let res, str;

  // header
  if (ct) {
    res = /charset=([^;]*)/i.exec(ct);
  }

  // no charset in content type, peek at response body for at most 1024 bytes
  str = buffer.slice(0, 1024).toString();

  // html5
  if (!res && str) {
    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
  }

  // html4
  if (!res && str) {
    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);

    if (res) {
      res = /charset=(.*)/i.exec(res.pop());
    }
  }

  // xml
  if (!res && str) {
    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
  }

  // found charset
  if (res) {
    charset = res.pop();

    // prevent decode issues when sites use incorrect encoding
    // ref: https://hsivonen.fi/encoding-menu/
    if (charset === 'gb2312' || charset === 'gbk') {
      charset = 'gb18030';
    }
  }

  // turn raw buffers into a single utf-8 buffer
  return convert(buffer, 'UTF-8', charset).toString();
}
|
|
|
|
/**
 * Detect a URLSearchParams object across realms and polyfills.
 * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
 *
 * @param Object obj Object to detect by type or brand
 * @return Boolean
 */
function isURLSearchParams(obj) {
  // Duck-typing as a necessary condition: all mutation/query methods present.
  // (Note: like upstream, a null argument throws on the property access.)
  if (typeof obj !== 'object') {
    return false;
  }
  for (const method of ['append', 'delete', 'get', 'getAll', 'has', 'set']) {
    if (typeof obj[method] !== 'function') {
      return false;
    }
  }

  // Brand-checking and more duck-typing as optional condition.
  return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
}
|
|
|
|
/**
 * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
 * @param {*} obj
 * @return {boolean}
 */
function isBlob(obj) {
  if (typeof obj !== 'object') {
    return false;
  }
  // Duck-type the Blob surface...
  const hasBlobShape = typeof obj.arrayBuffer === 'function' &&
    typeof obj.type === 'string' &&
    typeof obj.stream === 'function' &&
    typeof obj.constructor === 'function' &&
    typeof obj.constructor.name === 'string';
  // ...then brand-check both the constructor name and Symbol.toStringTag.
  return hasBlobShape && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]);
}
|
|
|
|
/**
 * Clone body given Res/Req instance
 *
 * Stream bodies are teed through two PassThroughs: the original instance
 * keeps one branch, the clone receives the other. Buffer/null bodies are
 * shared as-is (they are immutable from the Body API's perspective).
 *
 * @param Mixed instance Response or Request instance
 * @return Mixed
 * @throws Error when the body was already consumed
 */
function clone(instance) {
  let p1, p2;
  let body = instance.body;

  // don't allow cloning a used body
  if (instance.bodyUsed) {
    throw new Error('cannot clone body after it is used');
  }

  // check that body is a stream and not form-data object
  // note: we can't clone the form-data object without having it as a dependency
  if (body instanceof Stream && typeof body.getBoundary !== 'function') {
    // tee instance body
    p1 = new PassThrough();
    p2 = new PassThrough();
    body.pipe(p1);
    body.pipe(p2);
    // set instance body to teed body and return the other teed body
    instance[INTERNALS].body = p1;
    body = p2;
  }

  return body;
}
|
|
|
|
/**
 * Performs the operation "extract a `Content-Type` value from |object|" as
 * specified in the specification:
 * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
 *
 * This function assumes that instance.body is present.
 *
 * @param Mixed body Any options.body input
 * @return String|null the content type, or null when it cannot be determined
 */
function extractContentType(body) {
  if (body === null) {
    // no body, no content type
    return null;
  }
  if (typeof body === 'string') {
    return 'text/plain;charset=UTF-8';
  }
  if (isURLSearchParams(body)) {
    return 'application/x-www-form-urlencoded;charset=UTF-8';
  }
  if (isBlob(body)) {
    // blobs may carry their own MIME type
    return body.type || null;
  }
  if (Buffer.isBuffer(body)) {
    return null;
  }
  if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
    return null;
  }
  if (ArrayBuffer.isView(body)) {
    return null;
  }
  if (typeof body.getBoundary === 'function') {
    // detect form data input from form-data module
    return `multipart/form-data;boundary=${body.getBoundary()}`;
  }
  if (body instanceof Stream) {
    // stream: the type cannot be determined up front
    return null;
  }
  // Body constructor defaults other things to string
  return 'text/plain;charset=UTF-8';
}
|
|
|
|
/**
 * The Fetch Standard treats this as if "total bytes" is a property on the body.
 * For us, we have to explicitly get it with a function.
 *
 * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
 *
 * @param Body instance Instance of Body
 * @return Number? Number of bytes, or null if not possible
 */
function getTotalBytes(instance) {
  const data = instance.body;

  // no body at all
  if (data === null) {
    return 0;
  }
  // blobs track their own size
  if (isBlob(data)) {
    return data.size;
  }
  // buffers expose their byte length directly
  if (Buffer.isBuffer(data)) {
    return data.length;
  }
  // detect form data input from form-data module (duck-typed via getLengthSync)
  if (data && typeof data.getLengthSync === 'function') {
    const lengthKnown =
      (data._lengthRetrievers && data._lengthRetrievers.length == 0) || // form-data 1.x
      (data.hasKnownLength && data.hasKnownLength()); // form-data 2.x
    return lengthKnown ? data.getLengthSync() : null;
  }
  // generic stream: size is unknowable up front
  return null;
}
|
|
|
|
/**
 * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
 *
 * @param Stream dest Writable destination (e.g. an http.ClientRequest)
 * @param Body instance Instance of Body
 * @return Void
 */
function writeToStream(dest, instance) {
  const data = instance.body;

  if (data === null) {
    // nothing to send; just terminate the stream
    dest.end();
    return;
  }
  if (isBlob(data)) {
    // blobs expose a readable stream we can pipe through
    data.stream().pipe(dest);
    return;
  }
  if (Buffer.isBuffer(data)) {
    // buffers are written in a single chunk
    dest.write(data);
    dest.end();
    return;
  }
  // anything else is a stream; pipe it straight through
  data.pipe(dest);
}
|
|
|
|
// expose Promise
// Body consumers construct results with Body.Promise; default to the native
// global Promise (overridable later via fetch.Promise).
Body.Promise = global.Promise;

/**
 * headers.js
 *
 * Headers class offers convenient helpers
 */

// Characters NOT permitted in an HTTP header name (approximate token charset).
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
// Characters NOT permitted in an HTTP header value (tab and printable
// ASCII plus 0x80-0xFF are allowed).
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
|
|
|
|
/**
 * Throw when the given header name is empty or not a legal HTTP token.
 *
 * @param String name Header name (coerced to string before checking)
 * @return Void
 */
function validateName(name) {
  const stringified = `${name}`;
  if (stringified === '' || invalidTokenRegex.test(stringified)) {
    throw new TypeError(`${stringified} is not a legal HTTP header name`);
  }
}
|
|
|
|
/**
 * Throw when the given header value contains forbidden characters.
 *
 * @param String value Header value (coerced to string before checking)
 * @return Void
 */
function validateValue(value) {
  const stringified = `${value}`;
  if (invalidHeaderCharRegex.test(stringified)) {
    throw new TypeError(`${stringified} is not a legal HTTP header value`);
  }
}
|
|
|
|
/**
 * Find the stored key in the map object matching a header name,
 * compared case-insensitively.
 *
 * Returns undefined if not found.
 *
 * @param Object map Backing store of header name -> values
 * @param String name Header name
 * @return String|Undefined The stored (case-preserved) key, if any
 */
function find(map, name) {
  const lowered = name.toLowerCase();
  // for...in so any enumerable key of the store is considered
  for (const storedKey in map) {
    if (storedKey.toLowerCase() === lowered) {
      return storedKey;
    }
  }
  return undefined;
}
|
|
|
|
// Module-private symbol keying the backing store of name -> [values]
// entries on Headers instances.
const MAP = Symbol('map');
class Headers {
  /**
   * Headers class
   *
   * @param Object headers Response headers
   * @return Void
   */
  constructor() {
    // (transpiled default parameter: init defaults to undefined)
    let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;

    // null prototype: header names can never collide with Object.prototype
    this[MAP] = Object.create(null);

    if (init instanceof Headers) {
      // copy entry-by-entry so the two instances share no state
      const rawHeaders = init.raw();
      const headerNames = Object.keys(rawHeaders);

      for (const headerName of headerNames) {
        for (const value of rawHeaders[headerName]) {
          this.append(headerName, value);
        }
      }

      return;
    }

    // We don't worry about converting prop to ByteString here as append()
    // will handle it.
    if (init == null) ; else if (typeof init === 'object') {
      const method = init[Symbol.iterator];
      if (method != null) {
        if (typeof method !== 'function') {
          throw new TypeError('Header pairs must be iterable');
        }

        // sequence<sequence<ByteString>>
        // Note: per spec we have to first exhaust the lists then process them
        const pairs = [];
        for (const pair of init) {
          if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
            throw new TypeError('Each header pair must be iterable');
          }
          pairs.push(Array.from(pair));
        }

        for (const pair of pairs) {
          if (pair.length !== 2) {
            throw new TypeError('Each header pair must be a name/value tuple');
          }
          this.append(pair[0], pair[1]);
        }
      } else {
        // record<ByteString, ByteString>
        for (const key of Object.keys(init)) {
          const value = init[key];
          this.append(key, value);
        }
      }
    } else {
      throw new TypeError('Provided initializer must be an object');
    }
  }

  /**
   * Return combined header value given name
   *
   * @param String name Header name
   * @return Mixed String of joined values, or null when the name is absent
   */
  get(name) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key === undefined) {
      return null;
    }

    // multiple values for one name are joined with ", "
    return this[MAP][key].join(', ');
  }

  /**
   * Iterate over all headers
   *
   * @param Function callback Executed for each item with parameters (value, name, thisArg)
   * @param Boolean thisArg `this` context for callback function
   * @return Void
   */
  forEach(callback) {
    let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;

    let pairs = getHeaders(this);
    let i = 0;
    while (i < pairs.length) {
      var _pairs$i = pairs[i];
      const name = _pairs$i[0],
            value = _pairs$i[1];

      callback.call(thisArg, value, name, this);
      // re-snapshot after the callback so mutation during iteration is observed
      pairs = getHeaders(this);
      i++;
    }
  }

  /**
   * Overwrite header values given name
   *
   * @param String name Header name
   * @param String value Header value
   * @return Void
   */
  set(name, value) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    // reuse the originally stored (case-preserved) key when one exists
    this[MAP][key !== undefined ? key : name] = [value];
  }

  /**
   * Append a value onto existing header
   *
   * @param String name Header name
   * @param String value Header value
   * @return Void
   */
  append(name, value) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      this[MAP][key].push(value);
    } else {
      this[MAP][name] = [value];
    }
  }

  /**
   * Check for header name existence
   *
   * @param String name Header name
   * @return Boolean
   */
  has(name) {
    name = `${name}`;
    validateName(name);
    return find(this[MAP], name) !== undefined;
  }

  /**
   * Delete all header values given name
   *
   * @param String name Header name
   * @return Void
   */
  delete(name) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      delete this[MAP][key];
    }
  }

  /**
   * Return raw headers (non-spec api)
   *
   * @return Object The live backing store (name -> Array of values); not a copy
   */
  raw() {
    return this[MAP];
  }

  /**
   * Get an iterator on keys.
   *
   * @return Iterator
   */
  keys() {
    return createHeadersIterator(this, 'key');
  }

  /**
   * Get an iterator on values.
   *
   * @return Iterator
   */
  values() {
    return createHeadersIterator(this, 'value');
  }

  /**
   * Get an iterator on entries.
   *
   * This is the default iterator of the Headers object.
   *
   * @return Iterator
   */
  [Symbol.iterator]() {
    return createHeadersIterator(this, 'key+value');
  }
}
|
|
// entries() is an alias for the default iterator
Headers.prototype.entries = Headers.prototype[Symbol.iterator];

// Brand the class for Object.prototype.toString
Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
  value: 'Headers',
  writable: false,
  enumerable: false,
  configurable: true
});

// Expose the public API methods as enumerable prototype properties
Object.defineProperties(Headers.prototype, {
  get: { enumerable: true },
  forEach: { enumerable: true },
  set: { enumerable: true },
  append: { enumerable: true },
  has: { enumerable: true },
  delete: { enumerable: true },
  keys: { enumerable: true },
  values: { enumerable: true },
  entries: { enumerable: true }
});
|
|
|
|
/**
 * Produce a sorted snapshot of a Headers object's entries.
 *
 * @param Headers headers Headers instance to read
 * @param String kind 'key' | 'value' | anything else yields [name, value] pairs
 * @return Array Sorted array of lowercased names, joined values, or pairs
 */
function getHeaders(headers) {
  let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';

  // sort the stored (case-preserved) names for deterministic iteration order
  const sortedKeys = Object.keys(headers[MAP]).sort();

  let project;
  if (kind === 'key') {
    project = function (k) {
      return k.toLowerCase();
    };
  } else if (kind === 'value') {
    project = function (k) {
      return headers[MAP][k].join(', ');
    };
  } else {
    project = function (k) {
      return [k.toLowerCase(), headers[MAP][k].join(', ')];
    };
  }
  return sortedKeys.map(project);
}
|
|
|
|
// Module-private symbol keying iterator state ({ target, kind, index })
const INTERNAL = Symbol('internal');

/**
 * Build a Headers iterator of the given kind.
 *
 * @param Headers target Headers object to iterate
 * @param String kind 'key' | 'value' | 'key+value'
 * @return Iterator
 */
function createHeadersIterator(target, kind) {
  const iterator = Object.create(HeadersIteratorPrototype);
  iterator[INTERNAL] = {
    target,
    kind,
    index: 0
  };
  return iterator;
}
|
|
|
|
const HeadersIteratorPrototype = Object.setPrototypeOf({
  next() {
    // istanbul ignore if
    // guard against next() being called on a foreign `this`
    if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
      throw new TypeError('Value of `this` is not a HeadersIterator');
    }

    var _INTERNAL = this[INTERNAL];
    const target = _INTERNAL.target,
          kind = _INTERNAL.kind,
          index = _INTERNAL.index;

    // re-snapshot on every call so mutation of the Headers object between
    // next() calls is observed (mirrors Headers#forEach)
    const values = getHeaders(target, kind);
    const len = values.length;
    if (index >= len) {
      return {
        value: undefined,
        done: true
      };
    }

    this[INTERNAL].index = index + 1;

    return {
      value: values[index],
      done: false
    };
  }
  // second argument: inherit from %IteratorPrototype% (reached via an array
  // iterator) so for-of/spread recognize this as a proper iterator
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));

// Brand the iterator for Object.prototype.toString
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
  value: 'HeadersIterator',
  writable: false,
  enumerable: false,
  configurable: true
});
|
|
|
|
/**
 * Export the Headers object in a form that Node.js can consume.
 *
 * @param Headers headers
 * @return Object Plain (null-prototype) object of name -> Array of values,
 *                except Host which is collapsed to a single string
 */
function exportNodeCompatibleHeaders(headers) {
  // shallow copy of the backing store; __proto__: null keeps names from
  // colliding with Object.prototype
  const obj = Object.assign({ __proto__: null }, headers[MAP]);

  // http.request() only supports string as Host header. This hack makes
  // specifying custom Host header possible.
  const hostHeaderKey = find(headers[MAP], 'Host');
  if (hostHeaderKey !== undefined) {
    obj[hostHeaderKey] = obj[hostHeaderKey][0];
  }

  return obj;
}
|
|
|
|
/**
|
|
* Create a Headers object from an object of headers, ignoring those that do
|
|
* not conform to HTTP grammar productions.
|
|
*
|
|
* @param Object obj Object of headers
|
|
* @return Headers
|
|
*/
|
|
function createHeadersLenient(obj) {
  const headers = new Headers();
  for (const name of Object.keys(obj)) {
    // silently drop names that are not valid HTTP tokens
    if (invalidTokenRegex.test(name)) {
      continue;
    }
    if (Array.isArray(obj[name])) {
      for (const val of obj[name]) {
        // silently drop values containing forbidden characters
        if (invalidHeaderCharRegex.test(val)) {
          continue;
        }
        // write the backing store directly, bypassing append()'s
        // throwing validation (hence "lenient")
        if (headers[MAP][name] === undefined) {
          headers[MAP][name] = [val];
        } else {
          headers[MAP][name].push(val);
        }
      }
    } else if (!invalidHeaderCharRegex.test(obj[name])) {
      headers[MAP][name] = [obj[name]];
    }
  }
  return headers;
}
|
|
|
|
// Module-private symbol keying Response internal state
const INTERNALS$1 = Symbol('Response internals');

// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;
|
|
|
|
/**
|
|
* Response class
|
|
*
|
|
* @param Stream body Readable stream
|
|
* @param Object opts Response options
|
|
* @return Void
|
|
*/
|
|
class Response {
  constructor() {
    // (transpiled default parameters: body = null, opts = {})
    let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
    let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    Body.call(this, body, opts);

    const status = opts.status || 200;
    const headers = new Headers(opts.headers);

    // infer Content-Type from the body when none was supplied
    if (body != null && !headers.has('Content-Type')) {
      const contentType = extractContentType(body);
      if (contentType) {
        headers.append('Content-Type', contentType);
      }
    }

    // NOTE: ok/redirected are derived by the getters below (from status and
    // counter respectively); any opts.ok / opts.redirected are ignored.
    this[INTERNALS$1] = {
      url: opts.url,
      status,
      statusText: opts.statusText || STATUS_CODES[status],
      headers,
      counter: opts.counter
    };
  }

  get url() {
    return this[INTERNALS$1].url || '';
  }

  get status() {
    return this[INTERNALS$1].status;
  }

  /**
   * Convenience property representing if the request ended normally
   */
  get ok() {
    return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
  }

  get redirected() {
    return this[INTERNALS$1].counter > 0;
  }

  get statusText() {
    return this[INTERNALS$1].statusText;
  }

  get headers() {
    return this[INTERNALS$1].headers;
  }

  /**
   * Clone this response
   *
   * NOTE(review): the ok/redirected options passed here are ignored by the
   * constructor, and counter is not forwarded — so a cloned response reports
   * redirected === false regardless of the original. Confirm against callers.
   *
   * @return Response
   */
  clone() {
    return new Response(clone(this), {
      url: this.url,
      status: this.status,
      statusText: this.statusText,
      headers: this.headers,
      ok: this.ok,
      redirected: this.redirected
    });
  }
}
|
|
|
|
// Attach the shared Body mix-in methods to Response
Body.mixIn(Response.prototype);

// Expose the public accessors as enumerable prototype properties
Object.defineProperties(Response.prototype, {
  url: { enumerable: true },
  status: { enumerable: true },
  ok: { enumerable: true },
  redirected: { enumerable: true },
  statusText: { enumerable: true },
  headers: { enumerable: true },
  clone: { enumerable: true }
});

// Brand the class for Object.prototype.toString
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
  value: 'Response',
  writable: false,
  enumerable: false,
  configurable: true
});
|
|
|
|
// Module-private symbol keying Request internal state
const INTERNALS$2 = Symbol('Request internals');

// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;

// whether readable streams support destroy(); gates stream cancellation below
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
|
|
|
|
/**
 * Check if a value is an instance of Request.
 *
 * Duck-types via the module-private INTERNALS$2 symbol, so only Request
 * objects created by this copy of the module are recognized.
 *
 * @param Mixed input
 * @return Boolean
 */
function isRequest(input) {
  return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
}
|
|
|
|
/**
 * Duck-type check for an AbortSignal: any object whose direct prototype's
 * constructor is named "AbortSignal" (so cross-realm signals/polyfills pass).
 *
 * @param Mixed signal
 * @return Boolean
 */
function isAbortSignal(signal) {
  if (!signal || typeof signal !== 'object') {
    return false;
  }
  const proto = Object.getPrototypeOf(signal);
  return Boolean(proto && proto.constructor.name === 'AbortSignal');
}
|
|
|
|
/**
|
|
* Request class
|
|
*
|
|
* @param Mixed input Url or Request instance
|
|
* @param Object init Custom options
|
|
* @return Void
|
|
*/
|
|
class Request {
  constructor(input) {
    // (transpiled default parameter: init defaults to {})
    let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    let parsedURL;

    // normalize input
    if (!isRequest(input)) {
      if (input && input.href) {
        // in order to support Node.js' Url objects; though WHATWG's URL objects
        // will fall into this branch also (since their `toString()` will return
        // `href` property anyway)
        parsedURL = parse_url(input.href);
      } else {
        // coerce input to a string before attempting to parse
        parsedURL = parse_url(`${input}`);
      }
      input = {};
    } else {
      parsedURL = parse_url(input.url);
    }

    let method = init.method || input.method || 'GET';
    method = method.toUpperCase();

    if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
      throw new TypeError('Request with GET/HEAD method cannot have body');
    }

    // when copying another Request, tee its body via clone() so both
    // instances stay readable
    let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;

    Body.call(this, inputBody, {
      timeout: init.timeout || input.timeout || 0,
      size: init.size || input.size || 0
    });

    const headers = new Headers(init.headers || input.headers || {});

    // infer Content-Type from the body when none was supplied
    if (inputBody != null && !headers.has('Content-Type')) {
      const contentType = extractContentType(inputBody);
      if (contentType) {
        headers.append('Content-Type', contentType);
      }
    }

    // an explicitly present init.signal wins over input.signal, even if undefined
    let signal = isRequest(input) ? input.signal : null;
    if ('signal' in init) signal = init.signal;

    if (signal != null && !isAbortSignal(signal)) {
      throw new TypeError('Expected signal to be an instanceof AbortSignal');
    }

    this[INTERNALS$2] = {
      method,
      redirect: init.redirect || input.redirect || 'follow',
      headers,
      parsedURL,
      signal
    };

    // node-fetch-only options
    this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
    this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
    this.counter = init.counter || input.counter || 0;
    this.agent = init.agent || input.agent;
  }

  get method() {
    return this[INTERNALS$2].method;
  }

  get url() {
    // re-serialized from the parsed URL on every access
    return format_url(this[INTERNALS$2].parsedURL);
  }

  get headers() {
    return this[INTERNALS$2].headers;
  }

  get redirect() {
    return this[INTERNALS$2].redirect;
  }

  get signal() {
    return this[INTERNALS$2].signal;
  }

  /**
   * Clone this request
   *
   * @return Request
   */
  clone() {
    return new Request(this);
  }
}
|
|
|
|
// Attach the shared Body mix-in methods to Request
Body.mixIn(Request.prototype);

// Brand the class for Object.prototype.toString
Object.defineProperty(Request.prototype, Symbol.toStringTag, {
  value: 'Request',
  writable: false,
  enumerable: false,
  configurable: true
});

// Expose the public accessors as enumerable prototype properties
Object.defineProperties(Request.prototype, {
  method: { enumerable: true },
  url: { enumerable: true },
  headers: { enumerable: true },
  redirect: { enumerable: true },
  clone: { enumerable: true },
  signal: { enumerable: true }
});
|
|
|
|
/**
|
|
* Convert a Request to Node.js http request options.
|
|
*
|
|
* @param Request A Request instance
|
|
* @return Object The options object to be passed to http.request
|
|
*/
|
|
function getNodeRequestOptions(request) {
  const parsedURL = request[INTERNALS$2].parsedURL;
  // copy so the normalization below never mutates the caller's Request
  const headers = new Headers(request[INTERNALS$2].headers);

  // fetch step 1.3
  if (!headers.has('Accept')) {
    headers.set('Accept', '*/*');
  }

  // Basic fetch
  if (!parsedURL.protocol || !parsedURL.hostname) {
    throw new TypeError('Only absolute URLs are supported');
  }

  if (!/^https?:$/.test(parsedURL.protocol)) {
    throw new TypeError('Only HTTP(S) protocols are supported');
  }

  if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
    throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
  }

  // HTTP-network-or-cache fetch steps 2.4-2.7
  // POST/PUT without a body still send Content-Length: 0
  let contentLengthValue = null;
  if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
    contentLengthValue = '0';
  }
  if (request.body != null) {
    const totalBytes = getTotalBytes(request);
    // totalBytes is null for plain streams; then no Content-Length is set
    if (typeof totalBytes === 'number') {
      contentLengthValue = String(totalBytes);
    }
  }
  if (contentLengthValue) {
    headers.set('Content-Length', contentLengthValue);
  }

  // HTTP-network-or-cache fetch step 2.11
  if (!headers.has('User-Agent')) {
    headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
  }

  // HTTP-network-or-cache fetch step 2.15
  if (request.compress && !headers.has('Accept-Encoding')) {
    headers.set('Accept-Encoding', 'gzip,deflate');
  }

  // the agent option may be a factory taking the parsed URL
  let agent = request.agent;
  if (typeof agent === 'function') {
    agent = agent(parsedURL);
  }

  if (!headers.has('Connection') && !agent) {
    headers.set('Connection', 'close');
  }

  // HTTP-network fetch step 4.2
  // chunked encoding is handled by Node.js

  return Object.assign({}, parsedURL, {
    method: request.method,
    headers: exportNodeCompatibleHeaders(headers),
    agent
  });
}
|
|
|
|
/**
|
|
* abort-error.js
|
|
*
|
|
* AbortError interface for cancelled requests
|
|
*/
|
|
|
|
/**
|
|
* Create AbortError instance
|
|
*
|
|
* @param String message Error message for human
|
|
* @return AbortError
|
|
*/
|
|
function AbortError(message) {
  Error.call(this, message);

  // fields assigned explicitly rather than relying on Error.call above
  this.type = 'aborted';
  this.message = message;

  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}

// classic prototypal subclassing of Error (pre-class style kept by the bundler)
AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';
|
|
|
|
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
// PassThrough$1 tees response streams; resolve_url resolves redirect Locations
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
|
|
|
|
/**
|
|
* Fetch function
|
|
*
|
|
* @param Mixed url Absolute url or Request instance
|
|
* @param Object opts Fetch options
|
|
* @return Promise
|
|
*/
|
|
function fetch(url, opts) {

  // allow custom promise
  if (!fetch.Promise) {
    throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
  }

  Body.Promise = fetch.Promise;

  // wrap http.request into fetch
  return new fetch.Promise(function (resolve, reject) {
    // build request object
    const request = new Request(url, opts);
    const options = getNodeRequestOptions(request);

    const send = (options.protocol === 'https:' ? https : http).request;
    const signal = request.signal;

    let response = null;

    // reject with an AbortError and tear down any streams already in flight
    const abort = function abort() {
      let error = new AbortError('The user aborted a request.');
      reject(error);
      if (request.body && request.body instanceof Stream.Readable) {
        request.body.destroy(error);
      }
      if (!response || !response.body) return;
      response.body.emit('error', error);
    };

    // signal already fired before we even started
    if (signal && signal.aborted) {
      abort();
      return;
    }

    const abortAndFinalize = function abortAndFinalize() {
      abort();
      finalize();
    };

    // send request
    const req = send(options);
    let reqTimeout;

    if (signal) {
      signal.addEventListener('abort', abortAndFinalize);
    }

    // abort the socket, detach the abort listener, clear any pending timeout
    function finalize() {
      req.abort();
      if (signal) signal.removeEventListener('abort', abortAndFinalize);
      clearTimeout(reqTimeout);
    }

    // the timeout clock starts once a socket is assigned, not at call time
    if (request.timeout) {
      req.once('socket', function (socket) {
        reqTimeout = setTimeout(function () {
          reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
          finalize();
        }, request.timeout);
      });
    }

    req.on('error', function (err) {
      reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
      finalize();
    });

    req.on('response', function (res) {
      clearTimeout(reqTimeout);

      const headers = createHeadersLenient(res.headers);

      // HTTP fetch step 5
      if (fetch.isRedirect(res.statusCode)) {
        // HTTP fetch step 5.2
        const location = headers.get('Location');

        // HTTP fetch step 5.3
        const locationURL = location === null ? null : resolve_url(request.url, location);

        // HTTP fetch step 5.5
        switch (request.redirect) {
          case 'error':
            reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'));
            finalize();
            return;
          case 'manual':
            // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
            if (locationURL !== null) {
              // handle corrupted header
              try {
                headers.set('Location', locationURL);
              } catch (err) {
                // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
                reject(err);
              }
            }
            break;
          case 'follow':
            // HTTP-redirect fetch step 2
            if (locationURL === null) {
              break;
            }

            // HTTP-redirect fetch step 5
            if (request.counter >= request.follow) {
              reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 6 (counter increment)
            // Create a new Request object.
            const requestOpts = {
              headers: new Headers(request.headers),
              follow: request.follow,
              counter: request.counter + 1,
              agent: request.agent,
              compress: request.compress,
              method: request.method,
              body: request.body,
              signal: request.signal,
              timeout: request.timeout
            };

            // HTTP-redirect fetch step 9
            // a stream body (unknown size) cannot be replayed for the redirect
            if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
              reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 11
            if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
              requestOpts.method = 'GET';
              requestOpts.body = undefined;
              requestOpts.headers.delete('content-length');
            }

            // HTTP-redirect fetch step 15
            resolve(fetch(new Request(locationURL, requestOpts)));
            finalize();
            return;
        }
      }

      // prepare response
      res.once('end', function () {
        if (signal) signal.removeEventListener('abort', abortAndFinalize);
      });
      let body = res.pipe(new PassThrough$1());

      const response_options = {
        url: request.url,
        status: res.statusCode,
        statusText: res.statusMessage,
        headers: headers,
        size: request.size,
        timeout: request.timeout,
        counter: request.counter
      };

      // HTTP-network fetch step 12.1.1.3
      const codings = headers.get('Content-Encoding');

      // HTTP-network fetch step 12.1.1.4: handle content codings

      // in following scenarios we ignore compression support
      // 1. compression support is disabled
      // 2. HEAD request
      // 3. no Content-Encoding header
      // 4. no content response (204)
      // 5. content not modified response (304)
      if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // For Node v6+
      // Be less strict when decoding compressed responses, since sometimes
      // servers send slightly invalid responses that are still accepted
      // by common browsers.
      // Always using Z_SYNC_FLUSH is what cURL does.
      const zlibOptions = {
        flush: zlib.Z_SYNC_FLUSH,
        finishFlush: zlib.Z_SYNC_FLUSH
      };

      // for gzip
      if (codings == 'gzip' || codings == 'x-gzip') {
        body = body.pipe(zlib.createGunzip(zlibOptions));
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // for deflate
      if (codings == 'deflate' || codings == 'x-deflate') {
        // handle the infamous raw deflate response from old servers
        // a hack for old IIS and Apache servers
        // peek at the first chunk to decide zlib-wrapped vs raw deflate
        const raw = res.pipe(new PassThrough$1());
        raw.once('data', function (chunk) {
          // see http://stackoverflow.com/questions/37519828
          if ((chunk[0] & 0x0F) === 0x08) {
            body = body.pipe(zlib.createInflate());
          } else {
            body = body.pipe(zlib.createInflateRaw());
          }
          response = new Response(body, response_options);
          resolve(response);
        });
        return;
      }

      // for br
      if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
        body = body.pipe(zlib.createBrotliDecompress());
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // otherwise, use response as-is
      response = new Response(body, response_options);
      resolve(response);
    });

    writeToStream(req, request);
  });
}
|
|
/**
|
|
* Redirect code matching
|
|
*
|
|
* @param Number code Status code
|
|
* @return Boolean
|
|
*/
|
|
fetch.isRedirect = function (code) {
  // 301/302/303/307/308 are the redirect statuses handled above
  // (indexOf uses strict equality, matching the original chained ===)
  return [301, 302, 303, 307, 308].indexOf(code) !== -1;
};
|
|
|
|
// expose Promise
fetch.Promise = global.Promise;

// CommonJS/ES-module interop: the module IS the fetch function, with the
// named classes attached and `default` pointing back at the function itself
module.exports = exports = fetch;
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = exports;
exports.Headers = Headers;
exports.Request = Request;
exports.Response = Response;
exports.FetchError = FetchError;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 462:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
|
|
// See http://www.robvanderwoude.com/escapechars.php
// cmd.exe metacharacters needing caret-escaping (global flag: replace all)
const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;
|
|
|
|
/**
 * Escape a command name for cmd.exe by caret-escaping shell metacharacters.
 *
 * @param String arg Command to escape
 * @return String Escaped command
 */
function escapeCommand(arg) {
  // Escape meta chars
  return arg.replace(metaCharsRegExp, '^$1');
}
|
|
|
|
/**
 * Quote and escape a single argument for cmd.exe.
 *
 * Algorithm based on https://qntm.org/cmd
 *
 * @param String arg Argument (coerced to string)
 * @param Boolean doubleEscapeMetaChars Apply caret-escaping twice (for
 *   commands that are themselves re-parsed by cmd.exe)
 * @return String Quoted, escaped argument
 */
function escapeArgument(arg, doubleEscapeMetaChars) {
  let escaped = `${arg}`
    // Backslashes preceding a double quote: double them, escape the quote
    .replace(/(\\*)"/g, '$1$1\\"')
    // Trailing backslashes (they will precede the closing quote): double them
    .replace(/(\\*)$/, '$1$1');

  // All other backslashes occur literally.
  // Quote the whole thing, then caret-escape cmd.exe metacharacters
  escaped = `"${escaped}"`.replace(metaCharsRegExp, '^$1');

  // Double escape meta chars if necessary
  if (doubleEscapeMetaChars) {
    escaped = escaped.replace(metaCharsRegExp, '^$1');
  }

  return escaped;
}
|
|
|
|
// Separate exports: command names and arguments need different escaping
module.exports.command = escapeCommand;
module.exports.argument = escapeArgument;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 463:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
// Unwrap a transpiled ES-module namespace to its default export, when present.
function _interopDefault(ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
|
|
|
|
var deprecation = __webpack_require__(692);
|
|
var once = _interopDefault(__webpack_require__(969));
|
|
|
|
// once() makes this a no-op after the first call, so only the first
// deprecation encountered is ever logged
const logOnce = once(deprecation => console.warn(deprecation));
|
|
/**
|
|
* Error with extra properties to help with debugging
|
|
*/
|
|
|
|
class RequestError extends Error {
  constructor(message, statusCode, options) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */

    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = "HttpError";
    this.status = statusCode;
    // deprecated alias for .status; warns (once) on first access
    Object.defineProperty(this, "code", {
      get() {
        logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
        return statusCode;
      }

    });
    this.headers = options.headers || {}; // redact request credentials without mutating original request options

    const requestCopy = Object.assign({}, options.request);

    // keep the auth scheme but redact the credential itself
    if (options.request.headers.authorization) {
      requestCopy.headers = Object.assign({}, options.request.headers, {
        authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
      });
    }

    requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
    // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
    .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
    // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
    .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
    this.request = requestCopy;
  }

}
|
|
|
|
exports.RequestError = RequestError;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 464:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript-emitted __awaiter helper: drives a generator function as if it
// were async/await, adopting each yielded value as a promise of type P.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise values so every step can be chained uniformly.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator until done; resolve with its return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// TypeScript-emitted __importStar helper: returns ES modules unchanged;
// for CommonJS modules, copies own enumerable properties onto a fresh
// namespace object and exposes the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
// TypeScript-emitted __importDefault helper: ES modules pass through
// unchanged; CommonJS exports are wrapped so they can be read via `.default`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const assert = __importStar(__webpack_require__(357));
|
|
const core = __importStar(__webpack_require__(470));
|
|
const fs = __importStar(__webpack_require__(747));
|
|
const github = __importStar(__webpack_require__(469));
|
|
const io = __importStar(__webpack_require__(1));
|
|
const path = __importStar(__webpack_require__(622));
|
|
const retryHelper = __importStar(__webpack_require__(587));
|
|
const toolCache = __importStar(__webpack_require__(533));
|
|
const v4_1 = __importDefault(__webpack_require__(826));
|
|
const IS_WINDOWS = process.platform === 'win32';
|
|
/**
 * Downloads a repository via the GitHub API archive endpoint, extracts it,
 * and moves its contents into `repositoryPath`.
 *
 * @param authToken token used to authenticate the API client
 * @param owner repository owner
 * @param repo repository name
 * @param ref git ref to download; when both ref and commit are empty, the
 *            repo's default branch is resolved and used
 * @param commit commit SHA to download (takes precedence over ref)
 * @param repositoryPath directory that receives the repository content
 */
function downloadRepository(authToken, owner, repo, ref, commit, repositoryPath) {
    return __awaiter(this, void 0, void 0, function* () {
        // Determine the default branch
        if (!ref && !commit) {
            core.info('Determining the default branch');
            ref = yield getDefaultBranch(authToken, owner, repo);
        }
        // Download the archive (with retries)
        let archiveData = yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
            core.info('Downloading the archive');
            return yield downloadArchive(authToken, owner, repo, ref, commit);
        }));
        // Write archive to disk. Note: the '.tar.gz' name is used even on
        // Windows, where the payload is actually a zipball; only the
        // extraction step below distinguishes the formats.
        core.info('Writing archive to disk');
        const uniqueId = v4_1.default();
        const archivePath = path.join(repositoryPath, `${uniqueId}.tar.gz`);
        yield fs.promises.writeFile(archivePath, archiveData);
        archiveData = Buffer.from(''); // Free memory
        // Extract archive
        core.info('Extracting the archive');
        const extractPath = path.join(repositoryPath, uniqueId);
        yield io.mkdirP(extractPath);
        if (IS_WINDOWS) {
            yield toolCache.extractZip(archivePath, extractPath);
        }
        else {
            yield toolCache.extractTar(archivePath, extractPath);
        }
        // Fix: rmRF returns a promise; previously it was not yielded, so the
        // deletion could still be running while the moves below executed.
        yield io.rmRF(archivePath);
        // Determine the path of the repository content. The archive contains
        // a top-level folder and the repository content is inside.
        const archiveFileNames = yield fs.promises.readdir(extractPath);
        assert.ok(archiveFileNames.length === 1, 'Expected exactly one directory inside archive');
        const archiveVersion = archiveFileNames[0]; // The top-level folder name includes the short SHA
        core.info(`Resolved version ${archiveVersion}`);
        const tempRepositoryPath = path.join(extractPath, archiveVersion);
        // Move the files
        for (const fileName of yield fs.promises.readdir(tempRepositoryPath)) {
            const sourcePath = path.join(tempRepositoryPath, fileName);
            const targetPath = path.join(repositoryPath, fileName);
            if (IS_WINDOWS) {
                yield io.cp(sourcePath, targetPath, { recursive: true }); // Copy on Windows (Windows Defender may have a lock)
            }
            else {
                yield io.mv(sourcePath, targetPath);
            }
        }
        // Fix: likewise await the cleanup of the scratch extraction folder.
        yield io.rmRF(extractPath);
    });
}
exports.downloadRepository = downloadRepository;
|
|
/**
 * Looks up the default branch name
 *
 * Queries the repo metadata via the GitHub API (with retries). Wiki repos
 * (ending in '.wiki') 404 on this endpoint and are assumed to default to
 * 'master'. The result is returned as a fully-qualified ref
 * ('refs/heads/<branch>') unless it already starts with 'refs/'.
 */
function getDefaultBranch(authToken, owner, repo) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
            core.info('Retrieving the default branch name');
            const octokit = new github.GitHub(authToken);
            let result;
            try {
                // Get the default branch from the repo info
                const response = yield octokit.repos.get({ owner, repo });
                result = response.data.default_branch;
                assert.ok(result, 'default_branch cannot be empty');
            }
            catch (err) {
                // Handle .wiki repo
                if (err['status'] === 404 && repo.toUpperCase().endsWith('.WIKI')) {
                    result = 'master';
                }
                // Otherwise error
                else {
                    throw err;
                }
            }
            // Print the default branch
            core.info(`Default branch '${result}'`);
            // Prefix with 'refs/heads'
            if (!result.startsWith('refs/')) {
                result = `refs/heads/${result}`;
            }
            return result;
        }));
    });
}
exports.getDefaultBranch = getDefaultBranch;
|
|
/**
 * Downloads the repository archive (zipball on Windows, tarball elsewhere)
 * for the given ref/commit and returns the raw bytes.
 *
 * @param authToken token used to authenticate the API client
 * @param owner repository owner
 * @param repo repository name
 * @param ref git ref to download (used when no commit is given)
 * @param commit commit SHA to download (takes precedence over ref)
 * @returns Buffer containing the archive contents
 */
function downloadArchive(authToken, owner, repo, ref, commit) {
    return __awaiter(this, void 0, void 0, function* () {
        const octokit = new github.GitHub(authToken);
        const params = {
            owner: owner,
            repo: repo,
            archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
            ref: commit || ref
        };
        const response = yield octokit.repos.getArchiveLink(params);
        // Fix: strict comparison — statusCode is numeric, loose `!=` invited
        // accidental type coercion.
        if (response.status !== 200) {
            throw new Error(`Unexpected response from GitHub API. Status: ${response.status}, Data: ${response.data}`);
        }
        return Buffer.from(response.data); // response.data is ArrayBuffer
    });
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 469:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript-emitted __importStar helper (duplicated per bundled module):
// returns ES modules unchanged; wraps CommonJS exports in a namespace
// object with the module exposed as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
// Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts
|
|
const graphql_1 = __webpack_require__(898);
|
|
const rest_1 = __webpack_require__(0);
|
|
const Context = __importStar(__webpack_require__(262));
|
|
const httpClient = __importStar(__webpack_require__(539));
|
|
// We need this in order to extend Octokit
|
|
rest_1.Octokit.prototype = new rest_1.Octokit();
|
|
exports.context = new Context.Context();
|
|
/**
 * Octokit client wrapper used by GitHub Actions: preconfigured with token
 * auth, the GITHUB_API_URL base URL, and an HTTP(S) proxy agent when a
 * proxy is configured for the destination URL.
 */
class GitHub extends rest_1.Octokit {
    constructor(token, opts) {
        super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts)));
        // graphql gets its own defaults (base URL, auth header, proxy agent).
        this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts));
    }
    /**
     * Disambiguates the constructor overload parameters
     * Returns a [token, options] pair: token is '' when the first argument
     * was actually the options object.
     */
    static disambiguate(token, opts) {
        return [
            typeof token === 'string' ? token : '',
            typeof token === 'object' ? token : opts || {}
        ];
    }
    // Builds the REST client options: base URL, auth string, proxy agent.
    static getOctokitOptions(args) {
        const token = args[0];
        const options = Object.assign({}, args[1]); // Shallow clone - don't mutate the object provided by the caller
        // Base URL - GHES or Dotcom
        options.baseUrl = options.baseUrl || this.getApiBaseUrl();
        // Auth
        const auth = GitHub.getAuthString(token, options);
        if (auth) {
            options.auth = auth;
        }
        // Proxy
        const agent = GitHub.getProxyAgent(options.baseUrl, options);
        if (agent) {
            // Shallow clone - don't mutate the object provided by the caller
            options.request = options.request ? Object.assign({}, options.request) : {};
            // Set the agent
            options.request.agent = agent;
        }
        return options;
    }
    // Builds a graphql() function with baked-in base URL, auth and proxy.
    static getGraphQL(args) {
        const defaults = {};
        defaults.baseUrl = this.getGraphQLBaseUrl();
        const token = args[0];
        const options = args[1];
        // Authorization
        const auth = this.getAuthString(token, options);
        if (auth) {
            defaults.headers = {
                authorization: auth
            };
        }
        // Proxy
        const agent = GitHub.getProxyAgent(defaults.baseUrl, options);
        if (agent) {
            defaults.request = { agent };
        }
        return graphql_1.graphql.defaults(defaults);
    }
    // Returns opts.auth verbatim when it is a string, otherwise "token <token>".
    // Exactly one of token / opts.auth must be provided, else this throws.
    static getAuthString(token, options) {
        // Validate args
        if (!token && !options.auth) {
            throw new Error('Parameter token or opts.auth is required');
        }
        else if (token && options.auth) {
            throw new Error('Parameters token and opts.auth may not both be specified');
        }
        return typeof options.auth === 'string' ? options.auth : `token ${token}`;
    }
    // Returns a proxy agent for destinationUrl, unless the caller already
    // supplied options.request.agent or no proxy applies to that URL.
    static getProxyAgent(destinationUrl, options) {
        var _a;
        if (!((_a = options.request) === null || _a === void 0 ? void 0 : _a.agent)) {
            if (httpClient.getProxyUrl(destinationUrl)) {
                const hc = new httpClient.HttpClient();
                return hc.getAgent(destinationUrl);
            }
        }
        return undefined;
    }
    // REST base URL: GHES via GITHUB_API_URL, otherwise dotcom.
    static getApiBaseUrl() {
        return process.env['GITHUB_API_URL'] || 'https://api.github.com';
    }
    // GraphQL base URL with any trailing '/' and '/graphql' suffix stripped.
    // NOTE(review): presumably the graphql client re-appends the path itself
    // — confirm against the @octokit/graphql defaults.
    static getGraphQLBaseUrl() {
        let url = process.env['GITHUB_GRAPHQL_URL'] || 'https://api.github.com/graphql';
        // Shouldn't be a trailing slash, but remove if so
        if (url.endsWith('/')) {
            url = url.substr(0, url.length - 1);
        }
        // Remove trailing "/graphql"
        if (url.toUpperCase().endsWith('/GRAPHQL')) {
            url = url.substr(0, url.length - '/graphql'.length);
        }
        return url;
    }
}
exports.GitHub = GitHub;
|
|
//# sourceMappingURL=github.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 470:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript-emitted __awaiter helper (duplicated per bundled module):
// runs a generator as if it were an async function.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise values so every step can be chained uniformly.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator until done; resolve with its return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const command_1 = __webpack_require__(431);
|
|
const os = __webpack_require__(87);
|
|
const path = __webpack_require__(622);
|
|
/**
|
|
* The code to exit an action
|
|
*/
|
|
// Compiled TypeScript enum: the IIFE populates both the name->value and
// value->name mappings on the exported ExitCode object.
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
|
//-----------------------------------------------------------------------
|
|
// Variables
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Sets env variable for this action and future actions in the job
|
|
* @param name the name of the variable to set
|
|
* @param val the value of the variable
|
|
*/
|
|
function exportVariable(name, val) {
    // Apply to the current process immediately...
    process.env[name] = val;
    // ...and issue the 'set-env' workflow command for future steps.
    command_1.issueCommand('set-env', { name }, val);
}
exports.exportVariable = exportVariable;
|
|
/**
|
|
* Registers a secret which will get masked from logs
|
|
* @param secret value of the secret
|
|
*/
|
|
function setSecret(secret) {
    // Issue 'add-mask' so the runner redacts this value wherever it appears in logs.
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
|
|
/**
|
|
* Prepends inputPath to the PATH (for this action and future actions)
|
|
* @param inputPath
|
|
*/
|
|
function addPath(inputPath) {
    // 'add-path' applies to future steps; the env assignment below prepends
    // the directory to PATH for the current process as well.
    command_1.issueCommand('add-path', {}, inputPath);
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
|
|
/**
|
|
* Gets the value of an input. The value is also trimmed.
|
|
*
|
|
* @param name name of the input to get
|
|
* @param options optional. See InputOptions.
|
|
* @returns string
|
|
*/
|
|
/**
 * Gets the value of an input from the INPUT_<NAME> environment variable
 * (spaces in the name become underscores). The value is trimmed.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string ('' when the input is absent and not required)
 */
function getInput(name, options) {
    const envKey = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    const rawValue = process.env[envKey] || '';
    if (options && options.required && !rawValue) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return rawValue.trim();
}
|
|
exports.getInput = getInput;
|
|
/**
|
|
* Sets the value of an output.
|
|
*
|
|
* @param name name of the output to set
|
|
* @param value value to store
|
|
*/
|
|
function setOutput(name, value) {
    // Issue the 'set-output' workflow command; the runner records the value.
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
|
|
//-----------------------------------------------------------------------
|
|
// Results
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Sets the action status to failed.
|
|
* When the action exits it will be with an exit code of 1
|
|
* @param message add error issue message
|
|
*/
|
|
function setFailed(message) {
    // Mark the process to exit with Failure (1) and log the error message.
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
|
|
//-----------------------------------------------------------------------
|
|
// Logging Commands
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Writes debug message to user log
|
|
* @param message debug message
|
|
*/
|
|
function debug(message) {
    // Emit the 'debug' workflow command with the message as payload.
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
|
|
/**
|
|
* Adds an error issue
|
|
* @param message error issue message
|
|
*/
|
|
function error(message) {
    // Emit the 'error' workflow command (annotation), without failing the step.
    command_1.issue('error', message);
}
exports.error = error;
|
|
/**
|
|
* Adds an warning issue
|
|
* @param message warning issue message
|
|
*/
|
|
function warning(message) {
    // Emit the 'warning' workflow command (annotation).
    command_1.issue('warning', message);
}
exports.warning = warning;
|
|
/**
|
|
* Writes info to log with console.log.
|
|
* @param message info message
|
|
*/
|
|
/**
 * Writes an info message to stdout followed by the platform line ending.
 * Unlike the other log helpers this bypasses the workflow-command protocol.
 * @param message info message
 */
function info(message) {
    process.stdout.write(`${message}${os.EOL}`);
}
|
|
exports.info = info;
|
|
/**
|
|
* Begin an output group.
|
|
*
|
|
* Output until the next `groupEnd` will be foldable in this group
|
|
*
|
|
* @param name The name of the output group
|
|
*/
|
|
function startGroup(name) {
    // Emit the 'group' workflow command; subsequent output folds under `name`.
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
|
|
/**
|
|
* End an output group.
|
|
*/
|
|
function endGroup() {
    // Emit the 'endgroup' workflow command, closing the current fold.
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
|
|
/**
|
|
* Wrap an asynchronous function call in a group.
|
|
*
|
|
* Returns the same type as the function itself.
|
|
*
|
|
* @param name The name of the group
|
|
* @param fn The function to wrap in the group
|
|
*/
|
|
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            // Always close the group, even when fn throws.
            endGroup();
        }
        return result;
    });
}
exports.group = group;
|
|
//# sourceMappingURL=core.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 471:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = authenticationBeforeRequest;
|
|
|
|
const btoa = __webpack_require__(675);
|
|
const uniq = __webpack_require__(126);
|
|
|
|
/**
 * Request hook that applies the configured authentication strategy to an
 * outgoing request: basic / token / app auth go into the Authorization
 * header; anything else falls back to query-string credentials.
 * Mutates `options` in place.
 */
function authenticationBeforeRequest(state, options) {
  const auth = state.auth

  // No authentication configured: leave the request untouched.
  if (!auth.type) {
    return
  }

  if (auth.type === "basic") {
    const hash = btoa(`${auth.username}:${auth.password}`)
    options.headers.authorization = `Basic ${hash}`
    return
  }

  if (auth.type === "token") {
    options.headers.authorization = `token ${auth.token}`
    return
  }

  if (auth.type === "app") {
    options.headers.authorization = `Bearer ${auth.token}`
    // GitHub App requests need the machine-man preview media type; dedupe
    // in case the caller already set it.
    const acceptHeaders = options.headers.accept
      .split(",")
      .concat("application/vnd.github.machine-man-preview+json")
    options.headers.accept = uniq(acceptHeaders).filter(Boolean).join(",")
    return
  }

  // Query-string fallback: append either access_token or client_id/secret.
  options.url += options.url.indexOf("?") === -1 ? "?" : "&"

  if (auth.token) {
    options.url += `access_token=${encodeURIComponent(auth.token)}`
    return
  }

  const key = encodeURIComponent(auth.key)
  const secret = encodeURIComponent(auth.secret)
  options.url += `client_id=${key}&client_secret=${secret}`
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 489:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
const path = __webpack_require__(622);
|
|
const which = __webpack_require__(814);
|
|
const pathKey = __webpack_require__(39)();
|
|
|
|
// Resolves `parsed.command` to an absolute executable path via `which`.
// Returns undefined when the command cannot be found.
function resolveCommandAttempt(parsed, withoutPathExt) {
    const cwd = process.cwd();
    const hasCustomCwd = parsed.options.cwd != null;

    // If a custom `cwd` was specified, we need to change the process cwd
    // because `which` will do stat calls but does not support a custom cwd
    if (hasCustomCwd) {
        try {
            process.chdir(parsed.options.cwd);
        } catch (err) {
            /* Empty */ // best-effort: an invalid cwd just means lookup runs from the current one
        }
    }

    let resolved;

    try {
        resolved = which.sync(parsed.command, {
            path: (parsed.options.env || process.env)[pathKey],
            // Passing just the delimiter effectively disables PATHEXT matching.
            pathExt: withoutPathExt ? path.delimiter : undefined,
        });
    } catch (e) {
        /* Empty */ // not found: fall through and return undefined
    } finally {
        // Always restore the original working directory.
        process.chdir(cwd);
    }

    // If we successfully resolved, ensure that an absolute path is returned
    // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
    if (resolved) {
        resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
    }

    return resolved;
}
|
|
|
|
// First attempt honors PATHEXT; the fallback disables it (helps when the
// command name already carries its extension).
function resolveCommand(parsed) {
    return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
}

module.exports = resolveCommand;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 497:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
// Unwraps transpiled ES-module default exports; all other values pass through.
function _interopDefault (ex) {
    if (ex && (typeof ex === 'object') && 'default' in ex) {
        return ex['default'];
    }
    return ex;
}
|
|
|
|
var deprecation = __webpack_require__(692);
|
|
var once = _interopDefault(__webpack_require__(969));
|
|
|
|
// Warns about a deprecation at most once per process lifetime.
const logOnce = once(deprecation => console.warn(deprecation));
|
|
/**
|
|
* Error with extra properties to help with debugging
|
|
*/
|
|
|
|
/**
 * Error with extra properties to help with debugging: the HTTP status,
 * response headers, and a copy of the request with credentials redacted.
 */
class RequestError extends Error {
  constructor(message, statusCode, options) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = "HttpError";
    this.status = statusCode;

    // Deprecated alias for `status`: warns once per process when read.
    Object.defineProperty(this, "code", {
      get() {
        logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
        return statusCode;
      }
    });

    this.headers = options.headers || {};

    // Redact request credentials on a shallow copy so the caller's
    // original request options stay untouched.
    const sanitizedRequest = Object.assign({}, options.request);
    const authHeader = options.request.headers.authorization;
    if (authHeader) {
      // Keep the scheme (e.g. "token", "Bearer"), hide the credential.
      sanitizedRequest.headers = Object.assign({}, options.request.headers, {
        authorization: authHeader.replace(/ .*$/, " [REDACTED]")
      });
    }

    // client_id & client_secret can be passed as URL query parameters to increase rate limit
    // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
    // OAuth tokens can be passed as URL query parameters, although it is not recommended
    // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
    sanitizedRequest.url = sanitizedRequest.url
      .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]")
      .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");

    this.request = sanitizedRequest;
  }
}
|
|
|
|
exports.RequestError = RequestError;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 510:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = addHook
|
|
|
|
// Registers `hook` under `name` in the state registry. For 'before',
// 'after' and 'error' kinds the original callback is wrapped so that it
// composes with the invoked method inside a promise chain; 'wrap' (and any
// other kind) stores the callback unwrapped. Both the wrapped hook and the
// original are kept so hooks can later be removed by identity.
function addHook (state, kind, name, hook) {
  var original = hook

  if (!state.registry[name]) {
    state.registry[name] = []
  }

  if (kind === 'before') {
    // Run the original with the options first, then the method.
    hook = function (method, options) {
      return Promise.resolve()
        .then(original.bind(null, options))
        .then(method.bind(null, options))
    }
  } else if (kind === 'after') {
    // Run the method, hand its result to the original, then yield the
    // method's result regardless of what the original returned.
    hook = function (method, options) {
      var result
      return Promise.resolve()
        .then(method.bind(null, options))
        .then(function (result_) {
          result = result_
          return original(result, options)
        })
        .then(function () {
          return result
        })
    }
  } else if (kind === 'error') {
    // Only invoked when the method rejects; the original may recover.
    hook = function (method, options) {
      return Promise.resolve()
        .then(method.bind(null, options))
        .catch(function (error) {
          return original(error, options)
        })
    }
  }

  state.registry[name].push({
    hook: hook,
    orig: original
  })
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 523:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
var register = __webpack_require__(363)
|
|
var addHook = __webpack_require__(510)
|
|
var removeHook = __webpack_require__(763)
|
|
|
|
// bind with array of arguments: https://stackoverflow.com/a/21792913
|
|
var bind = Function.bind
|
|
var bindable = bind.bind(bind)
|
|
|
|
// Attaches the public API to a hook function: `remove` plus the
// before/error/after/wrap registrars, each pre-bound to the shared state.
// `name` is only provided for singular hooks.
function bindApi (hook, state, name) {
  var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
  hook.api = { remove: removeHookRef }
  hook.remove = removeHookRef

  ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
    var args = name ? [state, kind, name] : [state, kind]
    hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
  })
}
|
|
|
|
// Creates a singular hook: a single unnamed hook channel stored internally
// under the fixed name 'h'.
function HookSingular () {
  var singularHookName = 'h'
  var singularHookState = {
    registry: {}
  }
  var singularHook = register.bind(null, singularHookState, singularHookName)
  bindApi(singularHook, singularHookState, singularHookName)
  return singularHook
}
|
|
|
|
// Creates a hook collection: a callable register function carrying the
// before/error/after/wrap registrars attached by bindApi.
function HookCollection () {
  var state = {
    registry: {}
  }

  var hook = register.bind(null, state)
  bindApi(hook, state)

  return hook
}
|
|
|
|
// Deprecated alias for HookCollection; prints a repurposing warning once
// per process, then delegates.
var collectionHookDeprecationMessageDisplayed = false
function Hook () {
  if (!collectionHookDeprecationMessageDisplayed) {
    console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
    collectionHookDeprecationMessageDisplayed = true
  }
  return HookCollection()
}
|
|
|
|
Hook.Singular = HookSingular.bind()
|
|
Hook.Collection = HookCollection.bind()
|
|
|
|
module.exports = Hook
|
|
// expose constructors as a named property for TypeScript
|
|
module.exports.Hook = Hook
|
|
module.exports.Singular = Hook.Singular
|
|
module.exports.Collection = Hook.Collection
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 528:
|
|
/***/ (function(__unusedmodule, exports) {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
/**
 * Backslash-escapes every character outside [a-zA-Z0-9_] so the value can
 * be embedded safely where such characters are significant.
 * @param value string to escape
 * @returns escaped string
 */
function escape(value) {
    return value.replace(/[^a-zA-Z0-9_]/g, (match) => '\\' + match);
}
|
|
exports.escape = escape;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 529:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
const factory = __webpack_require__(47);

// Module entry point: export the instance produced by the factory.
module.exports = factory();
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 533:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript-emitted __awaiter helper (duplicated per bundled module):
// runs a generator as if it were an async function.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise values so every step can be chained uniformly.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator until done; resolve with its return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const core = __webpack_require__(470);
|
|
const io = __webpack_require__(1);
|
|
const fs = __webpack_require__(747);
|
|
const os = __webpack_require__(87);
|
|
const path = __webpack_require__(622);
|
|
const httpm = __webpack_require__(874);
|
|
const semver = __webpack_require__(656);
|
|
const uuidV4 = __webpack_require__(826);
|
|
const exec_1 = __webpack_require__(986);
|
|
const assert_1 = __webpack_require__(357);
|
|
/**
 * Error thrown for non-200 HTTP responses; carries the status code as
 * `httpStatusCode`.
 */
class HTTPError extends Error {
    constructor(httpStatusCode) {
        const description = `Unexpected HTTP response: ${httpStatusCode}`;
        super(description);
        this.httpStatusCode = httpStatusCode;
        // Restore the prototype chain so `instanceof HTTPError` works when
        // the class is transpiled (extending built-ins breaks it otherwise).
        Object.setPrototypeOf(this, new.target.prototype);
    }
}
|
|
exports.HTTPError = HTTPError;
|
|
// Module-level defaults for the runner temp and tool-cache directories.
const IS_WINDOWS = process.platform === 'win32';
const userAgent = 'actions/tool-cache';
// On load grab temp directory and cache directory and remove them from env (currently don't want to expose this)
// NOTE(review): despite the comment above, the code only reads these env
// vars — nothing deletes them. Confirm intent.
let tempDirectory = process.env['RUNNER_TEMP'] || '';
let cacheRoot = process.env['RUNNER_TOOL_CACHE'] || '';
// If directories not found, place them in common temp locations
if (!tempDirectory || !cacheRoot) {
    let baseLocation;
    if (IS_WINDOWS) {
        // On windows use the USERPROFILE env variable
        baseLocation = process.env['USERPROFILE'] || 'C:\\';
    }
    else {
        if (process.platform === 'darwin') {
            baseLocation = '/Users';
        }
        else {
            baseLocation = '/home';
        }
    }
    if (!tempDirectory) {
        tempDirectory = path.join(baseLocation, 'actions', 'temp');
    }
    if (!cacheRoot) {
        cacheRoot = path.join(baseLocation, 'actions', 'cache');
    }
}
|
|
/**
|
|
* Download a tool from an url and stream it into a file
|
|
*
|
|
* @param url url of tool to download
|
|
* @returns path to downloaded tool
|
|
*/
|
|
function downloadTool(url) {
    return __awaiter(this, void 0, void 0, function* () {
        // Wrap in a promise so that we can resolve from within stream callbacks
        return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
            try {
                const http = new httpm.HttpClient(userAgent, [], {
                    allowRetries: true,
                    maxRetries: 3
                });
                // Download to a uniquely-named file under the runner temp dir.
                const destPath = path.join(tempDirectory, uuidV4());
                yield io.mkdirP(tempDirectory);
                core.debug(`Downloading ${url}`);
                core.debug(`Downloading ${destPath}`);
                if (fs.existsSync(destPath)) {
                    throw new Error(`Destination file path ${destPath} already exists`);
                }
                const response = yield http.get(url);
                if (response.message.statusCode !== 200) {
                    const err = new HTTPError(response.message.statusCode);
                    core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
                    throw err;
                }
                // Stream the response body into the file; resolve only once the
                // piped stream closes so the file is fully flushed.
                const file = fs.createWriteStream(destPath);
                file.on('open', () => __awaiter(this, void 0, void 0, function* () {
                    try {
                        const stream = response.message.pipe(file);
                        stream.on('close', () => {
                            core.debug('download complete');
                            resolve(destPath);
                        });
                    }
                    catch (err) {
                        core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
                        reject(err);
                    }
                }));
                file.on('error', err => {
                    file.end();
                    reject(err);
                });
            }
            catch (err) {
                reject(err);
            }
        }));
    });
}
exports.downloadTool = downloadTool;
|
|
/**
|
|
* Extract a .7z file
|
|
*
|
|
* @param file path to the .7z file
|
|
* @param dest destination directory. Optional.
|
|
* @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
|
|
* problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
|
|
* gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
|
|
* bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line
|
|
* interface, it is smaller than the full command line interface, and it does support long paths. At the
|
|
* time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
|
|
* Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
|
|
* to 7zr.exe can be pass to this function.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extract7z(file, dest, _7zPath) {
    return __awaiter(this, void 0, void 0, function* () {
        assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
        assert_1.ok(file, 'parameter "file" is required');
        dest = dest || (yield _createExtractFolder(dest));
        // Extraction runs from inside `dest`; the original cwd is restored in
        // the finally blocks below.
        const originalCwd = process.cwd();
        process.chdir(dest);
        if (_7zPath) {
            // Caller supplied 7zr.exe: use it directly (supports long paths).
            try {
                const args = [
                    'x',
                    '-bb1',
                    '-bd',
                    '-sccUTF-8',
                    file
                ];
                const options = {
                    silent: true
                };
                yield exec_1.exec(`"${_7zPath}"`, args, options);
            }
            finally {
                process.chdir(originalCwd);
            }
        }
        else {
            // Fall back to the bundled Invoke-7zdec.ps1 script via PowerShell.
            const escapedScript = path
                .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
                .replace(/'/g, "''")
                .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
            const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
            const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
            const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
            const args = [
                '-NoLogo',
                '-Sta',
                '-NoProfile',
                '-NonInteractive',
                '-ExecutionPolicy',
                'Unrestricted',
                '-Command',
                command
            ];
            const options = {
                silent: true
            };
            try {
                const powershellPath = yield io.which('powershell', true);
                yield exec_1.exec(`"${powershellPath}"`, args, options);
            }
            finally {
                process.chdir(originalCwd);
            }
        }
        return dest;
    });
}
exports.extract7z = extract7z;
|
|
/**
|
|
* Extract a tar
|
|
*
|
|
* @param file path to the tar
|
|
* @param dest destination directory. Optional.
|
|
* @param flags flags for the tar. Optional.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extractTar(file, dest, flags = 'xz') {
    return __awaiter(this, void 0, void 0, function* () {
        if (!file) {
            throw new Error("parameter 'file' is required");
        }
        // Create a scratch destination folder when none was supplied.
        dest = dest || (yield _createExtractFolder(dest));
        const tarPath = yield io.which('tar', true);
        // Runs e.g. `tar xz -C <dest> -f <file>`; `flags` selects mode/compression.
        yield exec_1.exec(`"${tarPath}"`, [flags, '-C', dest, '-f', file]);
        return dest;
    });
}
exports.extractTar = extractTar;
|
|
/**
|
|
* Extract a zip
|
|
*
|
|
* @param file path to the zip
|
|
* @param dest destination directory. Optional.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extractZip(file, dest) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!file) {
            throw new Error("parameter 'file' is required");
        }
        // Create a scratch destination folder when none was supplied.
        dest = dest || (yield _createExtractFolder(dest));
        // Delegate to the platform-specific helper (PowerShell vs unzip).
        if (IS_WINDOWS) {
            yield extractZipWin(file, dest);
        }
        else {
            yield extractZipNix(file, dest);
        }
        return dest;
    });
}
exports.extractZip = extractZip;
|
|
// Windows zip extraction via PowerShell and System.IO.Compression.ZipFile.
function extractZipWin(file, dest) {
    return __awaiter(this, void 0, void 0, function* () {
        // build the powershell command
        const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
        const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
        const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
        // run powershell
        const powershellPath = yield io.which('powershell');
        const args = [
            '-NoLogo',
            '-Sta',
            '-NoProfile',
            '-NonInteractive',
            '-ExecutionPolicy',
            'Unrestricted',
            '-Command',
            command
        ];
        yield exec_1.exec(`"${powershellPath}"`, args);
    });
}
|
|
/**
 * Non-Windows zip extraction: runs `unzip <file>` with the working
 * directory set to the destination folder.
 */
function extractZipNix(file, dest) {
    return __awaiter(this, void 0, void 0, function* () {
        const unzipTool = yield io.which('unzip');
        const execOptions = { cwd: dest };
        yield exec_1.exec(`"${unzipTool}"`, [file], execOptions);
    });
}
|
|
/**
 * Caches a directory and installs it into the tool cacheDir.
 *
 * @param sourceDir the directory to cache into tools
 * @param tool tool name
 * @param version version of the tool. semver format
 * @param arch architecture of the tool. Optional. Defaults to machine architecture
 * @returns path of the cached tool folder
 */
function cacheDir(sourceDir, tool, version, arch) {
    return __awaiter(this, void 0, void 0, function* () {
        version = semver.clean(version) || version;
        arch = arch || os.arch();
        core.debug(`Caching tool ${tool} ${version} ${arch}`);
        core.debug(`source dir: ${sourceDir}`);
        if (!fs.statSync(sourceDir).isDirectory()) {
            throw new Error('sourceDir is not a directory');
        }
        // create the versioned tool folder (wipes any partial previous install)
        const toolDir = yield _createToolPath(tool, version, arch);
        // copy each child item instead of moving the tree: move can fail on
        // Windows when anti-virus software holds an open handle on a file
        for (const entry of fs.readdirSync(sourceDir)) {
            const entryPath = path.join(sourceDir, entry);
            yield io.cp(entryPath, toolDir, { recursive: true });
        }
        // drop the .complete marker so find() will trust this cache entry
        _completeToolPath(tool, version, arch);
        return toolDir;
    });
}
exports.cacheDir = cacheDir;
|
|
/**
 * Caches a downloaded file (GUID) and installs it
 * into the tool cache with a given targetName.
 *
 * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
 * @param targetFile the name of the file name in the tools directory
 * @param tool tool name
 * @param version version of the tool. semver format
 * @param arch architecture of the tool. Optional. Defaults to machine architecture
 * @returns path of the cached tool folder
 */
function cacheFile(sourceFile, targetFile, tool, version, arch) {
    return __awaiter(this, void 0, void 0, function* () {
        version = semver.clean(version) || version;
        arch = arch || os.arch();
        core.debug(`Caching tool ${tool} ${version} ${arch}`);
        core.debug(`source file: ${sourceFile}`);
        if (!fs.statSync(sourceFile).isFile()) {
            throw new Error('sourceFile is not a file');
        }
        // create the versioned tool folder (wipes any partial previous install)
        const toolDir = yield _createToolPath(tool, version, arch);
        // copy instead of move: move can fail on Windows when anti-virus
        // software holds an open handle on the file
        const installedPath = path.join(toolDir, targetFile);
        core.debug(`destination file ${installedPath}`);
        yield io.cp(sourceFile, installedPath);
        // drop the .complete marker so find() will trust this cache entry
        _completeToolPath(tool, version, arch);
        return toolDir;
    });
}
exports.cacheFile = cacheFile;
|
|
/**
 * Finds the path to a tool version in the local installed tool cache.
 *
 * @param toolName name of the tool
 * @param versionSpec version of the tool (exact version or semver range)
 * @param arch optional arch. defaults to arch of computer
 * @returns cached tool path, or '' when not found
 */
function find(toolName, versionSpec, arch) {
    if (!toolName) {
        throw new Error('toolName parameter is required');
    }
    if (!versionSpec) {
        throw new Error('versionSpec parameter is required');
    }
    arch = arch || os.arch();
    // a range spec is resolved against the locally installed versions first
    if (!_isExplicitVersion(versionSpec)) {
        const localVersions = findAllVersions(toolName, arch);
        versionSpec = _evaluateVersions(localVersions, versionSpec);
    }
    // no resolvable version -> not cached
    if (!versionSpec) {
        return '';
    }
    const cleaned = semver.clean(versionSpec) || '';
    const cachePath = path.join(cacheRoot, toolName, cleaned, arch);
    core.debug(`checking cache: ${cachePath}`);
    // a cache entry is only valid when its .complete marker exists
    if (!fs.existsSync(cachePath) || !fs.existsSync(`${cachePath}.complete`)) {
        core.debug('not found');
        return '';
    }
    core.debug(`Found tool in cache ${toolName} ${cleaned} ${arch}`);
    return cachePath;
}
exports.find = find;
|
|
/**
 * Finds the paths to all versions of a tool that are installed in the local tool cache.
 *
 * @param toolName name of the tool
 * @param arch optional arch. defaults to arch of computer
 * @returns array of explicit version strings with a completed cache entry
 */
function findAllVersions(toolName, arch) {
    arch = arch || os.arch();
    const versions = [];
    const toolPath = path.join(cacheRoot, toolName);
    if (!fs.existsSync(toolPath)) {
        return versions;
    }
    for (const child of fs.readdirSync(toolPath)) {
        // only explicit semver folder names count as versions
        if (!_isExplicitVersion(child)) {
            continue;
        }
        const fullPath = path.join(toolPath, child, arch || '');
        // only completed cache entries (marker file present) are reported
        if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
            versions.push(child);
        }
    }
    return versions;
}
exports.findAllVersions = findAllVersions;
|
|
/**
 * Ensures an extraction folder exists: uses `dest` when provided,
 * otherwise creates a unique folder under the temp directory.
 */
function _createExtractFolder(dest) {
    return __awaiter(this, void 0, void 0, function* () {
        const target = dest ? dest : path.join(tempDirectory, uuidV4());
        yield io.mkdirP(target);
        return target;
    });
}
|
|
/**
 * Creates (and clears) the versioned destination folder for a tool,
 * removing any stale folder and its `.complete` marker first.
 */
function _createToolPath(tool, version, arch) {
    return __awaiter(this, void 0, void 0, function* () {
        const destDir = path.join(cacheRoot, tool, semver.clean(version) || version, arch || '');
        core.debug(`destination ${destDir}`);
        const completeMarker = `${destDir}.complete`;
        // start from a clean slate so a previous partial install cannot leak through
        yield io.rmRF(destDir);
        yield io.rmRF(completeMarker);
        yield io.mkdirP(destDir);
        return destDir;
    });
}
|
|
/**
 * Writes the empty `.complete` marker next to the tool folder;
 * find()/findAllVersions() only trust entries that have this marker.
 */
function _completeToolPath(tool, version, arch) {
    const toolDir = path.join(cacheRoot, tool, semver.clean(version) || version, arch || '');
    fs.writeFileSync(`${toolDir}.complete`, '');
    core.debug('finished caching tool');
}
|
|
/**
 * Returns true when `versionSpec` is an exact semver version
 * (e.g. "1.2.3") rather than a range.
 */
function _isExplicitVersion(versionSpec) {
    const cleaned = semver.clean(versionSpec) || '';
    core.debug(`isExplicit: ${cleaned}`);
    const explicit = semver.valid(cleaned) != null;
    core.debug(`explicit? ${explicit}`);
    return explicit;
}
|
|
/**
 * Picks the highest version from `versions` that satisfies `versionSpec`.
 *
 * @param versions candidate semver version strings
 * @param versionSpec semver range to satisfy
 * @returns the best matching version, or '' when none satisfies the spec
 */
function _evaluateVersions(versions, versionSpec) {
    let version = '';
    core.debug(`evaluating ${versions.length} versions`);
    // Sort ascending with a proper three-way comparator. The previous
    // comparator never returned 0, which violates Array.prototype.sort's
    // comparator contract and makes ordering implementation-defined when
    // equal versions are present.
    versions = versions.sort((a, b) => {
        if (semver.gt(a, b)) {
            return 1;
        }
        if (semver.lt(a, b)) {
            return -1;
        }
        return 0;
    });
    // walk from highest to lowest and take the first version satisfying the spec
    for (let i = versions.length - 1; i >= 0; i--) {
        const potential = versions[i];
        const satisfied = semver.satisfies(potential, versionSpec);
        if (satisfied) {
            version = potential;
            break;
        }
    }
    if (version) {
        core.debug(`matched: ${version}`);
    }
    else {
        core.debug('match not found');
    }
    return version;
}
|
|
//# sourceMappingURL=tool-cache.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 536:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
// Deprecated octokit pagination helper: returns the URL of the `first`
// page relation parsed from a Link header (undefined when absent).
module.exports = hasFirstPage

const deprecate = __webpack_require__(370)
const getPageLinks = __webpack_require__(577)

function hasFirstPage (link) {
  deprecate(`octokit.hasFirstPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
  const links = getPageLinks(link)
  return links.first
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 539:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Node core modules via webpack ids -- usage below (url.parse, http/https
// request and Agent) indicates 835='url', 605='http', 34='https';
// 950 is the proxy-settings helper module.
const url = __webpack_require__(835);
const http = __webpack_require__(605);
const https = __webpack_require__(34);
const pm = __webpack_require__(950);
// tunnel-agent module, lazily required only when a proxy is in use (see _getAgent)
let tunnel;
// TypeScript-style numeric enum of common HTTP status codes
// (forward and reverse mappings).
var HttpCodes;
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
// String enum of well-known (lower-case) header names.
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
// String enum of media types used by the *Json convenience methods.
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
|
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 * @returns the proxy href, or '' when no proxy applies
 */
function getProxyUrl(serverUrl) {
    const proxy = pm.getProxyUrl(url.parse(serverUrl));
    return proxy ? proxy.href : '';
}
exports.getProxyUrl = getProxyUrl;
|
|
// 3xx statuses that request() will transparently follow.
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
// 5xx statuses treated as transient and eligible for retry in request().
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
// Only idempotent verbs are retried (see request()).
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
// Backoff delay is ExponentialBackoffTimeSlice * 2^retry ms, with the
// retry number capped at ExponentialBackoffCeiling (see _performExponentialBackoff).
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
|
|
/**
 * Wraps a raw incoming HTTP message and exposes the body as a promise.
 */
class HttpClientResponse {
    constructor(message) {
        this.message = message;
    }
    /**
     * Reads the full response body and resolves it as a string.
     */
    readBody() {
        return new Promise((resolve) => {
            const chunks = [];
            this.message.on('data', (chunk) => {
                chunks.push(chunk);
            });
            this.message.on('end', () => {
                resolve(Buffer.concat(chunks).toString());
            });
        });
    }
}
|
|
exports.HttpClientResponse = HttpClientResponse;
|
|
/**
 * Returns true when the given URL string uses the https: protocol.
 */
function isHttps(requestUrl) {
    const { protocol } = url.parse(requestUrl);
    return protocol === 'https:';
}
|
|
exports.isHttps = isHttps;
|
|
/**
 * HTTP/HTTPS client supporting:
 *  - pluggable authentication handlers (prepareRequest / canHandleAuthentication / handleAuthentication)
 *  - transparent redirect following with HTTPS->HTTP downgrade protection
 *  - retries with exponential backoff for transient 5xx responses (idempotent verbs only)
 *  - proxy tunneling (lazily-loaded tunnel module) and optional keep-alive agents
 */
class HttpClient {
    /**
     * @param userAgent optional user-agent header value sent with every request
     * @param handlers optional array of authentication handlers
     * @param requestOptions optional client-wide options; recognized keys and
     *        their defaults are applied in the body below
     */
    constructor(userAgent, handlers, requestOptions) {
        // policy defaults; selectively overridden from requestOptions below
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            // `!= null` checks allow explicit `false`/`0` to override defaults
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    // --- convenience verb wrappers; all delegate to request() ---
    options(requestUrl, additionalHeaders) {
        return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
    }
    get(requestUrl, additionalHeaders) {
        return this.request('GET', requestUrl, null, additionalHeaders || {});
    }
    del(requestUrl, additionalHeaders) {
        return this.request('DELETE', requestUrl, null, additionalHeaders || {});
    }
    post(requestUrl, data, additionalHeaders) {
        return this.request('POST', requestUrl, data, additionalHeaders || {});
    }
    patch(requestUrl, data, additionalHeaders) {
        return this.request('PATCH', requestUrl, data, additionalHeaders || {});
    }
    put(requestUrl, data, additionalHeaders) {
        return this.request('PUT', requestUrl, data, additionalHeaders || {});
    }
    head(requestUrl, additionalHeaders) {
        return this.request('HEAD', requestUrl, null, additionalHeaders || {});
    }
    // Sends a stream body with an arbitrary verb.
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return this.request(verb, requestUrl, stream, additionalHeaders);
    }
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    async getJson(requestUrl, additionalHeaders = {}) {
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        let res = await this.get(requestUrl, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // POSTs `obj` serialized as pretty-printed JSON; response parsed by _processResponse.
    async postJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.post(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // PUTs `obj` serialized as pretty-printed JSON; response parsed by _processResponse.
    async putJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.put(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // PATCHes `obj` serialized as pretty-printed JSON; response parsed by _processResponse.
    async patchJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.patch(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     *
     * Handles, in order: auth challenges (401), redirects (up to
     * _maxRedirects), and retries for transient 5xx responses.
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
        }
        let parsedUrl = url.parse(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                // first handler that claims the challenge wins
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
                        authenticationHandler = this.handlers[i];
                        break;
                    }
                }
                if (authenticationHandler) {
                    return authenticationHandler.handleAuthentication(this, info, data);
                }
                else {
                    // We have received an unauthorized response but have no handlers to handle it.
                    // Let the response return to the caller.
                    return response;
                }
            }
            let redirectsRemaining = this._maxRedirects;
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
                this._allowRedirects &&
                redirectsRemaining > 0) {
                const redirectUrl = response.message.headers['location'];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                let parsedRedirectUrl = url.parse(redirectUrl);
                // refuse HTTPS -> HTTP downgrades unless explicitly allowed
                if (parsedUrl.protocol == 'https:' &&
                    parsedUrl.protocol != parsedRedirectUrl.protocol &&
                    !this._allowRedirectDowngrade) {
                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // strip authorization header if redirected to a different hostname
                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                    for (let header in headers) {
                        // header names are case insensitive
                        if (header.toLowerCase() === 'authorization') {
                            delete headers[header];
                        }
                    }
                }
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
                redirectsRemaining--;
            }
            if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
                // If not a retry code, return immediately instead of retrying
                return response;
            }
            numTries += 1;
            if (numTries < maxTries) {
                // drain the body (frees the socket) before backing off and retrying
                await response.readBody();
                await this._performExponentialBackoff(numTries);
            }
        }
        return response;
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info
     * @param data
     */
    requestRaw(info, data) {
        return new Promise((resolve, reject) => {
            let callbackForResult = function (err, res) {
                if (err) {
                    reject(err);
                }
                // NOTE(review): on error this also calls resolve(res); harmless
                // because a settled promise ignores subsequent resolve calls.
                resolve(res);
            };
            this.requestRawWithCallback(info, data, callbackForResult);
        });
    }
    /**
     * Raw request with callback.
     * @param info
     * @param data string body, stream body, or null
     * @param onResult invoked exactly once with (err, res)
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof data === 'string') {
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        // guard so onResult fires at most once across response/timeout/error paths
        let callbackCalled = false;
        let handleResult = (err, res) => {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        };
        let req = info.httpModule.request(info.options, (msg) => {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        // (default 3 minutes when no socketTimeout was configured)
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error('Request timeout: ' + info.options.path), null);
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            // stream body: pipe it in and end the request when the stream closes
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl) {
        let parsedUrl = url.parse(serverUrl);
        return this._getAgent(parsedUrl);
    }
    // Builds the { parsedUrl, httpModule, options } bundle consumed by
    // requestRawWithCallback, merging headers and letting handlers participate.
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = requestUrl;
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            this.handlers.forEach(handler => {
                handler.prepareRequest(info.options);
            });
        }
        return info;
    }
    // Merges per-request headers over client-default headers; all header
    // names are lower-cased so later keys override case-insensitively.
    _mergeHeaders(headers) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
        }
        return lowercaseKeys(headers || {});
    }
    // Resolution order: per-call header, then client default header, then _default.
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    // Selects/creates the agent for a URL: cached keep-alive or proxy agent,
    // a tunnel agent when a proxy applies, a private keep-alive agent, or the
    // module's global agent as the fallback.
    _getAgent(parsedUrl) {
        let agent;
        let proxyUrl = pm.getProxyUrl(parsedUrl);
        let useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (!!agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (!!this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        if (useProxy) {
            // If using proxy, need tunnel
            if (!tunnel) {
                tunnel = __webpack_require__(335);
            }
            const agentOptions = {
                maxSockets: maxSockets,
                keepAlive: this._keepAlive,
                proxy: {
                    proxyAuth: proxyUrl.auth,
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                }
            };
            let tunnelAgent;
            // pick the tunnel variant by target protocol x proxy protocol
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
    // Sleeps ExponentialBackoffTimeSlice * 2^retryNumber ms (retryNumber capped).
    _performExponentialBackoff(retryNumber) {
        retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
        const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
        return new Promise(resolve => setTimeout(() => resolve(), ms));
    }
    // JSON.parse reviver: converts parseable date strings into Date objects.
    static dateTimeDeserializer(key, value) {
        if (typeof value === 'string') {
            let a = new Date(value);
            if (!isNaN(a.valueOf())) {
                return a;
            }
        }
        return value;
    }
    // Parses a response body as JSON into { statusCode, result, headers };
    // resolves with result=null for 404, rejects for other statuses > 299.
    async _processResponse(res, options) {
        return new Promise(async (resolve, reject) => {
            const statusCode = res.message.statusCode;
            const response = {
                statusCode: statusCode,
                result: null,
                headers: {}
            };
            // not found leads to null obj returned
            // NOTE(review): there is no `return` here, so execution continues
            // after resolve(); the later reject/resolve calls for a 404 are
            // no-ops on the already-settled promise.
            if (statusCode == HttpCodes.NotFound) {
                resolve(response);
            }
            let obj;
            let contents;
            // get the result from the body
            try {
                contents = await res.readBody();
                if (contents && contents.length > 0) {
                    if (options && options.deserializeDates) {
                        obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                    }
                    else {
                        obj = JSON.parse(contents);
                    }
                    response.result = obj;
                }
                response.headers = res.message.headers;
            }
            catch (err) {
                // Invalid resource (contents not json); leaving result obj null
            }
            // note that 3xx redirects are handled by the http layer.
            if (statusCode > 299) {
                let msg;
                // if exception/error in body, attempt to get better error
                if (obj && obj.message) {
                    msg = obj.message;
                }
                else if (contents && contents.length > 0) {
                    // it may be the case that the exception is in the body message as string
                    msg = contents;
                }
                else {
                    msg = 'Failed request: (' + statusCode + ')';
                }
                let err = new Error(msg);
                // attach statusCode and body obj (if available) to the error object
                err['statusCode'] = statusCode;
                if (response.result) {
                    err['result'] = response.result;
                }
                reject(err);
            }
            else {
                resolve(response);
            }
        });
    }
}
exports.HttpClient = HttpClient;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 550:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
// Deprecated octokit pagination helper: fetches the page referenced by
// the `next` relation of a Link header.
module.exports = getNextPage

const getPage = __webpack_require__(265)

function getNextPage (octokit, link, headers) {
  const relation = 'next'
  return getPage(octokit, link, relation, headers)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 558:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
// Deprecated octokit pagination helper: returns the URL of the `prev`
// page relation parsed from a Link header (undefined when absent).
module.exports = hasPreviousPage

const deprecate = __webpack_require__(370)
const getPageLinks = __webpack_require__(577)

function hasPreviousPage (link) {
  deprecate(`octokit.hasPreviousPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
  const links = getPageLinks(link)
  return links.prev
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 559:
|
|
/***/ (function(__unusedmodule, exports) {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
/**
 * Parses and compares git / git-lfs version numbers of the form
 * "major.minor" or "major.minor.patch".
 */
class GitVersion {
    /**
     * Used for comparing the version of git and git-lfs against the minimum required version
     * @param version the version string, e.g. 1.2 or 1.2.3
     */
    constructor(version) {
        // NaN components mark an invalid / unset version
        this.major = NaN;
        this.minor = NaN;
        this.patch = NaN;
        if (!version) {
            return;
        }
        const parts = version.match(/^(\d+)\.(\d+)(\.(\d+))?$/);
        if (!parts) {
            return;
        }
        this.major = Number(parts[1]);
        this.minor = Number(parts[2]);
        if (parts[4]) {
            this.patch = Number(parts[4]);
        }
    }
    /**
     * Compares the instance against a minimum required version
     * @param minimum Minimum version
     * @returns true when this version is >= the minimum
     * @throws when `minimum` is not a valid version
     */
    checkMinimum(minimum) {
        if (!minimum.isValid()) {
            throw new Error('Arg minimum is not a valid version');
        }
        if (this.major < minimum.major) {
            return false;
        }
        if (this.major === minimum.major) {
            if (this.minor < minimum.minor) {
                return false;
            }
            if (this.minor === minimum.minor) {
                // NaN/0 patch on this instance is treated as "no patch to compare"
                if (this.patch && this.patch < (minimum.patch || 0)) {
                    return false;
                }
            }
        }
        return true;
    }
    /**
     * Indicates whether the instance was constructed from a valid version string
     */
    isValid() {
        return !isNaN(this.major);
    }
    /**
     * Returns the version as a string, e.g. 1.2 or 1.2.3 ('' when invalid)
     */
    toString() {
        if (!this.isValid()) {
            return '';
        }
        const pieces = [`${this.major}.${this.minor}`];
        if (!isNaN(this.patch)) {
            pieces.push(`${this.patch}`);
        }
        return pieces.join('.');
    }
}
|
|
exports.GitVersion = GitVersion;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 562:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });

// Unwraps a transpiled ES-module default export; returns the module
// itself when it carries no `default` property.
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }

// OS-name helper used by getUserAgent() below -- presumably the `os-name`
// package given the wmic error handling; confirm against the webpack manifest.
var osName = _interopDefault(__webpack_require__(2));
|
|
|
|
// Builds a user-agent string like "Node.js/12.0.0 (macOS; x64)".
// Falls back to placeholder strings when the OS name cannot be determined.
function getUserAgent() {
  try {
    const nodeVersion = process.version.substr(1);
    return `Node.js/${nodeVersion} (${osName()}; ${process.arch})`;
  } catch (error) {
    // os-name shells out to wmic on Windows; detect that specific failure
    const isWmicFailure = /wmic os get Caption/.test(error.message);
    return isWmicFailure
      ? "Windows <version undetectable>"
      : "<environment undetectable>";
  }
}
|
|
|
|
exports.getUserAgent = getUserAgent;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 563:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = getPreviousPage
|
|
|
|
const getPage = __webpack_require__(265)
|
|
|
|
function getPreviousPage (octokit, link, headers) {
|
|
return getPage(octokit, link, 'prev', headers)
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 568:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
const path = __webpack_require__(622);
|
|
const niceTry = __webpack_require__(948);
|
|
const resolveCommand = __webpack_require__(489);
|
|
const escape = __webpack_require__(462);
|
|
const readShebang = __webpack_require__(389);
|
|
const semver = __webpack_require__(280);
|
|
|
|
const isWin = process.platform === 'win32';
// Files that cmd.exe can run directly (no shell wrapping needed).
const isExecutableRegExp = /\.(?:com|exe)$/i;
// npm cmd-shims living in node_modules/.bin -- these need double-escaped
// metacharacters (see parseNonShell below).
const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i;

// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0
const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false;
|
|
|
|
// Resolves `parsed.command` to a file and, when that file begins with a
// shebang, rewrites the invocation so the shebang interpreter is executed
// with the original file prepended to the argument list.
// Returns the resolved file path of whatever will actually be executed.
function detectShebang(parsed) {
    parsed.file = resolveCommand(parsed);

    const shebang = parsed.file && readShebang(parsed.file);

    if (shebang) {
        // Run the interpreter instead, passing the script as first argument.
        parsed.args.unshift(parsed.file);
        parsed.command = shebang;

        // Re-resolve: the interpreter itself may need a PATH lookup.
        return resolveCommand(parsed);
    }

    return parsed.file;
}
|
|
|
|
// Windows-only preparation of a spawn invocation that does not go through a
// user-requested shell. Adds shebang support and, unless the target is a
// .com/.exe, routes the call through cmd.exe with all arguments escaped.
// On non-Windows platforms the invocation is returned untouched.
function parseNonShell(parsed) {
    if (!isWin) {
        return parsed;
    }

    // Detect & add support for shebangs
    const commandFile = detectShebang(parsed);

    // We don't need a shell if the command filename is an executable
    const needsShell = !isExecutableRegExp.test(commandFile);

    // If a shell is required, use cmd.exe and take care of escaping everything correctly
    // Note that `forceShell` is an hidden option used only in tests
    if (parsed.options.forceShell || needsShell) {
        // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
        // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument
        // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
        // we need to double escape them
        const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);

        // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
        // This is necessary otherwise it will always fail with ENOENT in those cases
        parsed.command = path.normalize(parsed.command);

        // Escape command & arguments
        parsed.command = escape.command(parsed.command);
        parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));

        const shellCommand = [parsed.command].concat(parsed.args).join(' ');

        // cmd.exe flags: /d skip AutoRun, /s preserve quoting, /c run and exit
        parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
        parsed.command = process.env.comspec || 'cmd.exe';
        parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
    }

    return parsed;
}
|
|
|
|
// Emulates child_process's `shell: true` option on old Node versions that
// lack native support (detected via `supportsShellOption`); on supporting
// versions the invocation passes through unchanged.
function parseShell(parsed) {
    // If node supports the shell option, there's no need to mimic its behavior
    if (supportsShellOption) {
        return parsed;
    }

    // Mimic node shell option
    // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335
    const shellCommand = [parsed.command].concat(parsed.args).join(' ');

    if (isWin) {
        // A string `shell` option overrides the default cmd.exe.
        parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe';
        parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
        parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
    } else {
        if (typeof parsed.options.shell === 'string') {
            parsed.command = parsed.options.shell;
        } else if (process.platform === 'android') {
            parsed.command = '/system/bin/sh';
        } else {
            parsed.command = '/bin/sh';
        }

        parsed.args = ['-c', shellCommand];
    }

    return parsed;
}
|
|
|
|
/**
 * Normalizes a (command, args, options) spawn invocation into a parsed
 * descriptor, then hands it to the shell or non-shell code path.
 * Mirrors Node's own argument juggling: `parse(command, options)` is valid.
 */
function parse(command, args, options) {
    // Support the optional-args calling form: parse(command, options)
    if (args && !Array.isArray(args)) {
        options = args;
        args = null;
    }

    // Work on copies so the caller's array/object are never mutated by the
    // downstream escaping/rewriting steps.
    const argsCopy = args ? args.slice(0) : [];
    const optionsCopy = Object.assign({}, options);

    const parsed = {
        command,
        args: argsCopy,
        options: optionsCopy,
        file: undefined,
        original: {
            command,
            args: argsCopy,
        },
    };

    // Delegate further parsing to shell or non-shell
    return optionsCopy.shell ? parseShell(parsed) : parseNonShell(parsed);
}
|
|
|
|
module.exports = parse;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 577:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = getPageLinks
|
|
|
|
/**
 * Parses a GitHub `Link` response header into a { rel: url } map.
 * Accepts either a response object ({ headers: { link } }) or an object
 * carrying the header directly ({ link }).
 *
 * Header format:
 * '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
 */
function getPageLinks (link) {
  const header = link.link || link.headers.link || ''

  const result = {}
  const pattern = /<([^>]*)>;\s*rel="([\w]*)"/g

  let match
  while ((match = pattern.exec(header)) !== null) {
    result[match[2]] = match[1]
  }

  return result
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 587:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const core = __importStar(__webpack_require__(470));
|
|
const defaultMaxAttempts = 3;
|
|
const defaultMinSeconds = 10;
|
|
const defaultMaxSeconds = 20;
|
|
/**
 * Retries an async action up to `maxAttempts` times, sleeping a random whole
 * number of seconds (between minSeconds and maxSeconds, inclusive) between
 * attempts. Errors from all but the final attempt are logged via core.info
 * and swallowed; the final attempt's error propagates to the caller.
 */
class RetryHelper {
    constructor(maxAttempts = defaultMaxAttempts, minSeconds = defaultMinSeconds, maxSeconds = defaultMaxSeconds) {
        this.maxAttempts = maxAttempts;
        // Floor both bounds so getSleepAmount deals in whole seconds.
        this.minSeconds = Math.floor(minSeconds);
        this.maxSeconds = Math.floor(maxSeconds);
        if (this.minSeconds > this.maxSeconds) {
            throw new Error('min seconds should be less than or equal to max seconds');
        }
    }
    execute(action) {
        return __awaiter(this, void 0, void 0, function* () {
            let attempt = 1;
            // Attempts 1..maxAttempts-1 run inside the loop with their errors
            // caught; the final attempt runs below, outside the try/catch, so
            // its error reaches the caller.
            while (attempt < this.maxAttempts) {
                // Try
                try {
                    return yield action();
                }
                catch (err) {
                    core.info(err.message);
                }
                // Sleep
                const seconds = this.getSleepAmount();
                core.info(`Waiting ${seconds} seconds before trying again`);
                yield this.sleep(seconds);
                attempt++;
            }
            // Last attempt
            return yield action();
        });
    }
    // Random integer in [minSeconds, maxSeconds].
    getSleepAmount() {
        return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
            this.minSeconds);
    }
    // Resolves after the given number of seconds.
    sleep(seconds) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise(resolve => setTimeout(resolve, seconds * 1000));
        });
    }
}
|
|
exports.RetryHelper = RetryHelper;
|
|
// Convenience wrapper: runs `action` through a RetryHelper constructed with
// the module's default attempt count and sleep bounds.
function execute(action) {
    return __awaiter(this, void 0, void 0, function* () {
        const retryHelper = new RetryHelper();
        return yield retryHelper.execute(action);
    });
}
|
|
exports.execute = execute;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 605:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("http");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 614:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("events");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 618:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const fs = __importStar(__webpack_require__(747));
|
|
/**
 * Synchronously checks whether `path` exists and is a directory.
 * @param path     Path to check; must be non-empty.
 * @param required When true, a missing or non-directory path throws instead
 *                 of returning false.
 * @returns true when the path is a directory, false otherwise (unless
 *          `required`).
 * @throws on empty `path`, on stat errors other than ENOENT, and when
 *         `required` is set but the directory is absent.
 */
function directoryExistsSync(path, required) {
    if (!path) {
        throw new Error("Arg 'path' must not be empty");
    }
    let stats;
    try {
        stats = fs.statSync(path);
    }
    catch (error) {
        // Only "not found" is an expected outcome; everything else is wrapped.
        if (error.code !== 'ENOENT') {
            throw new Error(`Encountered an error when checking whether path '${path}' exists: ${error.message}`);
        }
        if (required) {
            throw new Error(`Directory '${path}' does not exist`);
        }
        return false;
    }
    if (stats.isDirectory()) {
        return true;
    }
    if (!required) {
        return false;
    }
    throw new Error(`Directory '${path}' does not exist`);
}
|
|
exports.directoryExistsSync = directoryExistsSync;
|
|
/**
 * Synchronously checks whether `path` exists (file or directory).
 * @param path Path to check; must be non-empty.
 * @returns true when stat succeeds, false when the path is absent.
 * @throws on empty `path` or on stat errors other than ENOENT.
 */
function existsSync(path) {
    if (!path) {
        throw new Error("Arg 'path' must not be empty");
    }
    try {
        fs.statSync(path);
        return true;
    }
    catch (error) {
        if (error.code === 'ENOENT') {
            return false;
        }
        throw new Error(`Encountered an error when checking whether path '${path}' exists: ${error.message}`);
    }
}
|
|
exports.existsSync = existsSync;
|
|
/**
 * Synchronously checks whether `path` exists and is NOT a directory.
 * @param path Path to check; must be non-empty.
 * @returns true for an existing non-directory entry, false when the path is
 *          absent or is a directory.
 * @throws on empty `path` or on stat errors other than ENOENT.
 */
function fileExistsSync(path) {
    if (!path) {
        throw new Error("Arg 'path' must not be empty");
    }
    let stats;
    try {
        stats = fs.statSync(path);
    }
    catch (error) {
        if (error.code === 'ENOENT') {
            return false;
        }
        throw new Error(`Encountered an error when checking whether path '${path}' exists: ${error.message}`);
    }
    // Anything that stats and is not a directory counts as a "file" here.
    return !stats.isDirectory();
}
|
|
exports.fileExistsSync = fileExistsSync;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 621:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const path = __webpack_require__(622);
|
|
const pathKey = __webpack_require__(39);
|
|
|
|
// Builds a PATH-style string that prepends every `node_modules/.bin`
// directory from `opts.cwd` upward to the filesystem root, plus the
// directory of the running `node` binary, ahead of the existing search path.
module.exports = opts => {
	opts = Object.assign({
		cwd: process.cwd(),
		path: process.env[pathKey()]
	}, opts);

	let prev;
	let pth = path.resolve(opts.cwd);
	const ret = [];

	// Walk upward until resolving '..' no longer changes the path (root).
	while (prev !== pth) {
		ret.push(path.join(pth, 'node_modules/.bin'));
		prev = pth;
		pth = path.resolve(pth, '..');
	}

	// ensure the running `node` binary is used
	ret.push(path.dirname(process.execPath));

	return ret.concat(opts.path).join(path.delimiter);
};
|
|
|
|
// Variant that returns a copy of the whole env object with the augmented
// PATH applied, instead of just the PATH string.
module.exports.env = opts => {
	opts = Object.assign({
		env: process.env
	}, opts);

	const env = Object.assign({}, opts.env);
	// Resolve the platform-correct PATH key (e.g. "Path" on Windows).
	const path = pathKey({env});

	// Feed the env's current PATH into the main export, then overwrite it.
	opts.path = env[path];
	env[path] = module.exports(opts);

	return env;
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 622:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("path");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 631:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("net");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 649:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = getLastPage
|
|
|
|
const getPage = __webpack_require__(265)
|
|
|
|
// Fetches the page referenced by the `last` relation of a Link header,
// delegating to the shared get-page helper.
function getLastPage (octokit, link, headers) {
  return getPage(octokit, link, 'last', headers)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 654:
|
|
/***/ (function(module) {
|
|
|
|
// This is not the set of all possible signals.
|
|
//
|
|
// It IS, however, the set of all signals that trigger
|
|
// an exit on either Linux or BSD systems. Linux is a
|
|
// superset of the signal names supported on BSD, and
|
|
// the unknown signals just fail to register, so we can
|
|
// catch that easily enough.
|
|
//
|
|
// Don't bother with SIGKILL. It's uncatchable, which
|
|
// means that we can't fire any callbacks anyway.
|
|
//
|
|
// If a user does happen to register a handler on a non-
|
|
// fatal signal like SIGWINCH or something, and then
|
|
// exit, it'll end up firing `process.emit('exit')`, so
|
|
// the handler will be fired anyway.
|
|
//
|
|
// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised
|
|
// artificially, inherently leave the process in a
|
|
// state from which it is not safe to try and enter JS
|
|
// listeners.
|
|
// Signals that trigger an exit on every supported platform.
module.exports = [
  'SIGABRT',
  'SIGALRM',
  'SIGHUP',
  'SIGINT',
  'SIGTERM'
]

// Additional fatal signals available everywhere except Windows.
if (process.platform !== 'win32') {
  module.exports.push(
    'SIGVTALRM',
    'SIGXCPU',
    'SIGXFSZ',
    'SIGUSR2',
    'SIGTRAP',
    'SIGSYS',
    'SIGQUIT',
    'SIGIOT'
    // should detect profiler and enable/disable accordingly.
    // see #21
    // 'SIGPROF'
  )
}

// Linux-only signals (unknown names simply fail to register elsewhere).
if (process.platform === 'linux') {
  module.exports.push(
    'SIGIO',
    'SIGPOLL',
    'SIGPWR',
    'SIGSTKFLT',
    'SIGUNUSED'
  )
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 656:
|
|
/***/ (function(module, exports) {
|
|
|
|
exports = module.exports = SemVer
|
|
|
|
var debug
|
|
/* istanbul ignore next */
|
|
if (typeof process === 'object' &&
|
|
process.env &&
|
|
process.env.NODE_DEBUG &&
|
|
/\bsemver\b/i.test(process.env.NODE_DEBUG)) {
|
|
debug = function () {
|
|
var args = Array.prototype.slice.call(arguments, 0)
|
|
args.unshift('SEMVER')
|
|
console.log.apply(console, args)
|
|
}
|
|
} else {
|
|
debug = function () {}
|
|
}
|
|
|
|
// Note: this is the semver.org version of the spec that it implements
|
|
// Not necessarily the package version of this code.
|
|
exports.SEMVER_SPEC_VERSION = '2.0.0'
|
|
|
|
var MAX_LENGTH = 256
|
|
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
|
|
/* istanbul ignore next */ 9007199254740991
|
|
|
|
// Max safe segment length for coercion.
|
|
var MAX_SAFE_COMPONENT_LENGTH = 16
|
|
|
|
// The actual regexps go on exports.re
|
|
var re = exports.re = []
|
|
var src = exports.src = []
|
|
var t = exports.tokens = {}
|
|
var R = 0
|
|
|
|
function tok (n) {
|
|
t[n] = R++
|
|
}
|
|
|
|
// The following Regular Expressions can be used for tokenizing,
|
|
// validating, and parsing SemVer version strings.
|
|
|
|
// ## Numeric Identifier
|
|
// A single `0`, or a non-zero digit followed by zero or more digits.
|
|
|
|
tok('NUMERICIDENTIFIER')
|
|
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
|
|
tok('NUMERICIDENTIFIERLOOSE')
|
|
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'
|
|
|
|
// ## Non-numeric Identifier
|
|
// Zero or more digits, followed by a letter or hyphen, and then zero or
|
|
// more letters, digits, or hyphens.
|
|
|
|
tok('NONNUMERICIDENTIFIER')
|
|
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
|
|
|
|
// ## Main Version
|
|
// Three dot-separated numeric identifiers.
|
|
|
|
tok('MAINVERSION')
|
|
src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
|
|
'(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
|
|
'(' + src[t.NUMERICIDENTIFIER] + ')'
|
|
|
|
tok('MAINVERSIONLOOSE')
|
|
src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
|
|
'(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
|
|
'(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'
|
|
|
|
// ## Pre-release Version Identifier
|
|
// A numeric identifier, or a non-numeric identifier.
|
|
|
|
tok('PRERELEASEIDENTIFIER')
|
|
src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +
|
|
'|' + src[t.NONNUMERICIDENTIFIER] + ')'
|
|
|
|
tok('PRERELEASEIDENTIFIERLOOSE')
|
|
src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +
|
|
'|' + src[t.NONNUMERICIDENTIFIER] + ')'
|
|
|
|
// ## Pre-release Version
|
|
// Hyphen, followed by one or more dot-separated pre-release version
|
|
// identifiers.
|
|
|
|
tok('PRERELEASE')
|
|
src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +
|
|
'(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'
|
|
|
|
tok('PRERELEASELOOSE')
|
|
src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
|
|
'(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'
|
|
|
|
// ## Build Metadata Identifier
|
|
// Any combination of digits, letters, or hyphens.
|
|
|
|
tok('BUILDIDENTIFIER')
|
|
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
|
|
|
|
// ## Build Metadata
|
|
// Plus sign, followed by one or more period-separated build metadata
|
|
// identifiers.
|
|
|
|
tok('BUILD')
|
|
src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] +
|
|
'(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))'
|
|
|
|
// ## Full Version String
|
|
// A main version, followed optionally by a pre-release version and
|
|
// build metadata.
|
|
|
|
// Note that the only major, minor, patch, and pre-release sections of
|
|
// the version string are capturing groups. The build metadata is not a
|
|
// capturing group, because it should not ever be used in version
|
|
// comparison.
|
|
|
|
tok('FULL')
|
|
tok('FULLPLAIN')
|
|
src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +
|
|
src[t.PRERELEASE] + '?' +
|
|
src[t.BUILD] + '?'
|
|
|
|
src[t.FULL] = '^' + src[t.FULLPLAIN] + '$'
|
|
|
|
// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
|
|
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
|
|
// common in the npm registry.
|
|
tok('LOOSEPLAIN')
|
|
src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] +
|
|
src[t.PRERELEASELOOSE] + '?' +
|
|
src[t.BUILD] + '?'
|
|
|
|
tok('LOOSE')
|
|
src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'
|
|
|
|
tok('GTLT')
|
|
src[t.GTLT] = '((?:<|>)?=?)'
|
|
|
|
// Something like "2.*" or "1.2.x".
|
|
// Note that "x.x" is a valid xRange identifer, meaning "any version"
|
|
// Only the first item is strictly required.
|
|
tok('XRANGEIDENTIFIERLOOSE')
|
|
src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
|
|
tok('XRANGEIDENTIFIER')
|
|
src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*'
|
|
|
|
tok('XRANGEPLAIN')
|
|
src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +
|
|
'(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
|
|
'(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
|
|
'(?:' + src[t.PRERELEASE] + ')?' +
|
|
src[t.BUILD] + '?' +
|
|
')?)?'
|
|
|
|
tok('XRANGEPLAINLOOSE')
|
|
src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
|
|
'(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
|
|
'(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
|
|
'(?:' + src[t.PRERELEASELOOSE] + ')?' +
|
|
src[t.BUILD] + '?' +
|
|
')?)?'
|
|
|
|
tok('XRANGE')
|
|
src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$'
|
|
tok('XRANGELOOSE')
|
|
src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$'
|
|
|
|
// Coercion.
|
|
// Extract anything that could conceivably be a part of a valid semver
|
|
tok('COERCE')
|
|
src[t.COERCE] = '(^|[^\\d])' +
|
|
'(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
|
|
'(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
|
|
'(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
|
|
'(?:$|[^\\d])'
|
|
tok('COERCERTL')
|
|
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
|
|
|
|
// Tilde ranges.
|
|
// Meaning is "reasonably at or greater than"
|
|
tok('LONETILDE')
|
|
src[t.LONETILDE] = '(?:~>?)'
|
|
|
|
tok('TILDETRIM')
|
|
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
|
|
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
|
|
var tildeTrimReplace = '$1~'
|
|
|
|
tok('TILDE')
|
|
src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'
|
|
tok('TILDELOOSE')
|
|
src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'
|
|
|
|
// Caret ranges.
|
|
// Meaning is "at least and backwards compatible with"
|
|
tok('LONECARET')
|
|
src[t.LONECARET] = '(?:\\^)'
|
|
|
|
tok('CARETTRIM')
|
|
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
|
|
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
|
|
var caretTrimReplace = '$1^'
|
|
|
|
tok('CARET')
|
|
src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'
|
|
tok('CARETLOOSE')
|
|
src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'
|
|
|
|
// A simple gt/lt/eq thing, or just "" to indicate "any version"
|
|
tok('COMPARATORLOOSE')
|
|
src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'
|
|
tok('COMPARATOR')
|
|
src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$'
|
|
|
|
// An expression to strip any whitespace between the gtlt and the thing
|
|
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
|
|
tok('COMPARATORTRIM')
|
|
src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
|
|
'\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'
|
|
|
|
// this one has to use the /g flag
|
|
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
|
|
var comparatorTrimReplace = '$1$2$3'
|
|
|
|
// Something like `1.2.3 - 1.2.4`
|
|
// Note that these all use the loose form, because they'll be
|
|
// checked against either the strict or loose comparator form
|
|
// later.
|
|
tok('HYPHENRANGE')
|
|
src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' +
|
|
'\\s+-\\s+' +
|
|
'(' + src[t.XRANGEPLAIN] + ')' +
|
|
'\\s*$'
|
|
|
|
tok('HYPHENRANGELOOSE')
|
|
src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +
|
|
'\\s+-\\s+' +
|
|
'(' + src[t.XRANGEPLAINLOOSE] + ')' +
|
|
'\\s*$'
|
|
|
|
// Star ranges basically just allow anything at all.
|
|
tok('STAR')
|
|
src[t.STAR] = '(<|>)?=?\\s*\\*'
|
|
|
|
// Compile to actual regexp objects.
|
|
// All are flag-free, unless they were created above with a flag.
|
|
for (var i = 0; i < R; i++) {
|
|
debug(i, src[i])
|
|
if (!re[i]) {
|
|
re[i] = new RegExp(src[i])
|
|
}
|
|
}
|
|
|
|
exports.parse = parse
// Parses a version string into a SemVer instance, or returns null when the
// input is not a valid (strict, or with options.loose, loose) semver.
function parse (version, options) {
  if (!options || typeof options !== 'object') {
    // Legacy calling convention: a truthy non-object `options` meant "loose".
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  // Guard against pathological inputs before running the regexes.
  if (version.length > MAX_LENGTH) {
    return null
  }

  var r = options.loose ? re[t.LOOSE] : re[t.FULL]
  if (!r.test(version)) {
    return null
  }

  try {
    return new SemVer(version, options)
  } catch (er) {
    return null
  }
}
|
|
|
|
exports.valid = valid
// Returns the normalized version string (e.g. "v1.2.3" -> "1.2.3" in loose
// mode) or null when `version` does not parse.
function valid (version, options) {
  var v = parse(version, options)
  return v ? v.version : null
}
|
|
|
|
exports.clean = clean
// Like `valid`, but first trims whitespace and strips leading "=" / "v"
// characters before parsing.
function clean (version, options) {
  var s = parse(version.trim().replace(/^[=v]+/, ''), options)
  return s ? s.version : null
}
|
|
|
|
exports.SemVer = SemVer

/**
 * Parses `version` into its components (major, minor, patch, prerelease,
 * build). Throws TypeError on invalid input. Callable with or without `new`.
 */
function SemVer (version, options) {
  if (!options || typeof options !== 'object') {
    // Legacy calling convention: truthy non-object `options` meant "loose".
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }
  if (version instanceof SemVer) {
    if (version.loose === options.loose) {
      // Same parsing mode: reuse the existing instance as-is.
      return version
    } else {
      // Different mode: re-parse from the version string below.
      version = version.version
    }
  } else if (typeof version !== 'string') {
    throw new TypeError('Invalid Version: ' + version)
  }

  if (version.length > MAX_LENGTH) {
    throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
  }

  if (!(this instanceof SemVer)) {
    return new SemVer(version, options)
  }

  debug('SemVer', version, options)
  this.options = options
  this.loose = !!options.loose

  var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])

  if (!m) {
    throw new TypeError('Invalid Version: ' + version)
  }

  this.raw = version

  // these are actually numbers
  this.major = +m[1]
  this.minor = +m[2]
  this.patch = +m[3]

  if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
    throw new TypeError('Invalid major version')
  }

  if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
    throw new TypeError('Invalid minor version')
  }

  if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
    throw new TypeError('Invalid patch version')
  }

  // numberify any prerelease numeric ids
  if (!m[4]) {
    this.prerelease = []
  } else {
    this.prerelease = m[4].split('.').map(function (id) {
      if (/^[0-9]+$/.test(id)) {
        var num = +id
        if (num >= 0 && num < MAX_SAFE_INTEGER) {
          return num
        }
      }
      // Non-numeric (or unsafely large) ids stay as strings.
      return id
    })
  }

  // Build metadata is kept but never participates in precedence.
  this.build = m[5] ? m[5].split('.') : []
  this.format()
}
|
|
|
|
// Recomputes and caches `this.version` from the parsed components.
// Build metadata is intentionally excluded (it has no precedence weight).
SemVer.prototype.format = function () {
  this.version = this.major + '.' + this.minor + '.' + this.patch
  if (this.prerelease.length) {
    this.version += '-' + this.prerelease.join('.')
  }
  return this.version
}

SemVer.prototype.toString = function () {
  return this.version
}
|
|
|
|
// Full precedence comparison: main version first, then prerelease rules.
// Returns negative / 0 / positive.
SemVer.prototype.compare = function (other) {
  debug('SemVer.compare', this.version, this.options, other)
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  return this.compareMain(other) || this.comparePre(other)
}

// Compares only the major.minor.patch triple.
SemVer.prototype.compareMain = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  return compareIdentifiers(this.major, other.major) ||
    compareIdentifiers(this.minor, other.minor) ||
    compareIdentifiers(this.patch, other.patch)
}
|
|
|
|
// Prerelease precedence: a version WITHOUT a prerelease ranks above one with
// a prerelease; otherwise identifiers are compared left-to-right.
SemVer.prototype.comparePre = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  // NOT having a prerelease is > having one
  if (this.prerelease.length && !other.prerelease.length) {
    return -1
  } else if (!this.prerelease.length && other.prerelease.length) {
    return 1
  } else if (!this.prerelease.length && !other.prerelease.length) {
    return 0
  }

  var i = 0
  do {
    var a = this.prerelease[i]
    var b = other.prerelease[i]
    debug('prerelease compare', i, a, b)
    if (a === undefined && b === undefined) {
      // Both lists exhausted together: equal.
      return 0
    } else if (b === undefined) {
      // A longer identifier list with a common prefix ranks higher.
      return 1
    } else if (a === undefined) {
      return -1
    } else if (a === b) {
      continue
    } else {
      return compareIdentifiers(a, b)
    }
  } while (++i)
}
|
|
|
|
// Compares build-metadata arrays element-by-element. Used only as a final
// tiebreaker by the module-level compareBuild helper.
SemVer.prototype.compareBuild = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  var i = 0
  do {
    var a = this.build[i]
    var b = other.build[i]
    debug('prerelease compare', i, a, b)
    if (a === undefined && b === undefined) {
      return 0
    } else if (b === undefined) {
      // A longer identifier list with a common prefix ranks higher.
      return 1
    } else if (a === undefined) {
      return -1
    } else if (a === b) {
      continue
    } else {
      return compareIdentifiers(a, b)
    }
  } while (++i)
}
|
|
|
|
// preminor will bump the version up to the next minor release, and immediately
|
|
// down to pre-release. premajor and prepatch work the same way.
|
|
// Increments this version in place according to `release`
// ('major'|'minor'|'patch'|'premajor'|'preminor'|'prepatch'|'prerelease'|'pre'),
// optionally tagging prereleases with `identifier`. Returns `this`.
SemVer.prototype.inc = function (release, identifier) {
  switch (release) {
    case 'premajor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor = 0
      this.major++
      this.inc('pre', identifier)
      break
    case 'preminor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor++
      this.inc('pre', identifier)
      break
    case 'prepatch':
      // If this is already a prerelease, it will bump to the next version
      // drop any prereleases that might already exist, since they are not
      // relevant at this point.
      this.prerelease.length = 0
      this.inc('patch', identifier)
      this.inc('pre', identifier)
      break
    // If the input is a non-prerelease version, this acts the same as
    // prepatch.
    case 'prerelease':
      if (this.prerelease.length === 0) {
        this.inc('patch', identifier)
      }
      this.inc('pre', identifier)
      break

    case 'major':
      // If this is a pre-major version, bump up to the same major version.
      // Otherwise increment major.
      // 1.0.0-5 bumps to 1.0.0
      // 1.1.0 bumps to 2.0.0
      if (this.minor !== 0 ||
          this.patch !== 0 ||
          this.prerelease.length === 0) {
        this.major++
      }
      this.minor = 0
      this.patch = 0
      this.prerelease = []
      break
    case 'minor':
      // If this is a pre-minor version, bump up to the same minor version.
      // Otherwise increment minor.
      // 1.2.0-5 bumps to 1.2.0
      // 1.2.1 bumps to 1.3.0
      if (this.patch !== 0 || this.prerelease.length === 0) {
        this.minor++
      }
      this.patch = 0
      this.prerelease = []
      break
    case 'patch':
      // If this is not a pre-release version, it will increment the patch.
      // If it is a pre-release it will bump up to the same patch version.
      // 1.2.0-5 patches to 1.2.0
      // 1.2.0 patches to 1.2.1
      if (this.prerelease.length === 0) {
        this.patch++
      }
      this.prerelease = []
      break
    // This probably shouldn't be used publicly.
    // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
    case 'pre':
      if (this.prerelease.length === 0) {
        this.prerelease = [0]
      } else {
        // Increment the right-most numeric prerelease id. `i = -2` flags
        // that an increment happened, distinguishing it from walking off
        // the front of the array (which leaves i at -1).
        var i = this.prerelease.length
        while (--i >= 0) {
          if (typeof this.prerelease[i] === 'number') {
            this.prerelease[i]++
            i = -2
          }
        }
        if (i === -1) {
          // didn't increment anything
          this.prerelease.push(0)
        }
      }
      if (identifier) {
        // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
        // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
        if (this.prerelease[0] === identifier) {
          if (isNaN(this.prerelease[1])) {
            this.prerelease = [identifier, 0]
          }
        } else {
          this.prerelease = [identifier, 0]
        }
      }
      break

    default:
      throw new Error('invalid increment argument: ' + release)
  }
  // Refresh the cached version string and raw form.
  this.format()
  this.raw = this.version
  return this
}
|
|
|
|
exports.inc = inc
// Bumps `version` by `release` type and returns the new version string, or
// null when the input is invalid. Also accepts the shorthand signature
// inc(version, release, identifier) with `loose` omitted.
function inc (version, release, loose, identifier) {
  if (typeof (loose) === 'string') {
    identifier = loose
    loose = undefined
  }

  try {
    return new SemVer(version, loose).inc(release, identifier).version
  } catch (er) {
    return null
  }
}
|
|
|
|
exports.diff = diff
// Returns the release type separating two versions ('major'/'minor'/'patch',
// 'pre'-prefixed when either side has a prerelease, or 'prerelease'), or
// null when they compare equal.
function diff (version1, version2) {
  if (eq(version1, version2)) {
    return null
  } else {
    // NOTE(review): parse() returns null for invalid input, so unequal
    // invalid versions would throw on the property accesses below —
    // presumably callers always pass valid versions; verify if reused.
    var v1 = parse(version1)
    var v2 = parse(version2)
    var prefix = ''
    if (v1.prerelease.length || v2.prerelease.length) {
      prefix = 'pre'
      // `var` hoists this to function scope, so it stays visible after
      // the `if` block.
      var defaultResult = 'prerelease'
    }
    for (var key in v1) {
      if (key === 'major' || key === 'minor' || key === 'patch') {
        if (v1[key] !== v2[key]) {
          return prefix + key
        }
      }
    }
    return defaultResult // may be undefined
  }
}
|
|
|
|
exports.compareIdentifiers = compareIdentifiers
|
|
|
|
var numeric = /^[0-9]+$/
/**
 * SemVer identifier comparison: numeric identifiers compare numerically and
 * always rank below alphanumeric ones; alphanumerics compare by ASCII order.
 * Returns -1 / 0 / 1.
 */
function compareIdentifiers (a, b) {
  const aIsNum = numeric.test(a)
  const bIsNum = numeric.test(b)

  if (aIsNum && bIsNum) {
    a = +a
    b = +b
  }

  if (a === b) {
    return 0
  }
  if (aIsNum && !bIsNum) {
    return -1
  }
  if (bIsNum && !aIsNum) {
    return 1
  }
  return a < b ? -1 : 1
}
|
|
|
|
exports.rcompareIdentifiers = rcompareIdentifiers
// Reverse of compareIdentifiers (for descending identifier sorts).
function rcompareIdentifiers (a, b) {
  return compareIdentifiers(b, a)
}
|
|
|
|
exports.major = major
// Returns the major component of `a`; throws on invalid versions.
function major (a, loose) {
  return new SemVer(a, loose).major
}

exports.minor = minor
// Returns the minor component of `a`; throws on invalid versions.
function minor (a, loose) {
  return new SemVer(a, loose).minor
}

exports.patch = patch
// Returns the patch component of `a`; throws on invalid versions.
function patch (a, loose) {
  return new SemVer(a, loose).patch
}
|
|
|
|
exports.compare = compare
// Core three-way comparison: negative when a < b, 0 when equal, positive
// when a > b.
function compare (a, b, loose) {
  return new SemVer(a, loose).compare(new SemVer(b, loose))
}

exports.compareLoose = compareLoose
// compare() with loose parsing forced on.
function compareLoose (a, b) {
  return compare(a, b, true)
}

exports.compareBuild = compareBuild
// Like compare(), but falls back to comparing build metadata when the
// versions are otherwise equal.
function compareBuild (a, b, loose) {
  var versionA = new SemVer(a, loose)
  var versionB = new SemVer(b, loose)
  return versionA.compare(versionB) || versionA.compareBuild(versionB)
}

exports.rcompare = rcompare
// Reversed comparison, for descending orderings.
function rcompare (a, b, loose) {
  return compare(b, a, loose)
}

exports.sort = sort
// Sorts a list of versions ascending, in place (Array#sort mutates).
function sort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compareBuild(a, b, loose)
  })
}

exports.rsort = rsort
// Sorts a list of versions descending, in place.
function rsort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compareBuild(b, a, loose)
  })
}
|
|
|
|
exports.gt = gt
// a > b
function gt (a, b, loose) {
  return compare(a, b, loose) > 0
}

exports.lt = lt
// a < b
function lt (a, b, loose) {
  return compare(a, b, loose) < 0
}

exports.eq = eq
// a == b (semver equality; build metadata is not compared)
function eq (a, b, loose) {
  return compare(a, b, loose) === 0
}

exports.neq = neq
// a != b
function neq (a, b, loose) {
  return compare(a, b, loose) !== 0
}

exports.gte = gte
// a >= b
function gte (a, b, loose) {
  return compare(a, b, loose) >= 0
}

exports.lte = lte
// a <= b
function lte (a, b, loose) {
  return compare(a, b, loose) <= 0
}
|
|
|
|
exports.cmp = cmp
// Apply an operator string to two versions.
// '===' and '!==' compare the raw version text with no semver semantics
// (objects are reduced to their .version string first); every other
// operator delegates to the matching helper. Throws TypeError on an
// unrecognized operator.
function cmp (a, op, b, loose) {
  switch (op) {
    case '===':
      if (typeof a === 'object') {
        a = a.version
      }
      if (typeof b === 'object') {
        b = b.version
      }
      return a === b

    case '!==':
      if (typeof a === 'object') {
        a = a.version
      }
      if (typeof b === 'object') {
        b = b.version
      }
      return a !== b

    case '':
    case '=':
    case '==':
      return eq(a, b, loose)

    case '!=':
      return neq(a, b, loose)

    case '>':
      return gt(a, b, loose)

    case '>=':
      return gte(a, b, loose)

    case '<':
      return lt(a, b, loose)

    case '<=':
      return lte(a, b, loose)

    default:
      throw new TypeError('Invalid operator: ' + op)
  }
}
|
|
|
|
exports.Comparator = Comparator
// A single comparator, e.g. '>=1.2.3': an operator plus a version.
// Parses the text and caches the canonical string form in `this.value`.
function Comparator (comp, options) {
  // Normalize the legacy boolean `loose` argument into an options object.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (comp instanceof Comparator) {
    // Reuse an existing instance when the looseness matches; otherwise
    // fall through and re-parse its string form under the new options.
    if (comp.loose === !!options.loose) {
      return comp
    } else {
      comp = comp.value
    }
  }

  // Support calling without `new`.
  if (!(this instanceof Comparator)) {
    return new Comparator(comp, options)
  }

  debug('comparator', comp, options)
  this.options = options
  this.loose = !!options.loose
  this.parse(comp)

  // The ANY sentinel (bare '' / '*') has no version text of its own.
  if (this.semver === ANY) {
    this.value = ''
  } else {
    this.value = this.operator + this.semver.version
  }

  debug('comp', this)
}
|
|
|
|
// Sentinel object marking a comparator that matches any version.
var ANY = {}
// Parse comparator text like '>=1.2.3' into `this.operator` and
// `this.semver`. Throws TypeError when the text does not match the
// comparator grammar.
Comparator.prototype.parse = function (comp) {
  var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
  var m = comp.match(r)

  if (!m) {
    throw new TypeError('Invalid comparator: ' + comp)
  }

  // '=' is the implicit default operator, so normalize it away.
  this.operator = m[1] !== undefined ? m[1] : ''
  if (this.operator === '=') {
    this.operator = ''
  }

  // if it literally is just '>' or '' then allow anything.
  if (!m[2]) {
    this.semver = ANY
  } else {
    this.semver = new SemVer(m[2], this.options.loose)
  }
}
|
|
|
|
// The canonical comparator text ('' for the match-anything comparator),
// precomputed by the constructor.
Comparator.prototype.toString = function () {
  return this.value
}
|
|
|
|
// Test whether one version satisfies this comparator.
// Accepts a SemVer instance, a version string, or the ANY sentinel.
Comparator.prototype.test = function (version) {
  debug('Comparator.test', version, this.options.loose)

  if (this.semver === ANY || version === ANY) {
    return true
  }

  if (typeof version === 'string') {
    try {
      version = new SemVer(version, this.options)
    } catch (er) {
      // Unparseable input never matches.
      return false
    }
  }

  return cmp(version, this.operator, this.semver, this.options)
}
|
|
|
|
// Determine whether this comparator's set of matching versions overlaps
// with `comp`'s set.
Comparator.prototype.intersects = function (comp, options) {
  if (!(comp instanceof Comparator)) {
    throw new TypeError('a Comparator is required')
  }

  // Normalize the legacy boolean `loose` argument into an options object.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  var rangeTmp

  // An operator-less comparator is either ANY ('' value, overlaps with
  // everything) or an exact version; test that exact version against the
  // other comparator treated as a range.
  if (this.operator === '') {
    if (this.value === '') {
      return true
    }
    rangeTmp = new Range(comp.value, options)
    return satisfies(this.value, rangeTmp, options)
  } else if (comp.operator === '') {
    if (comp.value === '') {
      return true
    }
    rangeTmp = new Range(this.value, options)
    return satisfies(comp.semver, rangeTmp, options)
  }

  // Both sides have an operator: enumerate the geometric overlap cases.
  var sameDirectionIncreasing =
    (this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '>=' || comp.operator === '>')
  var sameDirectionDecreasing =
    (this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '<=' || comp.operator === '<')
  var sameSemVer = this.semver.version === comp.semver.version
  var differentDirectionsInclusive =
    (this.operator === '>=' || this.operator === '<=') &&
    (comp.operator === '>=' || comp.operator === '<=')
  var oppositeDirectionsLessThan =
    cmp(this.semver, '<', comp.semver, options) &&
    ((this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '<=' || comp.operator === '<'))
  var oppositeDirectionsGreaterThan =
    cmp(this.semver, '>', comp.semver, options) &&
    ((this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '>=' || comp.operator === '>'))

  return sameDirectionIncreasing || sameDirectionDecreasing ||
    (sameSemVer && differentDirectionsInclusive) ||
    oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
|
|
|
|
exports.Range = Range
// A version range: a disjunction ('||') of comparator sets, where each
// set is a conjunction of Comparator instances (stored in `this.set`).
function Range (range, options) {
  // Normalize the legacy boolean `loose` argument into an options object.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (range instanceof Range) {
    // Reuse when parsed with identical flags; otherwise re-parse the raw
    // text under the requested options.
    if (range.loose === !!options.loose &&
        range.includePrerelease === !!options.includePrerelease) {
      return range
    } else {
      return new Range(range.raw, options)
    }
  }

  // A lone Comparator promotes to a single-comparator range.
  if (range instanceof Comparator) {
    return new Range(range.value, options)
  }

  // Support calling without `new`.
  if (!(this instanceof Range)) {
    return new Range(range, options)
  }

  this.options = options
  this.loose = !!options.loose
  this.includePrerelease = !!options.includePrerelease

  // First, split based on boolean or ||
  this.raw = range
  this.set = range.split(/\s*\|\|\s*/).map(function (range) {
    return this.parseRange(range.trim())
  }, this).filter(function (c) {
    // throw out any that are not relevant for whatever reason
    return c.length
  })

  if (!this.set.length) {
    throw new TypeError('Invalid SemVer Range: ' + range)
  }

  this.format()
}
|
|
|
|
// Rebuild `this.range`, the normalized string form, from the parsed sets
// and return it.
Range.prototype.format = function () {
  this.range = this.set.map(function (comps) {
    return comps.join(' ').trim()
  }).join('||').trim()
  return this.range
}

// The normalized range text computed by format().
Range.prototype.toString = function () {
  return this.range
}
|
|
|
|
// Parse one '||'-free range fragment into an array of Comparators,
// desugaring hyphen ranges, '~', '^', x-ranges and '*' first.
Range.prototype.parseRange = function (range) {
  var loose = this.options.loose
  range = range.trim()
  // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
  var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
  range = range.replace(hr, hyphenReplace)
  debug('hyphen replace', range)
  // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
  range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
  debug('comparator trim', range, re[t.COMPARATORTRIM])

  // `~ 1.2.3` => `~1.2.3`
  range = range.replace(re[t.TILDETRIM], tildeTrimReplace)

  // `^ 1.2.3` => `^1.2.3`
  range = range.replace(re[t.CARETTRIM], caretTrimReplace)

  // normalize spaces
  range = range.split(/\s+/).join(' ')

  // At this point, the range is completely trimmed and
  // ready to be split into comparators.

  var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
  // Desugar each piece into plain comparator text, then re-split since
  // one piece can expand to several comparators (e.g. '^1.2.3').
  var set = range.split(' ').map(function (comp) {
    return parseComparator(comp, this.options)
  }, this).join(' ').split(/\s+/)
  if (this.options.loose) {
    // in loose mode, throw out any that are not valid comparators
    set = set.filter(function (comp) {
      return !!comp.match(compRe)
    })
  }
  set = set.map(function (comp) {
    return new Comparator(comp, this.options)
  }, this)

  return set
}
|
|
|
|
// True when this range and `range` share at least one version.
// Sets are OR-ed; comparators within a set are AND-ed, so two ranges
// intersect when any pair of internally-satisfiable sets fully
// cross-intersect.
Range.prototype.intersects = function (range, options) {
  if (!(range instanceof Range)) {
    throw new TypeError('a Range is required')
  }

  return this.set.some(function (thisComparators) {
    return (
      isSatisfiable(thisComparators, options) &&
      range.set.some(function (rangeComparators) {
        return (
          isSatisfiable(rangeComparators, options) &&
          thisComparators.every(function (thisComparator) {
            return rangeComparators.every(function (rangeComparator) {
              return thisComparator.intersects(rangeComparator, options)
            })
          })
        )
      })
    )
  })
}
|
|
|
|
// take a set of comparators and determine whether there
// exists a version which can satisfy it
function isSatisfiable (comparators, options) {
  var result = true
  var remainingComparators = comparators.slice()
  var testComparator = remainingComparators.pop()

  // Pairwise check: pop one comparator at a time and require it to
  // intersect every comparator still remaining; bail on first failure.
  while (result && remainingComparators.length) {
    result = remainingComparators.every(function (otherComparator) {
      return testComparator.intersects(otherComparator, options)
    })

    testComparator = remainingComparators.pop()
  }

  return result
}
|
|
|
|
// Mostly just for testing and legacy API reasons
exports.toComparators = toComparators
// Return the parsed range as arrays of comparator strings, one array per
// '||' alternative.
function toComparators (range, options) {
  var parsed = new Range(range, options)
  return parsed.set.map(function (comparators) {
    var joined = comparators.map(function (c) {
      return c.value
    }).join(' ').trim()
    return joined.split(' ')
  })
}
|
|
|
|
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
function parseComparator (comp, options) {
  debug('comp', comp, options)
  // Order matters: carets, then tildes, then x-ranges, then stars.
  comp = replaceCarets(comp, options)
  debug('caret', comp)
  comp = replaceTildes(comp, options)
  debug('tildes', comp)
  comp = replaceXRanges(comp, options)
  debug('xrange', comp)
  comp = replaceStars(comp, options)
  debug('stars', comp)
  return comp
}
|
|
|
|
// True when a version component is absent or an explicit wildcard
// ('x', 'X', or '*').
function isX (id) {
  if (!id) {
    return true
  }
  return id === '*' || id.toLowerCase() === 'x'
}
|
|
|
|
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
function replaceTildes (comp, options) {
  // Expand each whitespace-separated tilde comparator independently.
  var pieces = comp.trim().split(/\s+/)
  var expanded = pieces.map(function (piece) {
    return replaceTilde(piece, options)
  })
  return expanded.join(' ')
}
|
|
|
|
// Expand a single tilde comparator into a '>=lower <upper' pair
// (see the table above replaceTildes for the exact mappings).
function replaceTilde (comp, options) {
  var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('tilde', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      // Bare '~' or '~*': matches anything, so expand to nothing.
      ret = ''
    } else if (isX(m)) {
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      // ~1.2 == >=1.2.0 <1.3.0
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    } else if (pr) {
      debug('replaceTilde pr', pr)
      ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
        ' <' + M + '.' + (+m + 1) + '.0'
    } else {
      // ~1.2.3 == >=1.2.3 <1.3.0
      ret = '>=' + M + '.' + m + '.' + p +
        ' <' + M + '.' + (+m + 1) + '.0'
    }

    debug('tilde return', ret)
    return ret
  })
}
|
|
|
|
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
function replaceCarets (comp, options) {
  // Expand each whitespace-separated caret comparator independently.
  var pieces = comp.trim().split(/\s+/)
  var expanded = pieces.map(function (piece) {
    return replaceCaret(piece, options)
  })
  return expanded.join(' ')
}
|
|
|
|
// Expand a single caret comparator into a '>=lower <upper' pair.
// The upper bound bumps the left-most non-zero component, so ^0.x is
// treated specially throughout.
function replaceCaret (comp, options) {
  debug('caret', comp, options)
  var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('caret', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      // Bare '^' or '^*': matches anything, so expand to nothing.
      ret = ''
    } else if (isX(m)) {
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      if (M === '0') {
        // ^0.2 == >=0.2.0 <0.3.0 (minor is the left-most non-zero part)
        ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
      } else {
        ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
      }
    } else if (pr) {
      debug('replaceCaret pr', pr)
      if (M === '0') {
        if (m === '0') {
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
            ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
            ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
          ' <' + (+M + 1) + '.0.0'
      }
    } else {
      debug('no pr')
      if (M === '0') {
        if (m === '0') {
          // ^0.0.3 == >=0.0.3 <0.0.4
          ret = '>=' + M + '.' + m + '.' + p +
            ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          ret = '>=' + M + '.' + m + '.' + p +
            ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        ret = '>=' + M + '.' + m + '.' + p +
          ' <' + (+M + 1) + '.0.0'
      }
    }

    debug('caret return', ret)
    return ret
  })
}
|
|
|
|
function replaceXRanges (comp, options) {
  debug('replaceXRanges', comp, options)
  // Expand each whitespace-separated x-range comparator independently.
  var expanded = comp.split(/\s+/).map(function (piece) {
    return replaceXRange(piece, options)
  })
  return expanded.join(' ')
}
|
|
|
|
// Desugar one x-range comparator (e.g. '>=1.x', '1.2.x') into concrete
// '>=' / '<' bounds, or '*' / '<0.0.0-0' for the degenerate cases.
function replaceXRange (comp, options) {
  comp = comp.trim()
  var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
  return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
    debug('xRange', comp, ret, gtlt, M, m, p, pr)
    var xM = isX(M)
    var xm = xM || isX(m)
    var xp = xm || isX(p)
    var anyX = xp

    // '=1.x' behaves the same as '1.x'.
    if (gtlt === '=' && anyX) {
      gtlt = ''
    }

    // if we're including prereleases in the match, then we need
    // to fix this to -0, the lowest possible prerelease value
    pr = options.includePrerelease ? '-0' : ''

    if (xM) {
      if (gtlt === '>' || gtlt === '<') {
        // nothing is allowed
        ret = '<0.0.0-0'
      } else {
        // nothing is forbidden
        ret = '*'
      }
    } else if (gtlt && anyX) {
      // we know patch is an x, because we have any x at all.
      // replace X with 0
      if (xm) {
        m = 0
      }
      p = 0

      if (gtlt === '>') {
        // >1 => >=2.0.0
        // >1.2 => >=1.3.0
        // >1.2.3 => >= 1.2.4
        gtlt = '>='
        if (xm) {
          M = +M + 1
          m = 0
          p = 0
        } else {
          m = +m + 1
          p = 0
        }
      } else if (gtlt === '<=') {
        // <=0.7.x is actually <0.8.0, since any 0.7.x should
        // pass. Similarly, <=7.x is actually <8.0.0, etc.
        gtlt = '<'
        if (xm) {
          M = +M + 1
        } else {
          m = +m + 1
        }
      }

      ret = gtlt + M + '.' + m + '.' + p + pr
    } else if (xm) {
      ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr
    } else if (xp) {
      ret = '>=' + M + '.' + m + '.0' + pr +
        ' <' + M + '.' + (+m + 1) + '.0' + pr
    }

    debug('xRange return', ret)

    return ret
  })
}
|
|
|
|
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars (comp, options) {
  debug('replaceStars', comp, options)
  // Looseness is ignored here. star is always as loose as it gets!
  // (`options` is only used for the debug line above.)
  return comp.trim().replace(re[t.STAR], '')
}
|
|
|
|
// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) {
  // Lower bound: wildcard parts widen to .0; full versions become '>='.
  if (isX(fM)) {
    from = ''
  } else if (isX(fm)) {
    from = '>=' + fM + '.0.0'
  } else if (isX(fp)) {
    from = '>=' + fM + '.' + fm + '.0'
  } else {
    from = '>=' + from
  }

  // Upper bound: wildcard parts become an exclusive '<' on the next
  // version; fully-specified versions stay inclusive '<='.
  if (isX(tM)) {
    to = ''
  } else if (isX(tm)) {
    to = '<' + (+tM + 1) + '.0.0'
  } else if (isX(tp)) {
    to = '<' + tM + '.' + (+tm + 1) + '.0'
  } else if (tpr) {
    to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
  } else {
    to = '<=' + to
  }

  return (from + ' ' + to).trim()
}
|
|
|
|
// if ANY of the sets match ALL of its comparators, then pass
Range.prototype.test = function (version) {
  if (!version) {
    return false
  }

  if (typeof version === 'string') {
    try {
      version = new SemVer(version, this.options)
    } catch (er) {
      // Unparseable input never matches.
      return false
    }
  }

  var options = this.options
  return this.set.some(function (comparators) {
    return testSet(comparators, version, options)
  })
}
|
|
|
|
// Test `version` against ALL comparators in one set (AND semantics),
// with the special prerelease-opt-in rule applied afterwards.
function testSet (set, version, options) {
  for (var i = 0; i < set.length; i++) {
    if (!set[i].test(version)) {
      return false
    }
  }

  if (version.prerelease.length && !options.includePrerelease) {
    // Find the set of versions that are allowed to have prereleases
    // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
    // That should allow `1.2.3-pr.2` to pass.
    // However, `1.2.4-alpha.notready` should NOT be allowed,
    // even though it's within the range set by the comparators.
    for (i = 0; i < set.length; i++) {
      debug(set[i].semver)
      if (set[i].semver === ANY) {
        continue
      }

      if (set[i].semver.prerelease.length > 0) {
        // Prereleases only match when some comparator mentions a
        // prerelease on the exact same major.minor.patch tuple.
        var allowed = set[i].semver
        if (allowed.major === version.major &&
            allowed.minor === version.minor &&
            allowed.patch === version.patch) {
          return true
        }
      }
    }

    // Version has a -pre, but it's not one of the ones we like.
    return false
  }

  return true
}
|
|
|
|
exports.satisfies = satisfies
// True when `version` lies inside `range`. An unparseable range is
// treated as unsatisfied rather than throwing.
function satisfies (version, range, options) {
  var parsed
  try {
    parsed = new Range(range, options)
  } catch (er) {
    return false
  }
  return parsed.test(version)
}
|
|
|
|
exports.maxSatisfying = maxSatisfying
// Return the highest version in `versions` that satisfies `range`, or
// null when the range is invalid or nothing matches. The original
// (uncloned) array element is returned, not a SemVer instance.
function maxSatisfying (versions, range, options) {
  var max = null
  var maxSV = null
  try {
    var rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }
  versions.forEach(function (v) {
    if (rangeObj.test(v)) {
      // satisfies(v, range, options)
      if (!max || maxSV.compare(v) === -1) {
        // compare(max, v, true)
        max = v
        maxSV = new SemVer(max, options)
      }
    }
  })
  return max
}
|
|
|
|
exports.minSatisfying = minSatisfying
// Return the lowest version in `versions` that satisfies `range`, or
// null when the range is invalid or nothing matches. Mirrors
// maxSatisfying() with the comparison inverted.
function minSatisfying (versions, range, options) {
  var min = null
  var minSV = null
  try {
    var rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }
  versions.forEach(function (v) {
    if (rangeObj.test(v)) {
      // satisfies(v, range, options)
      if (!min || minSV.compare(v) === 1) {
        // compare(min, v, true)
        min = v
        minSV = new SemVer(min, options)
      }
    }
  })
  return min
}
|
|
|
|
exports.minVersion = minVersion
// Return the lowest SemVer that could possibly satisfy `range`, or null
// when no such version exists.
function minVersion (range, loose) {
  range = new Range(range, loose)

  // Fast paths: 0.0.0, or its lowest prerelease, satisfies most ranges.
  var minver = new SemVer('0.0.0')
  if (range.test(minver)) {
    return minver
  }

  minver = new SemVer('0.0.0-0')
  if (range.test(minver)) {
    return minver
  }

  // Otherwise the answer is the smallest lower bound across the sets.
  minver = null
  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    comparators.forEach(function (comparator) {
      // Clone to avoid manipulating the comparator's semver object.
      var compver = new SemVer(comparator.semver.version)
      switch (comparator.operator) {
        case '>':
          // '>x.y.z' excludes the endpoint, so step to the next possible
          // candidate: bump the patch, or append the lowest prerelease.
          if (compver.prerelease.length === 0) {
            compver.patch++
          } else {
            compver.prerelease.push(0)
          }
          compver.raw = compver.format()
          /* fallthrough */
        case '':
        case '>=':
          if (!minver || gt(minver, compver)) {
            minver = compver
          }
          break
        case '<':
        case '<=':
          /* Ignore maximum versions */
          break
        /* istanbul ignore next */
        default:
          throw new Error('Unexpected operation: ' + comparator.operator)
      }
    })
  }

  // The candidate must still satisfy the whole range (upper bounds may
  // have excluded it).
  if (minver && range.test(minver)) {
    return minver
  }

  return null
}
|
|
|
|
exports.validRange = validRange
// Return the normalized range text, or null when `range` is invalid.
function validRange (range, options) {
  try {
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    var normalized = new Range(range, options).range
    return normalized || '*'
  } catch (er) {
    return null
  }
}
|
|
|
|
// Determine if version is less than all the versions possible in the range
// (thin wrapper over outside()).
exports.ltr = ltr
function ltr (version, range, options) {
  return outside(version, range, '<', options)
}

// Determine if version is greater than all the versions possible in the range.
// (thin wrapper over outside()).
exports.gtr = gtr
function gtr (version, range, options) {
  return outside(version, range, '>', options)
}
|
|
|
|
exports.outside = outside
// True when `version` falls entirely outside `range` on the `hilo` side:
// hilo '>' asks "is version greater than every version in the range?",
// hilo '<' asks "is version less than every version in the range?".
// Throws TypeError for any other hilo value; throws if inputs are invalid.
function outside (version, range, hilo, options) {
  version = new SemVer(version, options)
  range = new Range(range, options)

  // Pick comparison helpers so the same walk below works for both
  // directions; variable names read as if in "gtr" ('>') mode.
  var gtfn, ltefn, ltfn, comp, ecomp
  switch (hilo) {
    case '>':
      gtfn = gt
      ltefn = lte
      ltfn = lt
      comp = '>'
      ecomp = '>='
      break
    case '<':
      gtfn = lt
      ltefn = gte
      ltfn = gt
      comp = '<'
      ecomp = '<='
      break
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"')
  }

  // If it satisifes the range it is not outside
  if (satisfies(version, range, options)) {
    return false
  }

  // From now on, variable terms are as if we're in "gtr" mode.
  // but note that everything is flipped for the "ltr" function.

  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    var high = null
    var low = null

    // Find the extreme (highest/lowest version) comparators of this set.
    comparators.forEach(function (comparator) {
      if (comparator.semver === ANY) {
        comparator = new Comparator('>=0.0.0')
      }
      high = high || comparator
      low = low || comparator
      if (gtfn(comparator.semver, high.semver, options)) {
        high = comparator
      } else if (ltfn(comparator.semver, low.semver, options)) {
        low = comparator
      }
    })

    // If the edge version comparator has a operator then our version
    // isn't outside it
    if (high.operator === comp || high.operator === ecomp) {
      return false
    }

    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false
    }
  }
  return true
}
|
|
|
|
exports.prerelease = prerelease
// Return the prerelease identifier array of `version`, or null when the
// version is invalid or has no prerelease component.
function prerelease (version, options) {
  var parsed = parse(version, options)
  if (parsed && parsed.prerelease.length) {
    return parsed.prerelease
  }
  return null
}
|
|
|
|
exports.intersects = intersects
// True when the two ranges share at least one version.
function intersects (r1, r2, options) {
  var rangeA = new Range(r1, options)
  var rangeB = new Range(r2, options)
  return rangeA.intersects(rangeB)
}
|
|
|
|
exports.coerce = coerce
// Coerce an arbitrary string (or number) into a SemVer by extracting the
// first (or, with options.rtl, the right-most) version-looking substring.
// Returns null when nothing coercible is found.
function coerce (version, options) {
  if (version instanceof SemVer) {
    return version
  }

  if (typeof version === 'number') {
    version = String(version)
  }

  if (typeof version !== 'string') {
    return null
  }

  options = options || {}

  var match = null
  if (!options.rtl) {
    match = version.match(re[t.COERCE])
  } else {
    // Find the right-most coercible string that does not share
    // a terminus with a more left-ward coercible string.
    // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
    //
    // Walk through the string checking with a /g regexp
    // Manually set the index so as to pick up overlapping matches.
    // Stop when we get a match that ends at the string end, since no
    // coercible string can be more right-ward without the same terminus.
    var next
    while ((next = re[t.COERCERTL].exec(version)) &&
      (!match || match.index + match[0].length !== version.length)
    ) {
      if (!match ||
          next.index + next[0].length !== match.index + match[0].length) {
        match = next
      }
      re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
    }
    // leave it in a clean state
    re[t.COERCERTL].lastIndex = -1
  }

  if (match === null) {
    return null
  }

  // match[2]/[3]/[4] are major/minor/patch; missing parts default to 0.
  return parse(match[2] +
    '.' + (match[3] || '0') +
    '.' + (match[4] || '0'), options)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 669:
/***/ (function(module) {

// Webpack shim: re-export Node's built-in `util` module under id 669.
module.exports = require("util");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 672:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript-emitted __awaiter helper: drives a generator that yields
// promises, implementing async/await on pre-async targets.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var _a;
Object.defineProperty(exports, "__esModule", { value: true });
const assert_1 = __webpack_require__(357);
const fs = __webpack_require__(747);
const path = __webpack_require__(622);
// Re-export the promise-based fs API (fs.promises) so the functions
// below can `yield exports.stat(...)` etc.
_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
exports.IS_WINDOWS = process.platform === 'win32';
|
|
// Check whether a path exists on disk.
// Returns false only for ENOENT; any other stat error propagates.
function exists(fsPath) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            yield exports.stat(fsPath);
        }
        catch (err) {
            if (err.code === 'ENOENT') {
                return false;
            }
            throw err;
        }
        return true;
    });
}
exports.exists = exists;
|
|
// True when `fsPath` is a directory. With useStat=false (default) the
// check uses lstat, so a symlink to a directory reports false; pass
// useStat=true to follow symlinks.
function isDirectory(fsPath, useStat = false) {
    return __awaiter(this, void 0, void 0, function* () {
        const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
        return stats.isDirectory();
    });
}
exports.isDirectory = isDirectory;
|
|
/**
 * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
 * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
 */
function isRooted(p) {
    // Separators are normalized first, so '/'-style input also matches
    // the backslash checks on Windows.
    p = normalizeSeparators(p);
    if (!p) {
        throw new Error('isRooted() parameter "p" cannot be empty');
    }
    if (exports.IS_WINDOWS) {
        return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello
        ); // e.g. C: or C:\hello
    }
    return p.startsWith('/');
}
exports.isRooted = isRooted;
|
|
/**
 * Recursively create a directory at `fsPath`.
 *
 * This implementation is optimistic, meaning it attempts to create the full
 * path first, and backs up the path stack from there.
 *
 * @param fsPath The path to create
 * @param maxDepth The maximum recursion depth
 * @param depth The current recursion depth
 */
function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
    return __awaiter(this, void 0, void 0, function* () {
        assert_1.ok(fsPath, 'a path argument must be provided');
        fsPath = path.resolve(fsPath);
        if (depth >= maxDepth)
            return exports.mkdir(fsPath);
        try {
            yield exports.mkdir(fsPath);
            return;
        }
        catch (err) {
            switch (err.code) {
                case 'ENOENT': {
                    // Parent missing: create it first, then retry this level.
                    yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
                    yield exports.mkdir(fsPath);
                    return;
                }
                default: {
                    // Other errors (e.g. EEXIST) are only OK when the path is
                    // already a directory; otherwise rethrow the original error.
                    let stats;
                    try {
                        stats = yield exports.stat(fsPath);
                    }
                    catch (err2) {
                        throw err;
                    }
                    if (!stats.isDirectory())
                        throw err;
                }
            }
        }
    });
}
exports.mkdirP = mkdirP;
|
|
/**
 * Best effort attempt to determine whether a file exists and is executable.
 * @param filePath file path to check
 * @param extensions additional file extensions to try
 * @return if file exists and is executable, returns the file path. otherwise empty string.
 */
function tryGetExecutablePath(filePath, extensions) {
    return __awaiter(this, void 0, void 0, function* () {
        let stats = undefined;
        try {
            // test file exists
            stats = yield exports.stat(filePath);
        }
        catch (err) {
            // ENOENT simply means "keep looking"; other errors are logged
            // and otherwise ignored (best effort).
            if (err.code !== 'ENOENT') {
                // eslint-disable-next-line no-console
                console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
            }
        }
        if (stats && stats.isFile()) {
            if (exports.IS_WINDOWS) {
                // on Windows, test for valid extension
                const upperExt = path.extname(filePath).toUpperCase();
                if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
                    return filePath;
                }
            }
            else {
                if (isUnixExecutable(stats)) {
                    return filePath;
                }
            }
        }
        // try each extension
        const originalFilePath = filePath;
        for (const extension of extensions) {
            filePath = originalFilePath + extension;
            stats = undefined;
            try {
                stats = yield exports.stat(filePath);
            }
            catch (err) {
                if (err.code !== 'ENOENT') {
                    // eslint-disable-next-line no-console
                    console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
                }
            }
            if (stats && stats.isFile()) {
                if (exports.IS_WINDOWS) {
                    // preserve the case of the actual file (since an extension was appended)
                    try {
                        const directory = path.dirname(filePath);
                        const upperName = path.basename(filePath).toUpperCase();
                        for (const actualName of yield exports.readdir(directory)) {
                            if (upperName === actualName.toUpperCase()) {
                                filePath = path.join(directory, actualName);
                                break;
                            }
                        }
                    }
                    catch (err) {
                        // eslint-disable-next-line no-console
                        console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
                    }
                    return filePath;
                }
                else {
                    if (isUnixExecutable(stats)) {
                        return filePath;
                    }
                }
            }
        }
        // Not found with any extension.
        return '';
    });
}
exports.tryGetExecutablePath = tryGetExecutablePath;
|
|
// Normalize path separators: on Windows convert '/' to '\' and collapse
// repeated separators; on POSIX just collapse repeated '/'.
function normalizeSeparators(p) {
    p = p || '';
    if (exports.IS_WINDOWS) {
        // convert slashes on Windows
        p = p.replace(/\//g, '\\');
        // remove redundant slashes
        return p.replace(/\\\\+/g, '\\');
    }
    // remove redundant slashes
    return p.replace(/\/\/+/g, '/');
}
|
|
// on Mac/Linux, test the execute bit
// R W X R W X R W X
// 256 128 64 32 16 8 4 2 1
function isUnixExecutable(stats) {
    // Executable if: anyone may execute (mode & 1), or the group may
    // execute (mode & 8) and the process's gid matches the file's, or
    // the owner may execute (mode & 64) and the process's uid matches.
    return ((stats.mode & 1) > 0 ||
        ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
        ((stats.mode & 64) > 0 && stats.uid === process.getuid()));
}
|
|
//# sourceMappingURL=io-util.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 674:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = authenticate;

const { Deprecation } = __webpack_require__(692);
const once = __webpack_require__(969);

// Log the deprecation warning at most once per process.
const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation));

// Deprecated octokit.authenticate() implementation: validates the shape
// of `options` for each supported auth type and stores it on
// `state.auth`. A falsy `options` disables authentication.
function authenticate(state, options) {
  deprecateAuthenticate(
    state.octokit.log,
    new Deprecation(
      '[@octokit/rest] octokit.authenticate() is deprecated. Use "auth" constructor option instead.'
    )
  );

  if (!options) {
    state.auth = false;
    return;
  }

  switch (options.type) {
    case "basic":
      if (!options.username || !options.password) {
        throw new Error(
          "Basic authentication requires both a username and password to be set"
        );
      }
      break;

    case "oauth":
      // Either a token, or a key & secret pair, is acceptable.
      if (!options.token && !(options.key && options.secret)) {
        throw new Error(
          "OAuth2 authentication requires a token or key & secret to be set"
        );
      }
      break;

    case "token":
    case "app":
      if (!options.token) {
        throw new Error("Token authentication requires a token to be set");
      }
      break;

    default:
      throw new Error(
        "Invalid authentication type, must be 'basic', 'oauth', 'token' or 'app'"
      );
  }

  state.auth = options;
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 675:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = function btoa(str) {
|
|
return new Buffer(str).toString('base64')
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 692:
|
|
/***/ (function(__unusedmodule, exports) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
/**
 * Error subclass used to signal deprecated API usage.  Instances carry the
 * name "Deprecation" so loggers and callers can tell them apart from
 * ordinary errors.
 */
class Deprecation extends Error {
  constructor(message) {
    super(message);
    this.name = 'Deprecation';

    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
}
|
|
|
|
exports.Deprecation = Deprecation;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 696:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
|
|
/*!
|
|
* isobject <https://github.com/jonschlinkert/isobject>
|
|
*
|
|
* Copyright (c) 2014-2017, Jon Schlinkert.
|
|
* Released under the MIT License.
|
|
*/
|
|
|
|
// True for any non-null value of type "object" that is not an array.
function isObject(val) {
  if (val == null) return false;
  if (Array.isArray(val)) return false;
  return typeof val === 'object';
}

/*!
 * is-plain-object <https://github.com/jonschlinkert/is-plain-object>
 *
 * Copyright (c) 2014-2017, Jon Schlinkert.
 * Released under the MIT License.
 */

// True when `o` is an object whose internal [[Class]] tag is exactly
// "Object" (rules out Date, RegExp, class instances with custom tags, ...).
function isObjectObject(o) {
  if (isObject(o) !== true) return false;
  return Object.prototype.toString.call(o) === '[object Object]';
}

// True only for "plain" objects: ones created via `{}` or `new Object()`
// with an unmodified constructor/prototype chain.  Objects with a null
// prototype, arrays, and class instances all return false.
function isPlainObject(o) {
  if (isObjectObject(o) === false) return false;

  // If has modified constructor
  var ctor = o.constructor;
  if (typeof ctor !== 'function') return false;

  // If has modified prototype
  var prot = ctor.prototype;
  if (isObjectObject(prot) === false) return false;

  // If constructor does not have an Object-specific method
  if (prot.hasOwnProperty('isPrototypeOf') === false) {
    return false;
  }

  // Most likely a plain Object
  return true;
}
|
|
|
|
module.exports = isPlainObject;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 697:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
module.exports = (promise, onFinally) => {
|
|
onFinally = onFinally || (() => {});
|
|
|
|
return promise.then(
|
|
val => new Promise(resolve => {
|
|
resolve(onFinally());
|
|
}).then(() => val),
|
|
err => new Promise(resolve => {
|
|
resolve(onFinally());
|
|
}).then(() => {
|
|
throw err;
|
|
})
|
|
);
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 722:
|
|
/***/ (function(module) {
|
|
|
|
/**
 * Convert array of 16 byte values to UUID string format of the form:
 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
 */

// Lookup table: byte value (0-255) -> two lowercase hex digits.
var byteToHex = [];
for (var i = 0; i < 256; ++i) {
  // padStart replaces the deprecated `(i + 0x100).toString(16).substr(1)`
  // zero-padding trick; output is identical lowercase two-digit hex.
  byteToHex[i] = i.toString(16).padStart(2, '0');
}

/**
 * Format 16 bytes of `buf`, starting at `offset` (default 0), as a
 * canonical 8-4-4-4-12 UUID string.
 * @param {Array|Uint8Array} buf - source bytes
 * @param {number} [offset] - index of the first byte to read
 * @returns {string} lowercase hex UUID representation
 */
function bytesToUuid(buf, offset) {
  var i = offset || 0;
  var bth = byteToHex;
  // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
  return ([bth[buf[i++]], bth[buf[i++]],
    bth[buf[i++]], bth[buf[i++]], '-',
    bth[buf[i++]], bth[buf[i++]], '-',
    bth[buf[i++]], bth[buf[i++]], '-',
    bth[buf[i++]], bth[buf[i++]], '-',
    bth[buf[i++]], bth[buf[i++]],
    bth[buf[i++]], bth[buf[i++]],
    bth[buf[i++]], bth[buf[i++]]]).join('');
}
|
|
|
|
module.exports = bytesToUuid;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 742:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
var fs = __webpack_require__(747)
|
|
var core
|
|
if (process.platform === 'win32' || global.TESTING_WINDOWS) {
|
|
core = __webpack_require__(818)
|
|
} else {
|
|
core = __webpack_require__(197)
|
|
}
|
|
|
|
module.exports = isexe
|
|
isexe.sync = sync
|
|
|
|
/**
 * Async check whether `path` is executable, delegating to the
 * platform-specific `core` implementation (win32 vs. posix) chosen at
 * module load time.  Callback style when `cb` is given; otherwise a
 * Promise is returned.  EACCES — or any error when options.ignoreErrors
 * is set — is reported as "not executable" rather than as a failure.
 */
function isexe (path, options, cb) {
  // (path, cb) call form: shift arguments.
  if (typeof options === 'function') {
    cb = options
    options = {}
  }

  if (!cb) {
    if (typeof Promise !== 'function') {
      throw new TypeError('callback not provided')
    }

    // No callback supplied: promisify by recursing with one.
    return new Promise(function (resolve, reject) {
      isexe(path, options || {}, function (er, is) {
        if (er) {
          reject(er)
        } else {
          resolve(is)
        }
      })
    })
  }

  core(path, options || {}, function (er, is) {
    // ignore EACCES because that just means we aren't allowed to run it
    if (er && (er.code === 'EACCES' || (options && options.ignoreErrors))) {
      er = null
      is = false
    }
    cb(er, is)
  })
}
|
|
|
|
/**
 * Synchronous variant of isexe().  Returns false instead of throwing for
 * EACCES, or for any error when options.ignoreErrors is set; all other
 * errors propagate.
 */
function sync (path, options) {
  // my kingdom for a filtered catch
  try {
    return core.sync(path, options || {})
  } catch (er) {
    var tolerated = (options && options.ignoreErrors) || er.code === 'EACCES'
    if (!tolerated) {
      throw er
    }
    return false
  }
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 747:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("fs");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 753:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
|
|
|
|
var endpoint = __webpack_require__(385);
|
|
var universalUserAgent = __webpack_require__(211);
|
|
var isPlainObject = _interopDefault(__webpack_require__(696));
|
|
var nodeFetch = _interopDefault(__webpack_require__(454));
|
|
var requestError = __webpack_require__(463);
|
|
|
|
const VERSION = "5.4.2";
|
|
|
|
// Resolve the raw binary payload of a fetch Response (used for
// non-JSON, non-text content types).
function getBufferResponse(response) {
  return response.arrayBuffer();
}
|
|
|
|
/**
 * Perform an HTTP request described by parsed endpoint options and
 * normalize the outcome to `{ status, url, headers, data }`.
 *
 * - Plain-object / array bodies are JSON-stringified before sending.
 * - 204/205 responses (and successful HEAD responses) resolve with no data.
 * - 304 and any status >= 400 reject with a RequestError; for >= 400 the
 *   response text is parsed as JSON when possible and merged into the error.
 * - Any non-RequestError failure (e.g. a network error thrown by fetch)
 *   is wrapped in a RequestError with status 500.
 */
function fetchWrapper(requestOptions) {
  // Serialize structured bodies; strings/Buffers/streams pass through as-is.
  if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }

  let headers = {};
  let status;
  let url;
  // A caller-supplied fetch (requestOptions.request.fetch) wins over node-fetch.
  const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
  return fetch(requestOptions.url, Object.assign({
    method: requestOptions.method,
    body: requestOptions.body,
    headers: requestOptions.headers,
    redirect: requestOptions.redirect
  }, requestOptions.request)).then(response => {
    url = response.url;
    status = response.status;

    // Copy response headers into a plain object for the result/error payloads.
    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }

    if (status === 204 || status === 205) {
      return;
    } // GitHub API returns 200 for HEAD requests

    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return;
      }

      // HEAD has no body, so the status text is the best message available.
      throw new requestError.RequestError(response.statusText, status, {
        headers,
        request: requestOptions
      });
    }

    if (status === 304) {
      throw new requestError.RequestError("Not modified", status, {
        headers,
        request: requestOptions
      });
    }

    if (status >= 400) {
      // Use the response text as the error message, then try to enrich the
      // error with the parsed JSON body (GitHub's error schema).
      return response.text().then(message => {
        const error = new requestError.RequestError(message, status, {
          headers,
          request: requestOptions
        });

        try {
          let responseBody = JSON.parse(error.message);
          Object.assign(error, responseBody);
          let errors = responseBody.errors; // Assumption `errors` would always be in Array format

          error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
        } catch (e) {// ignore, see octokit/rest.js#684
        }

        throw error;
      });
    }

    const contentType = response.headers.get("content-type");

    if (/application\/json/.test(contentType)) {
      return response.json();
    }

    if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
      return response.text();
    }

    // Everything else (tarballs, zip archives, ...) is returned as a buffer.
    return getBufferResponse(response);
  }).then(data => {
    return {
      status,
      url,
      headers,
      data
    };
  }).catch(error => {
    // RequestErrors thrown above pass through untouched.
    if (error instanceof requestError.RequestError) {
      throw error;
    }

    throw new requestError.RequestError(error.message, 500, {
      headers,
      request: requestOptions
    });
  });
}
|
|
|
|
/**
 * Build a `request` function bound to `oldEndpoint` with `newDefaults`
 * merged in.  The returned function exposes `.endpoint` and `.defaults()`
 * so further defaults can be layered on.  When the merged endpoint options
 * carry a `request.hook`, the actual fetch is routed through that hook.
 */
function withDefaults(oldEndpoint, newDefaults) {
  const endpoint = oldEndpoint.defaults(newDefaults);

  // Decorate a function with the shared endpoint + chained defaults().
  const attach = target => Object.assign(target, {
    endpoint,
    defaults: withDefaults.bind(null, endpoint)
  });

  const newApi = function (route, parameters) {
    const mergedOptions = endpoint.merge(route, parameters);

    // Fast path: no hook registered, dispatch straight to fetch.
    if (!mergedOptions.request || !mergedOptions.request.hook) {
      return fetchWrapper(endpoint.parse(mergedOptions));
    }

    // Hook path: hand the hook a plain request function it can invoke.
    const request = (route, parameters) => {
      return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
    };

    attach(request);
    return mergedOptions.request.hook(request, mergedOptions);
  };

  return attach(newApi);
}
|
|
|
|
// Module-level default `request`, preconfigured only with the library's
// User-Agent header (library version plus Node/OS info from
// universal-user-agent); callers layer their own defaults on top.
const request = withDefaults(endpoint.endpoint, {
  headers: {
    "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  }
});
|
|
|
|
exports.request = request;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 761:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("zlib");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 763:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = removeHook
|
|
|
|
// Unregister `method` from the hook list registered under `name`.
// No-op when the name is unknown or the method was never registered.
// Matching is done against the originally-registered function (`orig`),
// since registration may have wrapped it.
function removeHook (state, name, method) {
  var hooks = state.registry[name]
  if (!hooks) {
    return
  }

  var index = hooks
    .map(function (registered) { return registered.orig })
    .indexOf(method)

  if (index === -1) {
    return
  }

  hooks.splice(index, 1)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 768:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
module.exports = function (x) {
|
|
var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt();
|
|
var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt();
|
|
|
|
if (x[x.length - 1] === lf) {
|
|
x = x.slice(0, x.length - 1);
|
|
}
|
|
|
|
if (x[x.length - 1] === cr) {
|
|
x = x.slice(0, x.length - 1);
|
|
}
|
|
|
|
return x;
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 777:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = getFirstPage
|
|
|
|
const getPage = __webpack_require__(265)
|
|
|
|
// Fetch the page referenced by the `first` rel of a paginated response's
// Link header; thin wrapper around getPage.
function getFirstPage (octokit, link, headers) {
  return getPage(octokit, link, 'first', headers)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 796:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
|
|
|
|
var osName = _interopDefault(__webpack_require__(2));
|
|
|
|
/**
 * Build a user-agent string like "Node.js/12.16.1 (<os>; <arch>)".
 * Falls back to a generic Windows marker when os-name's WMIC probe fails.
 */
function getUserAgent() {
  try {
    // slice(1) drops the leading "v" from process.version ("v12.16.1");
    // replaces the deprecated String.prototype.substr with identical output.
    return `Node.js/${process.version.slice(1)} (${osName()}; ${process.arch})`;
  } catch (error) {
    // osName() can fail with a "wmic os get Caption" error message
    // (Windows version probe); still return a usable agent string then.
    if (/wmic os get Caption/.test(error.message)) {
      return "Windows <version undetectable>";
    }

    throw error;
  }
}
|
|
|
|
exports.getUserAgent = getUserAgent;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 813:
|
|
/***/ (function(__unusedmodule, exports) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
/**
 * Resolve a token string into octokit auth info.  The token *type* is
 * inferred from its shape: three dot-separated segments -> a JWT ("app"),
 * a "v<digits>." prefix -> "installation", anything else -> "oauth".
 */
async function auth(token) {
  let tokenType;
  if (token.split(/\./).length === 3) {
    tokenType = "app";
  } else if (/^v\d+\./.test(token)) {
    tokenType = "installation";
  } else {
    tokenType = "oauth";
  }

  return {
    type: "token",
    token: token,
    tokenType
  };
}
|
|
|
|
/**
 * Prefix token for usage in the Authorization header
 *
 * JSON Web Tokens (three dot-separated segments) use the "bearer" scheme;
 * everything else uses the "token" scheme.
 *
 * @param token OAuth token or JSON Web Token
 */
function withAuthorizationPrefix(token) {
  const isJsonWebToken = token.split(/\./).length === 3;
  return isJsonWebToken ? `bearer ${token}` : `token ${token}`;
}
|
|
|
|
// Request hook: merge the route/parameters into full endpoint options,
// inject the Authorization header derived from `token`, then dispatch
// the request.
async function hook(token, request, route, parameters) {
  const endpoint = request.endpoint.merge(route, parameters);
  endpoint.headers.authorization = withAuthorizationPrefix(token);
  return request(endpoint);
}
|
|
|
|
/**
 * Factory for token-based auth strategies.  Validates the token, strips
 * any leading "token " / "bearer " scheme prefix, and returns an auth
 * function that also carries a bound request `hook`.
 * @throws {Error} when no token, or a non-string token, is supplied.
 */
const createTokenAuth = function createTokenAuth(token) {
  if (!token) {
    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
  }

  if (typeof token !== "string") {
    throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
  }

  const normalized = token.replace(/^(token|bearer) +/i, "");
  const authenticate = auth.bind(null, normalized);
  authenticate.hook = hook.bind(null, normalized);
  return authenticate;
};
|
|
|
|
exports.createTokenAuth = createTokenAuth;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 814:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = which
|
|
which.sync = whichSync
|
|
|
|
var isWindows = process.platform === 'win32' ||
|
|
process.env.OSTYPE === 'cygwin' ||
|
|
process.env.OSTYPE === 'msys'
|
|
|
|
var path = __webpack_require__(622)
|
|
var COLON = isWindows ? ';' : ':'
|
|
var isexe = __webpack_require__(742)
|
|
|
|
// Build the ENOENT error reported when no executable match is found.
function getNotFoundError (cmd) {
  const notFound = new Error('not found: ' + cmd)
  notFound.code = 'ENOENT'
  return notFound
}
|
|
|
|
/**
 * Compute the search environment for `cmd`:
 *  - env:    directories to search (PATH split on the platform separator)
 *  - ext:    candidate extensions (PATHEXT-derived on Windows, [''] elsewhere)
 *  - extExe: the raw PATHEXT string, later handed to isexe
 */
function getPathInfo (cmd, opt) {
  var colon = opt.colon || COLON
  var pathEnv = (opt.path || process.env.PATH || '').split(colon)
  var pathExt = ['']
  var pathExtExe = ''

  if (isWindows) {
    // The cwd is implicitly searched first on Windows.
    pathEnv.unshift(process.cwd())
    pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM')
    pathExt = pathExtExe.split(colon)

    // Always test the cmd itself first. isexe will check to make sure
    // it's found in the pathExt set.
    if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
      pathExt.unshift('')
  }

  // If it has a slash, then we don't bother searching the pathenv.
  // just check the file itself, and that's it.
  if (cmd.match(/\//) || isWindows && cmd.match(/\\/))
    pathEnv = ['']

  return {
    env: pathEnv,
    ext: pathExt,
    extExe: pathExtExe
  }
}
|
|
|
|
/**
 * Async `which`: locate `cmd` on the PATH.
 * Calls cb(err) with an ENOENT error when nothing matches; cb(null, path)
 * with the first match, or cb(null, [paths]) with every match when
 * opt.all is set.
 */
function which (cmd, opt, cb) {
  // (cmd, cb) call form: opt is optional.
  if (typeof opt === 'function') {
    cb = opt
    opt = {}
  }

  var info = getPathInfo(cmd, opt)
  var pathEnv = info.env
  var pathExt = info.ext
  var pathExtExe = info.extExe
  var found = []

  // F walks the PATH directories; E walks the extensions within one
  // directory.  Both recurse via the async isexe callback.
  ;(function F (i, l) {
    if (i === l) {
      // Exhausted every directory.
      if (opt.all && found.length)
        return cb(null, found)
      else
        return cb(getNotFoundError(cmd))
    }

    var pathPart = pathEnv[i]
    // Strip surrounding quotes from the PATH entry.
    if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
      pathPart = pathPart.slice(1, -1)

    var p = path.join(pathPart, cmd)
    // Preserve an explicit "./" or ".\" prefix when the PATH part is empty
    // (path.join would otherwise drop it).
    if (!pathPart && (/^\.[\\\/]/).test(cmd)) {
      p = cmd.slice(0, 2) + p
    }
    ;(function E (ii, ll) {
      if (ii === ll) return F(i + 1, l)
      var ext = pathExt[ii]
      isexe(p + ext, { pathExt: pathExtExe }, function (er, is) {
        if (!er && is) {
          if (opt.all)
            found.push(p + ext)
          else
            return cb(null, p + ext)
        }
        return E(ii + 1, ll)
      })
    })(0, pathExt.length)
  })(0, pathEnv.length)
}
|
|
|
|
/**
 * Synchronous `which`.  Returns the first match, or every match when
 * opt.all is set.  Throws ENOENT when nothing matches, unless opt.nothrow
 * is set, in which case null is returned.
 */
function whichSync (cmd, opt) {
  opt = opt || {}

  var info = getPathInfo(cmd, opt)
  var pathEnv = info.env
  var pathExt = info.ext
  var pathExtExe = info.extExe
  var found = []

  for (var i = 0, l = pathEnv.length; i < l; i ++) {
    var pathPart = pathEnv[i]
    // Strip surrounding quotes from the PATH entry.
    if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
      pathPart = pathPart.slice(1, -1)

    var p = path.join(pathPart, cmd)
    // Preserve an explicit "./" or ".\" prefix when the PATH part is empty.
    if (!pathPart && /^\.[\\\/]/.test(cmd)) {
      p = cmd.slice(0, 2) + p
    }

    for (var j = 0, ll = pathExt.length; j < ll; j ++) {
      var cur = p + pathExt[j]
      try {
        if (isexe.sync(cur, { pathExt: pathExtExe })) {
          if (!opt.all)
            return cur
          found.push(cur)
        }
      } catch (ex) {
        // A failed stat just means "not this candidate"; keep scanning.
      }
    }
  }

  if (opt.all && found.length)
    return found

  if (opt.nothrow)
    return null

  throw getNotFoundError(cmd)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 816:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
module.exports = /^#!.*/; // matches an entire shebang ("#!...") first line
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 818:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = isexe
|
|
isexe.sync = sync
|
|
|
|
var fs = __webpack_require__(747)
|
|
|
|
/**
 * Windows executability check: does `path` end with one of the extensions
 * listed in options.pathExt (fallback: the PATHEXT env var)?  A missing or
 * empty extension list, or a list containing the empty string, accepts
 * every path.  Comparison is case-insensitive.
 */
function checkPathExt (path, options) {
  // An explicit options.pathExt (even an empty string) wins over the env.
  var pathext = options.pathExt !== undefined ?
    options.pathExt : process.env.PATHEXT

  if (!pathext) {
    return true
  }

  pathext = pathext.split(';')
  if (pathext.indexOf('') !== -1) {
    // An empty entry matches any extension.
    return true
  }

  for (var i = 0; i < pathext.length; i++) {
    var p = pathext[i].toLowerCase()
    // slice(-n) replaces the deprecated substr(-n); identical for n > 0,
    // and p is non-empty here thanks to the `p &&` guard.
    if (p && path.slice(-p.length).toLowerCase() === p) {
      return true
    }
  }
  return false
}
|
|
|
|
// A candidate is executable only if it is a regular file (or a symlink)
// AND carries an acceptable Windows extension.
function checkStat (stat, path, options) {
  var isFileLike = stat.isSymbolicLink() || stat.isFile()
  return isFileLike ? checkPathExt(path, options) : false
}
|
|
|
|
// Async stat + executability check (Windows variant): stat errors surface
// to the callback as (er, false).
function isexe (path, options, cb) {
  fs.stat(path, function (er, stat) {
    var executable = er ? false : checkStat(stat, path, options)
    cb(er, executable)
  })
}
|
|
|
|
// Synchronous variant: throws if the path cannot be stat'ed.
function sync (path, options) {
  var stat = fs.statSync(path)
  return checkStat(stat, path, options)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 821:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const core = __importStar(__webpack_require__(470));
|
|
const fsHelper = __importStar(__webpack_require__(618));
|
|
const github = __importStar(__webpack_require__(469));
|
|
const path = __importStar(__webpack_require__(622));
|
|
/**
 * Read and validate every action input (plus GITHUB_WORKSPACE and the
 * workflow context) into a single settings object used by the checkout
 * logic.  Throws early for a missing workspace, a malformed `repository`
 * input, or a `path` that escapes the workspace.
 */
function getInputs() {
    const result = {};
    // GitHub workspace — required; resolved to an absolute path and
    // verified to exist on disk.
    let githubWorkspacePath = process.env['GITHUB_WORKSPACE'];
    if (!githubWorkspacePath) {
        throw new Error('GITHUB_WORKSPACE not defined');
    }
    githubWorkspacePath = path.resolve(githubWorkspacePath);
    core.debug(`GITHUB_WORKSPACE = '${githubWorkspacePath}'`);
    fsHelper.directoryExistsSync(githubWorkspacePath, true);
    // Qualified repository — defaults to the repository the workflow runs in.
    const qualifiedRepository = core.getInput('repository') ||
        `${github.context.repo.owner}/${github.context.repo.repo}`;
    core.debug(`qualified repository = '${qualifiedRepository}'`);
    const splitRepository = qualifiedRepository.split('/');
    if (splitRepository.length !== 2 ||
        !splitRepository[0] ||
        !splitRepository[1]) {
        throw new Error(`Invalid repository '${qualifiedRepository}'. Expected format {owner}/{repo}.`);
    }
    result.repositoryOwner = splitRepository[0];
    result.repositoryName = splitRepository[1];
    // Repository path — resolved relative to the workspace and constrained
    // to stay underneath it (path.sep suffix guards against prefix tricks
    // like /workspace-evil).
    result.repositoryPath = core.getInput('path') || '.';
    result.repositoryPath = path.resolve(githubWorkspacePath, result.repositoryPath);
    if (!(result.repositoryPath + path.sep).startsWith(githubWorkspacePath + path.sep)) {
        throw new Error(`Repository path '${result.repositoryPath}' is not under '${githubWorkspacePath}'`);
    }
    // Workflow repository? (case-insensitive comparison against the
    // repository that triggered this workflow)
    const isWorkflowRepository = qualifiedRepository.toUpperCase() ===
        `${github.context.repo.owner}/${github.context.repo.repo}`.toUpperCase();
    // Source branch, source version — only defaulted from the workflow
    // context when checking out the workflow's own repository.
    result.ref = core.getInput('ref');
    if (!result.ref) {
        if (isWorkflowRepository) {
            result.ref = github.context.ref;
            result.commit = github.context.sha;
            // Some events have an unqualified ref. For example when a PR is merged (pull_request closed event),
            // the ref is unqualified like "main" instead of "refs/heads/main".
            if (result.commit && result.ref && !result.ref.startsWith('refs/')) {
                result.ref = `refs/heads/${result.ref}`;
            }
        }
    }
    // SHA? (a 40-hex-char ref is treated as a commit, not a branch/tag)
    else if (result.ref.match(/^[0-9a-fA-F]{40}$/)) {
        result.commit = result.ref;
        result.ref = '';
    }
    core.debug(`ref = '${result.ref}'`);
    core.debug(`commit = '${result.commit}'`);
    // Clean — defaults to true.
    result.clean = (core.getInput('clean') || 'true').toUpperCase() === 'TRUE';
    core.debug(`clean = ${result.clean}`);
    // Fetch depth — defaults to 1; anything non-numeric or negative
    // becomes 0 (which downstream presumably means "full history" —
    // confirm against the fetch logic).
    result.fetchDepth = Math.floor(Number(core.getInput('fetch-depth') || '1'));
    if (isNaN(result.fetchDepth) || result.fetchDepth < 0) {
        result.fetchDepth = 0;
    }
    core.debug(`fetch depth = ${result.fetchDepth}`);
    // LFS — defaults to false.
    result.lfs = (core.getInput('lfs') || 'false').toUpperCase() === 'TRUE';
    core.debug(`lfs = ${result.lfs}`);
    // Submodules — 'recursive' enables nested submodules, 'true' only the
    // top level, anything else disables them.
    result.submodules = false;
    result.nestedSubmodules = false;
    const submodulesString = (core.getInput('submodules') || '').toUpperCase();
    if (submodulesString == 'RECURSIVE') {
        result.submodules = true;
        result.nestedSubmodules = true;
    }
    else if (submodulesString == 'TRUE') {
        result.submodules = true;
    }
    core.debug(`submodules = ${result.submodules}`);
    core.debug(`recursive submodules = ${result.nestedSubmodules}`);
    // Auth token — required input.
    result.authToken = core.getInput('token', { required: true });
    // SSH
    result.sshKey = core.getInput('ssh-key');
    result.sshKnownHosts = core.getInput('ssh-known-hosts');
    result.sshStrict =
        (core.getInput('ssh-strict') || 'true').toUpperCase() === 'TRUE';
    // Persist credentials — defaults to false.
    result.persistCredentials =
        (core.getInput('persist-credentials') || 'false').toUpperCase() === 'TRUE';
    return result;
}
|
|
exports.getInputs = getInputs;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 826:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
var rng = __webpack_require__(139);
|
|
var bytesToUuid = __webpack_require__(722);
|
|
|
|
/**
 * Generate an RFC 4122 version-4 (random) UUID.
 * @param {object|string} [options] - { random: bytes } or { rng: fn };
 *   the legacy string form 'binary' requests a byte array instead of a
 *   string result.
 * @param {Array} [buf] - optional output buffer for the raw bytes
 * @param {number} [offset] - where to start writing into `buf`
 * @returns {string|Array} UUID string, or `buf` when one was provided
 */
function v4(options, buf, offset) {
  // Only honor `offset` when an output buffer was actually supplied.
  var i = buf && offset || 0;

  // Legacy call style: v4('binary') means "give me bytes, not a string".
  if (typeof(options) == 'string') {
    buf = options === 'binary' ? new Array(16) : null;
    options = null;
  }
  options = options || {};

  // Caller-provided bytes win; otherwise use the caller's rng or ours.
  var rnds = options.random || (options.rng || rng)();

  // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
  rnds[6] = (rnds[6] & 0x0f) | 0x40;
  rnds[8] = (rnds[8] & 0x3f) | 0x80;

  // Copy bytes to buffer, if provided
  if (buf) {
    for (var ii = 0; ii < 16; ++ii) {
      buf[i + ii] = rnds[ii];
    }
  }

  return buf || bytesToUuid(rnds);
}
|
|
|
|
module.exports = v4;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 835:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("url");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 842:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
var deprecation = __webpack_require__(692);
|
|
|
|
var endpointsByScope = {
|
|
actions: {
|
|
cancelWorkflowRun: {
|
|
method: "POST",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
run_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runs/:run_id/cancel"
|
|
},
|
|
createOrUpdateSecretForRepo: {
|
|
method: "PUT",
|
|
params: {
|
|
encrypted_value: {
|
|
type: "string"
|
|
},
|
|
key_id: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/secrets/:name"
|
|
},
|
|
createRegistrationToken: {
|
|
method: "POST",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runners/registration-token"
|
|
},
|
|
createRemoveToken: {
|
|
method: "POST",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runners/remove-token"
|
|
},
|
|
deleteArtifact: {
|
|
method: "DELETE",
|
|
params: {
|
|
artifact_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/artifacts/:artifact_id"
|
|
},
|
|
deleteSecretFromRepo: {
|
|
method: "DELETE",
|
|
params: {
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/secrets/:name"
|
|
},
|
|
downloadArtifact: {
|
|
method: "GET",
|
|
params: {
|
|
archive_format: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
artifact_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/artifacts/:artifact_id/:archive_format"
|
|
},
|
|
getArtifact: {
|
|
method: "GET",
|
|
params: {
|
|
artifact_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/artifacts/:artifact_id"
|
|
},
|
|
getPublicKey: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/secrets/public-key"
|
|
},
|
|
getSecret: {
|
|
method: "GET",
|
|
params: {
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/secrets/:name"
|
|
},
|
|
getSelfHostedRunner: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
runner_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runners/:runner_id"
|
|
},
|
|
getWorkflow: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
workflow_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/workflows/:workflow_id"
|
|
},
|
|
getWorkflowJob: {
|
|
method: "GET",
|
|
params: {
|
|
job_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/jobs/:job_id"
|
|
},
|
|
getWorkflowRun: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
run_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runs/:run_id"
|
|
},
|
|
listDownloadsForSelfHostedRunnerApplication: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runners/downloads"
|
|
},
|
|
listJobsForWorkflowRun: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
run_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runs/:run_id/jobs"
|
|
},
|
|
listRepoWorkflowRuns: {
|
|
method: "GET",
|
|
params: {
|
|
actor: {
|
|
type: "string"
|
|
},
|
|
branch: {
|
|
type: "string"
|
|
},
|
|
event: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
status: {
|
|
enum: ["completed", "status", "conclusion"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runs"
|
|
},
|
|
listRepoWorkflows: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/workflows"
|
|
},
|
|
listSecretsForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/secrets"
|
|
},
|
|
listSelfHostedRunnersForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runners"
|
|
},
|
|
listWorkflowJobLogs: {
|
|
method: "GET",
|
|
params: {
|
|
job_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/jobs/:job_id/logs"
|
|
},
|
|
listWorkflowRunArtifacts: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
run_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runs/:run_id/artifacts"
|
|
},
|
|
listWorkflowRunLogs: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
run_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runs/:run_id/logs"
|
|
},
|
|
listWorkflowRuns: {
|
|
method: "GET",
|
|
params: {
|
|
actor: {
|
|
type: "string"
|
|
},
|
|
branch: {
|
|
type: "string"
|
|
},
|
|
event: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
status: {
|
|
enum: ["completed", "status", "conclusion"],
|
|
type: "string"
|
|
},
|
|
workflow_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/workflows/:workflow_id/runs"
|
|
},
|
|
reRunWorkflow: {
|
|
method: "POST",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
run_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runs/:run_id/rerun"
|
|
},
|
|
removeSelfHostedRunner: {
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
runner_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/actions/runners/:runner_id"
|
|
}
|
|
},
|
|
activity: {
|
|
checkStarringRepo: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/starred/:owner/:repo"
|
|
},
|
|
deleteRepoSubscription: {
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/subscription"
|
|
},
|
|
deleteThreadSubscription: {
|
|
method: "DELETE",
|
|
params: {
|
|
thread_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/notifications/threads/:thread_id/subscription"
|
|
},
|
|
getRepoSubscription: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/subscription"
|
|
},
|
|
getThread: {
|
|
method: "GET",
|
|
params: {
|
|
thread_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/notifications/threads/:thread_id"
|
|
},
|
|
getThreadSubscription: {
|
|
method: "GET",
|
|
params: {
|
|
thread_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/notifications/threads/:thread_id/subscription"
|
|
},
|
|
listEventsForOrg: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/events/orgs/:org"
|
|
},
|
|
listEventsForUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/events"
|
|
},
|
|
listFeeds: {
|
|
method: "GET",
|
|
params: {},
|
|
url: "/feeds"
|
|
},
|
|
listNotifications: {
|
|
method: "GET",
|
|
params: {
|
|
all: {
|
|
type: "boolean"
|
|
},
|
|
before: {
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
participating: {
|
|
type: "boolean"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/notifications"
|
|
},
|
|
listNotificationsForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
all: {
|
|
type: "boolean"
|
|
},
|
|
before: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
participating: {
|
|
type: "boolean"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/notifications"
|
|
},
|
|
listPublicEvents: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/events"
|
|
},
|
|
listPublicEventsForOrg: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/orgs/:org/events"
|
|
},
|
|
listPublicEventsForRepoNetwork: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/networks/:owner/:repo/events"
|
|
},
|
|
listPublicEventsForUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/events/public"
|
|
},
|
|
listReceivedEventsForUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/received_events"
|
|
},
|
|
listReceivedPublicEventsForUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/received_events/public"
|
|
},
|
|
listRepoEvents: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/events"
|
|
},
|
|
listReposStarredByAuthenticatedUser: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/starred"
|
|
},
|
|
listReposStarredByUser: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated"],
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/starred"
|
|
},
|
|
listReposWatchedByUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/subscriptions"
|
|
},
|
|
listStargazersForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/stargazers"
|
|
},
|
|
listWatchedReposForAuthenticatedUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/subscriptions"
|
|
},
|
|
listWatchersForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/subscribers"
|
|
},
|
|
markAsRead: {
|
|
method: "PUT",
|
|
params: {
|
|
last_read_at: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/notifications"
|
|
},
|
|
markNotificationsAsReadForRepo: {
|
|
method: "PUT",
|
|
params: {
|
|
last_read_at: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/notifications"
|
|
},
|
|
markThreadAsRead: {
|
|
method: "PATCH",
|
|
params: {
|
|
thread_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/notifications/threads/:thread_id"
|
|
},
|
|
setRepoSubscription: {
|
|
method: "PUT",
|
|
params: {
|
|
ignored: {
|
|
type: "boolean"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
subscribed: {
|
|
type: "boolean"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/subscription"
|
|
},
|
|
setThreadSubscription: {
|
|
method: "PUT",
|
|
params: {
|
|
ignored: {
|
|
type: "boolean"
|
|
},
|
|
thread_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/notifications/threads/:thread_id/subscription"
|
|
},
|
|
starRepo: {
|
|
method: "PUT",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/starred/:owner/:repo"
|
|
},
|
|
unstarRepo: {
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/starred/:owner/:repo"
|
|
}
|
|
},
|
|
apps: {
|
|
addRepoToInstallation: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
installation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repository_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/installations/:installation_id/repositories/:repository_id"
|
|
},
|
|
checkAccountIsAssociatedWithAny: {
|
|
method: "GET",
|
|
params: {
|
|
account_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/marketplace_listing/accounts/:account_id"
|
|
},
|
|
checkAccountIsAssociatedWithAnyStubbed: {
|
|
method: "GET",
|
|
params: {
|
|
account_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/marketplace_listing/stubbed/accounts/:account_id"
|
|
},
|
|
checkAuthorization: {
|
|
deprecated: "octokit.apps.checkAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#check-an-authorization",
|
|
method: "GET",
|
|
params: {
|
|
access_token: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/tokens/:access_token"
|
|
},
|
|
checkToken: {
|
|
headers: {
|
|
accept: "application/vnd.github.doctor-strange-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
access_token: {
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/token"
|
|
},
|
|
createContentAttachment: {
|
|
headers: {
|
|
accept: "application/vnd.github.corsair-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
content_reference_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
title: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/content_references/:content_reference_id/attachments"
|
|
},
|
|
createFromManifest: {
|
|
headers: {
|
|
accept: "application/vnd.github.fury-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
code: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/app-manifests/:code/conversions"
|
|
},
|
|
createInstallationToken: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
installation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
permissions: {
|
|
type: "object"
|
|
},
|
|
repository_ids: {
|
|
type: "integer[]"
|
|
}
|
|
},
|
|
url: "/app/installations/:installation_id/access_tokens"
|
|
},
|
|
deleteAuthorization: {
|
|
headers: {
|
|
accept: "application/vnd.github.doctor-strange-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
access_token: {
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/grant"
|
|
},
|
|
deleteInstallation: {
|
|
headers: {
|
|
accept: "application/vnd.github.gambit-preview+json,application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
installation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/app/installations/:installation_id"
|
|
},
|
|
deleteToken: {
|
|
headers: {
|
|
accept: "application/vnd.github.doctor-strange-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
access_token: {
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/token"
|
|
},
|
|
findOrgInstallation: {
|
|
deprecated: "octokit.apps.findOrgInstallation() has been renamed to octokit.apps.getOrgInstallation() (2019-04-10)",
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/installation"
|
|
},
|
|
findRepoInstallation: {
|
|
deprecated: "octokit.apps.findRepoInstallation() has been renamed to octokit.apps.getRepoInstallation() (2019-04-10)",
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/installation"
|
|
},
|
|
findUserInstallation: {
|
|
deprecated: "octokit.apps.findUserInstallation() has been renamed to octokit.apps.getUserInstallation() (2019-04-10)",
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/installation"
|
|
},
|
|
getAuthenticated: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {},
|
|
url: "/app"
|
|
},
|
|
getBySlug: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
app_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/apps/:app_slug"
|
|
},
|
|
getInstallation: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
installation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/app/installations/:installation_id"
|
|
},
|
|
getOrgInstallation: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/installation"
|
|
},
|
|
getRepoInstallation: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/installation"
|
|
},
|
|
getUserInstallation: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/installation"
|
|
},
|
|
listAccountsUserOrOrgOnPlan: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
plan_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/marketplace_listing/plans/:plan_id/accounts"
|
|
},
|
|
listAccountsUserOrOrgOnPlanStubbed: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
plan_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/marketplace_listing/stubbed/plans/:plan_id/accounts"
|
|
},
|
|
listInstallationReposForAuthenticatedUser: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
installation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/installations/:installation_id/repositories"
|
|
},
|
|
listInstallations: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/app/installations"
|
|
},
|
|
listInstallationsForAuthenticatedUser: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/installations"
|
|
},
|
|
listMarketplacePurchasesForAuthenticatedUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/marketplace_purchases"
|
|
},
|
|
listMarketplacePurchasesForAuthenticatedUserStubbed: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/marketplace_purchases/stubbed"
|
|
},
|
|
listPlans: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/marketplace_listing/plans"
|
|
},
|
|
listPlansStubbed: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/marketplace_listing/stubbed/plans"
|
|
},
|
|
listRepos: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/installation/repositories"
|
|
},
|
|
removeRepoFromInstallation: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
installation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repository_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/installations/:installation_id/repositories/:repository_id"
|
|
},
|
|
resetAuthorization: {
|
|
deprecated: "octokit.apps.resetAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#reset-an-authorization",
|
|
method: "POST",
|
|
params: {
|
|
access_token: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/tokens/:access_token"
|
|
},
|
|
resetToken: {
|
|
headers: {
|
|
accept: "application/vnd.github.doctor-strange-preview+json"
|
|
},
|
|
method: "PATCH",
|
|
params: {
|
|
access_token: {
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/token"
|
|
},
|
|
revokeAuthorizationForApplication: {
|
|
deprecated: "octokit.apps.revokeAuthorizationForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-an-authorization-for-an-application",
|
|
method: "DELETE",
|
|
params: {
|
|
access_token: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/tokens/:access_token"
|
|
},
|
|
revokeGrantForApplication: {
|
|
deprecated: "octokit.apps.revokeGrantForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-a-grant-for-an-application",
|
|
method: "DELETE",
|
|
params: {
|
|
access_token: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/grants/:access_token"
|
|
},
|
|
revokeInstallationToken: {
|
|
headers: {
|
|
accept: "application/vnd.github.gambit-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {},
|
|
url: "/installation/token"
|
|
}
|
|
},
|
|
checks: {
|
|
create: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
actions: {
|
|
type: "object[]"
|
|
},
|
|
"actions[].description": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"actions[].identifier": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"actions[].label": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
completed_at: {
|
|
type: "string"
|
|
},
|
|
conclusion: {
|
|
enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"],
|
|
type: "string"
|
|
},
|
|
details_url: {
|
|
type: "string"
|
|
},
|
|
external_id: {
|
|
type: "string"
|
|
},
|
|
head_sha: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
output: {
|
|
type: "object"
|
|
},
|
|
"output.annotations": {
|
|
type: "object[]"
|
|
},
|
|
"output.annotations[].annotation_level": {
|
|
enum: ["notice", "warning", "failure"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.annotations[].end_column": {
|
|
type: "integer"
|
|
},
|
|
"output.annotations[].end_line": {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
"output.annotations[].message": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.annotations[].path": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.annotations[].raw_details": {
|
|
type: "string"
|
|
},
|
|
"output.annotations[].start_column": {
|
|
type: "integer"
|
|
},
|
|
"output.annotations[].start_line": {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
"output.annotations[].title": {
|
|
type: "string"
|
|
},
|
|
"output.images": {
|
|
type: "object[]"
|
|
},
|
|
"output.images[].alt": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.images[].caption": {
|
|
type: "string"
|
|
},
|
|
"output.images[].image_url": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.summary": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.text": {
|
|
type: "string"
|
|
},
|
|
"output.title": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
started_at: {
|
|
type: "string"
|
|
},
|
|
status: {
|
|
enum: ["queued", "in_progress", "completed"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/check-runs"
|
|
},
|
|
createSuite: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
head_sha: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/check-suites"
|
|
},
|
|
get: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
check_run_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/check-runs/:check_run_id"
|
|
},
|
|
getSuite: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
check_suite_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/check-suites/:check_suite_id"
|
|
},
|
|
listAnnotations: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
check_run_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/check-runs/:check_run_id/annotations"
|
|
},
|
|
listForRef: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
check_name: {
|
|
type: "string"
|
|
},
|
|
filter: {
|
|
enum: ["latest", "all"],
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
status: {
|
|
enum: ["queued", "in_progress", "completed"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits/:ref/check-runs"
|
|
},
|
|
listForSuite: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
check_name: {
|
|
type: "string"
|
|
},
|
|
check_suite_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
filter: {
|
|
enum: ["latest", "all"],
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
status: {
|
|
enum: ["queued", "in_progress", "completed"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/check-suites/:check_suite_id/check-runs"
|
|
},
|
|
listSuitesForRef: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
app_id: {
|
|
type: "integer"
|
|
},
|
|
check_name: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits/:ref/check-suites"
|
|
},
|
|
rerequestSuite: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
check_suite_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/check-suites/:check_suite_id/rerequest"
|
|
},
|
|
setSuitesPreferences: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "PATCH",
|
|
params: {
|
|
auto_trigger_checks: {
|
|
type: "object[]"
|
|
},
|
|
"auto_trigger_checks[].app_id": {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
"auto_trigger_checks[].setting": {
|
|
required: true,
|
|
type: "boolean"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/check-suites/preferences"
|
|
},
|
|
update: {
|
|
headers: {
|
|
accept: "application/vnd.github.antiope-preview+json"
|
|
},
|
|
method: "PATCH",
|
|
params: {
|
|
actions: {
|
|
type: "object[]"
|
|
},
|
|
"actions[].description": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"actions[].identifier": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"actions[].label": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
check_run_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
completed_at: {
|
|
type: "string"
|
|
},
|
|
conclusion: {
|
|
enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"],
|
|
type: "string"
|
|
},
|
|
details_url: {
|
|
type: "string"
|
|
},
|
|
external_id: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
},
|
|
output: {
|
|
type: "object"
|
|
},
|
|
"output.annotations": {
|
|
type: "object[]"
|
|
},
|
|
"output.annotations[].annotation_level": {
|
|
enum: ["notice", "warning", "failure"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.annotations[].end_column": {
|
|
type: "integer"
|
|
},
|
|
"output.annotations[].end_line": {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
"output.annotations[].message": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.annotations[].path": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.annotations[].raw_details": {
|
|
type: "string"
|
|
},
|
|
"output.annotations[].start_column": {
|
|
type: "integer"
|
|
},
|
|
"output.annotations[].start_line": {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
"output.annotations[].title": {
|
|
type: "string"
|
|
},
|
|
"output.images": {
|
|
type: "object[]"
|
|
},
|
|
"output.images[].alt": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.images[].caption": {
|
|
type: "string"
|
|
},
|
|
"output.images[].image_url": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.summary": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"output.text": {
|
|
type: "string"
|
|
},
|
|
"output.title": {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
started_at: {
|
|
type: "string"
|
|
},
|
|
status: {
|
|
enum: ["queued", "in_progress", "completed"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/check-runs/:check_run_id"
|
|
}
|
|
},
|
|
codesOfConduct: {
|
|
getConductCode: {
|
|
headers: {
|
|
accept: "application/vnd.github.scarlet-witch-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
key: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/codes_of_conduct/:key"
|
|
},
|
|
getForRepo: {
|
|
headers: {
|
|
accept: "application/vnd.github.scarlet-witch-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/community/code_of_conduct"
|
|
},
|
|
listConductCodes: {
|
|
headers: {
|
|
accept: "application/vnd.github.scarlet-witch-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {},
|
|
url: "/codes_of_conduct"
|
|
}
|
|
},
|
|
emojis: {
|
|
get: {
|
|
method: "GET",
|
|
params: {},
|
|
url: "/emojis"
|
|
}
|
|
},
|
|
gists: {
|
|
checkIsStarred: {
|
|
method: "GET",
|
|
params: {
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/star"
|
|
},
|
|
create: {
|
|
method: "POST",
|
|
params: {
|
|
description: {
|
|
type: "string"
|
|
},
|
|
files: {
|
|
required: true,
|
|
type: "object"
|
|
},
|
|
"files.content": {
|
|
type: "string"
|
|
},
|
|
public: {
|
|
type: "boolean"
|
|
}
|
|
},
|
|
url: "/gists"
|
|
},
|
|
createComment: {
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/comments"
|
|
},
|
|
delete: {
|
|
method: "DELETE",
|
|
params: {
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id"
|
|
},
|
|
deleteComment: {
|
|
method: "DELETE",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/comments/:comment_id"
|
|
},
|
|
fork: {
|
|
method: "POST",
|
|
params: {
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/forks"
|
|
},
|
|
get: {
|
|
method: "GET",
|
|
params: {
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id"
|
|
},
|
|
getComment: {
|
|
method: "GET",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/comments/:comment_id"
|
|
},
|
|
getRevision: {
|
|
method: "GET",
|
|
params: {
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/:sha"
|
|
},
|
|
list: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists"
|
|
},
|
|
listComments: {
|
|
method: "GET",
|
|
params: {
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/comments"
|
|
},
|
|
listCommits: {
|
|
method: "GET",
|
|
params: {
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/commits"
|
|
},
|
|
listForks: {
|
|
method: "GET",
|
|
params: {
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/forks"
|
|
},
|
|
listPublic: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/public"
|
|
},
|
|
listPublicForUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/gists"
|
|
},
|
|
listStarred: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/starred"
|
|
},
|
|
star: {
|
|
method: "PUT",
|
|
params: {
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/star"
|
|
},
|
|
unstar: {
|
|
method: "DELETE",
|
|
params: {
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/star"
|
|
},
|
|
update: {
|
|
method: "PATCH",
|
|
params: {
|
|
description: {
|
|
type: "string"
|
|
},
|
|
files: {
|
|
type: "object"
|
|
},
|
|
"files.content": {
|
|
type: "string"
|
|
},
|
|
"files.filename": {
|
|
type: "string"
|
|
},
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id"
|
|
},
|
|
updateComment: {
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
gist_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gists/:gist_id/comments/:comment_id"
|
|
}
|
|
},
|
|
git: {
|
|
createBlob: {
|
|
method: "POST",
|
|
params: {
|
|
content: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
encoding: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/blobs"
|
|
},
|
|
createCommit: {
|
|
method: "POST",
|
|
params: {
|
|
author: {
|
|
type: "object"
|
|
},
|
|
"author.date": {
|
|
type: "string"
|
|
},
|
|
"author.email": {
|
|
type: "string"
|
|
},
|
|
"author.name": {
|
|
type: "string"
|
|
},
|
|
committer: {
|
|
type: "object"
|
|
},
|
|
"committer.date": {
|
|
type: "string"
|
|
},
|
|
"committer.email": {
|
|
type: "string"
|
|
},
|
|
"committer.name": {
|
|
type: "string"
|
|
},
|
|
message: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
parents: {
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
signature: {
|
|
type: "string"
|
|
},
|
|
tree: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/commits"
|
|
},
|
|
createRef: {
|
|
method: "POST",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/refs"
|
|
},
|
|
createTag: {
|
|
method: "POST",
|
|
params: {
|
|
message: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
object: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
tag: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
tagger: {
|
|
type: "object"
|
|
},
|
|
"tagger.date": {
|
|
type: "string"
|
|
},
|
|
"tagger.email": {
|
|
type: "string"
|
|
},
|
|
"tagger.name": {
|
|
type: "string"
|
|
},
|
|
type: {
|
|
enum: ["commit", "tree", "blob"],
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/tags"
|
|
},
|
|
createTree: {
|
|
method: "POST",
|
|
params: {
|
|
base_tree: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
tree: {
|
|
required: true,
|
|
type: "object[]"
|
|
},
|
|
"tree[].content": {
|
|
type: "string"
|
|
},
|
|
"tree[].mode": {
|
|
enum: ["100644", "100755", "040000", "160000", "120000"],
|
|
type: "string"
|
|
},
|
|
"tree[].path": {
|
|
type: "string"
|
|
},
|
|
"tree[].sha": {
|
|
allowNull: true,
|
|
type: "string"
|
|
},
|
|
"tree[].type": {
|
|
enum: ["blob", "tree", "commit"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/trees"
|
|
},
|
|
deleteRef: {
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/refs/:ref"
|
|
},
|
|
getBlob: {
|
|
method: "GET",
|
|
params: {
|
|
file_sha: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/blobs/:file_sha"
|
|
},
|
|
getCommit: {
|
|
method: "GET",
|
|
params: {
|
|
commit_sha: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/commits/:commit_sha"
|
|
},
|
|
getRef: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/ref/:ref"
|
|
},
|
|
getTag: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
tag_sha: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/tags/:tag_sha"
|
|
},
|
|
getTree: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
recursive: {
|
|
enum: ["1"],
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
tree_sha: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/trees/:tree_sha"
|
|
},
|
|
listMatchingRefs: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/matching-refs/:ref"
|
|
},
|
|
listRefs: {
|
|
method: "GET",
|
|
params: {
|
|
namespace: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/refs/:namespace"
|
|
},
|
|
updateRef: {
|
|
method: "PATCH",
|
|
params: {
|
|
force: {
|
|
type: "boolean"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/git/refs/:ref"
|
|
}
|
|
},
|
|
gitignore: {
|
|
getTemplate: {
|
|
method: "GET",
|
|
params: {
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/gitignore/templates/:name"
|
|
},
|
|
listTemplates: {
|
|
method: "GET",
|
|
params: {},
|
|
url: "/gitignore/templates"
|
|
}
|
|
},
|
|
interactions: {
|
|
addOrUpdateRestrictionsForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.sombra-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
limit: {
|
|
enum: ["existing_users", "contributors_only", "collaborators_only"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/interaction-limits"
|
|
},
|
|
addOrUpdateRestrictionsForRepo: {
|
|
headers: {
|
|
accept: "application/vnd.github.sombra-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
limit: {
|
|
enum: ["existing_users", "contributors_only", "collaborators_only"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/interaction-limits"
|
|
},
|
|
getRestrictionsForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.sombra-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/interaction-limits"
|
|
},
|
|
getRestrictionsForRepo: {
|
|
headers: {
|
|
accept: "application/vnd.github.sombra-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/interaction-limits"
|
|
},
|
|
removeRestrictionsForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.sombra-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/interaction-limits"
|
|
},
|
|
removeRestrictionsForRepo: {
|
|
headers: {
|
|
accept: "application/vnd.github.sombra-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/interaction-limits"
|
|
}
|
|
},
|
|
issues: {
|
|
addAssignees: {
|
|
method: "POST",
|
|
params: {
|
|
assignees: {
|
|
type: "string[]"
|
|
},
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/assignees"
|
|
},
|
|
addLabels: {
|
|
method: "POST",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
labels: {
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/labels"
|
|
},
|
|
checkAssignee: {
|
|
method: "GET",
|
|
params: {
|
|
assignee: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/assignees/:assignee"
|
|
},
|
|
create: {
|
|
method: "POST",
|
|
params: {
|
|
assignee: {
|
|
type: "string"
|
|
},
|
|
assignees: {
|
|
type: "string[]"
|
|
},
|
|
body: {
|
|
type: "string"
|
|
},
|
|
labels: {
|
|
type: "string[]"
|
|
},
|
|
milestone: {
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
title: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues"
|
|
},
|
|
createComment: {
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/comments"
|
|
},
|
|
createLabel: {
|
|
method: "POST",
|
|
params: {
|
|
color: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
description: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/labels"
|
|
},
|
|
createMilestone: {
|
|
method: "POST",
|
|
params: {
|
|
description: {
|
|
type: "string"
|
|
},
|
|
due_on: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed"],
|
|
type: "string"
|
|
},
|
|
title: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/milestones"
|
|
},
|
|
deleteComment: {
|
|
method: "DELETE",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/comments/:comment_id"
|
|
},
|
|
deleteLabel: {
|
|
method: "DELETE",
|
|
params: {
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/labels/:name"
|
|
},
|
|
deleteMilestone: {
|
|
method: "DELETE",
|
|
params: {
|
|
milestone_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "milestone_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/milestones/:milestone_number"
|
|
},
|
|
get: {
|
|
method: "GET",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number"
|
|
},
|
|
getComment: {
|
|
method: "GET",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/comments/:comment_id"
|
|
},
|
|
getEvent: {
|
|
method: "GET",
|
|
params: {
|
|
event_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/events/:event_id"
|
|
},
|
|
getLabel: {
|
|
method: "GET",
|
|
params: {
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/labels/:name"
|
|
},
|
|
getMilestone: {
|
|
method: "GET",
|
|
params: {
|
|
milestone_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "milestone_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/milestones/:milestone_number"
|
|
},
|
|
list: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
filter: {
|
|
enum: ["assigned", "created", "mentioned", "subscribed", "all"],
|
|
type: "string"
|
|
},
|
|
labels: {
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated", "comments"],
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed", "all"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/issues"
|
|
},
|
|
listAssignees: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/assignees"
|
|
},
|
|
listComments: {
|
|
method: "GET",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/comments"
|
|
},
|
|
listCommentsForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/comments"
|
|
},
|
|
listEvents: {
|
|
method: "GET",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/events"
|
|
},
|
|
listEventsForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/events"
|
|
},
|
|
listEventsForTimeline: {
|
|
headers: {
|
|
accept: "application/vnd.github.mockingbird-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/timeline"
|
|
},
|
|
listForAuthenticatedUser: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
filter: {
|
|
enum: ["assigned", "created", "mentioned", "subscribed", "all"],
|
|
type: "string"
|
|
},
|
|
labels: {
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated", "comments"],
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed", "all"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/issues"
|
|
},
|
|
listForOrg: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
filter: {
|
|
enum: ["assigned", "created", "mentioned", "subscribed", "all"],
|
|
type: "string"
|
|
},
|
|
labels: {
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated", "comments"],
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed", "all"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/issues"
|
|
},
|
|
listForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
assignee: {
|
|
type: "string"
|
|
},
|
|
creator: {
|
|
type: "string"
|
|
},
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
labels: {
|
|
type: "string"
|
|
},
|
|
mentioned: {
|
|
type: "string"
|
|
},
|
|
milestone: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated", "comments"],
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed", "all"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues"
|
|
},
|
|
listLabelsForMilestone: {
|
|
method: "GET",
|
|
params: {
|
|
milestone_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "milestone_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/milestones/:milestone_number/labels"
|
|
},
|
|
listLabelsForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/labels"
|
|
},
|
|
listLabelsOnIssue: {
|
|
method: "GET",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/labels"
|
|
},
|
|
listMilestonesForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["due_on", "completeness"],
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed", "all"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/milestones"
|
|
},
|
|
lock: {
|
|
method: "PUT",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
lock_reason: {
|
|
enum: ["off-topic", "too heated", "resolved", "spam"],
|
|
type: "string"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/lock"
|
|
},
|
|
removeAssignees: {
|
|
method: "DELETE",
|
|
params: {
|
|
assignees: {
|
|
type: "string[]"
|
|
},
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/assignees"
|
|
},
|
|
removeLabel: {
|
|
method: "DELETE",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/labels/:name"
|
|
},
|
|
removeLabels: {
|
|
method: "DELETE",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/labels"
|
|
},
|
|
replaceLabels: {
|
|
method: "PUT",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
labels: {
|
|
type: "string[]"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/labels"
|
|
},
|
|
unlock: {
|
|
method: "DELETE",
|
|
params: {
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/lock"
|
|
},
|
|
update: {
|
|
method: "PATCH",
|
|
params: {
|
|
assignee: {
|
|
type: "string"
|
|
},
|
|
assignees: {
|
|
type: "string[]"
|
|
},
|
|
body: {
|
|
type: "string"
|
|
},
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
labels: {
|
|
type: "string[]"
|
|
},
|
|
milestone: {
|
|
allowNull: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed"],
|
|
type: "string"
|
|
},
|
|
title: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number"
|
|
},
|
|
updateComment: {
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/comments/:comment_id"
|
|
},
|
|
updateLabel: {
|
|
method: "PATCH",
|
|
params: {
|
|
color: {
|
|
type: "string"
|
|
},
|
|
current_name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
description: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/labels/:current_name"
|
|
},
|
|
updateMilestone: {
|
|
method: "PATCH",
|
|
params: {
|
|
description: {
|
|
type: "string"
|
|
},
|
|
due_on: {
|
|
type: "string"
|
|
},
|
|
milestone_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "milestone_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed"],
|
|
type: "string"
|
|
},
|
|
title: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/milestones/:milestone_number"
|
|
}
|
|
},
|
|
licenses: {
|
|
get: {
|
|
method: "GET",
|
|
params: {
|
|
license: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/licenses/:license"
|
|
},
|
|
getForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/license"
|
|
},
|
|
list: {
|
|
deprecated: "octokit.licenses.list() has been renamed to octokit.licenses.listCommonlyUsed() (2019-03-05)",
|
|
method: "GET",
|
|
params: {},
|
|
url: "/licenses"
|
|
},
|
|
listCommonlyUsed: {
|
|
method: "GET",
|
|
params: {},
|
|
url: "/licenses"
|
|
}
|
|
},
|
|
markdown: {
|
|
render: {
|
|
method: "POST",
|
|
params: {
|
|
context: {
|
|
type: "string"
|
|
},
|
|
mode: {
|
|
enum: ["markdown", "gfm"],
|
|
type: "string"
|
|
},
|
|
text: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/markdown"
|
|
},
|
|
renderRaw: {
|
|
headers: {
|
|
"content-type": "text/plain; charset=utf-8"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
data: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/markdown/raw"
|
|
}
|
|
},
|
|
meta: {
|
|
get: {
|
|
method: "GET",
|
|
params: {},
|
|
url: "/meta"
|
|
}
|
|
},
|
|
migrations: {
|
|
cancelImport: {
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/import"
|
|
},
|
|
deleteArchiveForAuthenticatedUser: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/migrations/:migration_id/archive"
|
|
},
|
|
deleteArchiveForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/migrations/:migration_id/archive"
|
|
},
|
|
downloadArchiveForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/migrations/:migration_id/archive"
|
|
},
|
|
getArchiveForAuthenticatedUser: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/migrations/:migration_id/archive"
|
|
},
|
|
getArchiveForOrg: {
|
|
deprecated: "octokit.migrations.getArchiveForOrg() has been renamed to octokit.migrations.downloadArchiveForOrg() (2020-01-27)",
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/migrations/:migration_id/archive"
|
|
},
|
|
getCommitAuthors: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/import/authors"
|
|
},
|
|
getImportProgress: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/import"
|
|
},
|
|
getLargeFiles: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/import/large_files"
|
|
},
|
|
getStatusForAuthenticatedUser: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/migrations/:migration_id"
|
|
},
|
|
getStatusForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/migrations/:migration_id"
|
|
},
|
|
listForAuthenticatedUser: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/migrations"
|
|
},
|
|
listForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/orgs/:org/migrations"
|
|
},
|
|
listReposForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/orgs/:org/migrations/:migration_id/repositories"
|
|
},
|
|
listReposForUser: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/:migration_id/repositories"
|
|
},
|
|
mapCommitAuthor: {
|
|
method: "PATCH",
|
|
params: {
|
|
author_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
email: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/import/authors/:author_id"
|
|
},
|
|
setLfsPreference: {
|
|
method: "PATCH",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
use_lfs: {
|
|
enum: ["opt_in", "opt_out"],
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/import/lfs"
|
|
},
|
|
startForAuthenticatedUser: {
|
|
method: "POST",
|
|
params: {
|
|
exclude_attachments: {
|
|
type: "boolean"
|
|
},
|
|
lock_repositories: {
|
|
type: "boolean"
|
|
},
|
|
repositories: {
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/user/migrations"
|
|
},
|
|
startForOrg: {
|
|
method: "POST",
|
|
params: {
|
|
exclude_attachments: {
|
|
type: "boolean"
|
|
},
|
|
lock_repositories: {
|
|
type: "boolean"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repositories: {
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/orgs/:org/migrations"
|
|
},
|
|
startImport: {
|
|
method: "PUT",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
tfvc_project: {
|
|
type: "string"
|
|
},
|
|
vcs: {
|
|
enum: ["subversion", "git", "mercurial", "tfvc"],
|
|
type: "string"
|
|
},
|
|
vcs_password: {
|
|
type: "string"
|
|
},
|
|
vcs_url: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
vcs_username: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/import"
|
|
},
|
|
unlockRepoForAuthenticatedUser: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo_name: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/migrations/:migration_id/repos/:repo_name/lock"
|
|
},
|
|
unlockRepoForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.wyandotte-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
migration_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo_name: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/migrations/:migration_id/repos/:repo_name/lock"
|
|
},
|
|
updateImport: {
|
|
method: "PATCH",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
vcs_password: {
|
|
type: "string"
|
|
},
|
|
vcs_username: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/import"
|
|
}
|
|
},
|
|
oauthAuthorizations: {
|
|
checkAuthorization: {
|
|
deprecated: "octokit.oauthAuthorizations.checkAuthorization() has been renamed to octokit.apps.checkAuthorization() (2019-11-05)",
|
|
method: "GET",
|
|
params: {
|
|
access_token: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/tokens/:access_token"
|
|
},
|
|
createAuthorization: {
|
|
deprecated: "octokit.oauthAuthorizations.createAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#create-a-new-authorization",
|
|
method: "POST",
|
|
params: {
|
|
client_id: {
|
|
type: "string"
|
|
},
|
|
client_secret: {
|
|
type: "string"
|
|
},
|
|
fingerprint: {
|
|
type: "string"
|
|
},
|
|
note: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
note_url: {
|
|
type: "string"
|
|
},
|
|
scopes: {
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/authorizations"
|
|
},
|
|
deleteAuthorization: {
|
|
deprecated: "octokit.oauthAuthorizations.deleteAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-an-authorization",
|
|
method: "DELETE",
|
|
params: {
|
|
authorization_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/authorizations/:authorization_id"
|
|
},
|
|
deleteGrant: {
|
|
deprecated: "octokit.oauthAuthorizations.deleteGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-a-grant",
|
|
method: "DELETE",
|
|
params: {
|
|
grant_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/applications/grants/:grant_id"
|
|
},
|
|
getAuthorization: {
|
|
deprecated: "octokit.oauthAuthorizations.getAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-authorization",
|
|
method: "GET",
|
|
params: {
|
|
authorization_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/authorizations/:authorization_id"
|
|
},
|
|
getGrant: {
|
|
deprecated: "octokit.oauthAuthorizations.getGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-grant",
|
|
method: "GET",
|
|
params: {
|
|
grant_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/applications/grants/:grant_id"
|
|
},
|
|
getOrCreateAuthorizationForApp: {
|
|
deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForApp() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app",
|
|
method: "PUT",
|
|
params: {
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_secret: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
fingerprint: {
|
|
type: "string"
|
|
},
|
|
note: {
|
|
type: "string"
|
|
},
|
|
note_url: {
|
|
type: "string"
|
|
},
|
|
scopes: {
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/authorizations/clients/:client_id"
|
|
},
|
|
getOrCreateAuthorizationForAppAndFingerprint: {
|
|
deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app-and-fingerprint",
|
|
method: "PUT",
|
|
params: {
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_secret: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
fingerprint: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
note: {
|
|
type: "string"
|
|
},
|
|
note_url: {
|
|
type: "string"
|
|
},
|
|
scopes: {
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/authorizations/clients/:client_id/:fingerprint"
|
|
},
|
|
getOrCreateAuthorizationForAppFingerprint: {
|
|
deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppFingerprint() has been renamed to octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() (2018-12-27)",
|
|
method: "PUT",
|
|
params: {
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_secret: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
fingerprint: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
note: {
|
|
type: "string"
|
|
},
|
|
note_url: {
|
|
type: "string"
|
|
},
|
|
scopes: {
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/authorizations/clients/:client_id/:fingerprint"
|
|
},
|
|
listAuthorizations: {
|
|
deprecated: "octokit.oauthAuthorizations.listAuthorizations() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-authorizations",
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/authorizations"
|
|
},
|
|
listGrants: {
|
|
deprecated: "octokit.oauthAuthorizations.listGrants() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-grants",
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/applications/grants"
|
|
},
|
|
resetAuthorization: {
|
|
deprecated: "octokit.oauthAuthorizations.resetAuthorization() has been renamed to octokit.apps.resetAuthorization() (2019-11-05)",
|
|
method: "POST",
|
|
params: {
|
|
access_token: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/tokens/:access_token"
|
|
},
|
|
revokeAuthorizationForApplication: {
|
|
deprecated: "octokit.oauthAuthorizations.revokeAuthorizationForApplication() has been renamed to octokit.apps.revokeAuthorizationForApplication() (2019-11-05)",
|
|
method: "DELETE",
|
|
params: {
|
|
access_token: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/tokens/:access_token"
|
|
},
|
|
revokeGrantForApplication: {
|
|
deprecated: "octokit.oauthAuthorizations.revokeGrantForApplication() has been renamed to octokit.apps.revokeGrantForApplication() (2019-11-05)",
|
|
method: "DELETE",
|
|
params: {
|
|
access_token: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
client_id: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/applications/:client_id/grants/:access_token"
|
|
},
|
|
updateAuthorization: {
|
|
deprecated: "octokit.oauthAuthorizations.updateAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#update-an-existing-authorization",
|
|
method: "PATCH",
|
|
params: {
|
|
add_scopes: {
|
|
type: "string[]"
|
|
},
|
|
authorization_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
fingerprint: {
|
|
type: "string"
|
|
},
|
|
note: {
|
|
type: "string"
|
|
},
|
|
note_url: {
|
|
type: "string"
|
|
},
|
|
remove_scopes: {
|
|
type: "string[]"
|
|
},
|
|
scopes: {
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/authorizations/:authorization_id"
|
|
}
|
|
},
|
|
orgs: {
|
|
addOrUpdateMembership: {
|
|
method: "PUT",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
role: {
|
|
enum: ["admin", "member"],
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/memberships/:username"
|
|
},
|
|
blockUser: {
|
|
method: "PUT",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/blocks/:username"
|
|
},
|
|
checkBlockedUser: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/blocks/:username"
|
|
},
|
|
checkMembership: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/members/:username"
|
|
},
|
|
checkPublicMembership: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/public_members/:username"
|
|
},
|
|
concealMembership: {
|
|
method: "DELETE",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/public_members/:username"
|
|
},
|
|
convertMemberToOutsideCollaborator: {
|
|
method: "PUT",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/outside_collaborators/:username"
|
|
},
|
|
createHook: {
|
|
method: "POST",
|
|
params: {
|
|
active: {
|
|
type: "boolean"
|
|
},
|
|
config: {
|
|
required: true,
|
|
type: "object"
|
|
},
|
|
"config.content_type": {
|
|
type: "string"
|
|
},
|
|
"config.insecure_ssl": {
|
|
type: "string"
|
|
},
|
|
"config.secret": {
|
|
type: "string"
|
|
},
|
|
"config.url": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
events: {
|
|
type: "string[]"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/hooks"
|
|
},
|
|
createInvitation: {
|
|
method: "POST",
|
|
params: {
|
|
email: {
|
|
type: "string"
|
|
},
|
|
invitee_id: {
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
role: {
|
|
enum: ["admin", "direct_member", "billing_manager"],
|
|
type: "string"
|
|
},
|
|
team_ids: {
|
|
type: "integer[]"
|
|
}
|
|
},
|
|
url: "/orgs/:org/invitations"
|
|
},
|
|
deleteHook: {
|
|
method: "DELETE",
|
|
params: {
|
|
hook_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/hooks/:hook_id"
|
|
},
|
|
get: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org"
|
|
},
|
|
getHook: {
|
|
method: "GET",
|
|
params: {
|
|
hook_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/hooks/:hook_id"
|
|
},
|
|
getMembership: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/memberships/:username"
|
|
},
|
|
getMembershipForAuthenticatedUser: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/memberships/orgs/:org"
|
|
},
|
|
list: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/organizations"
|
|
},
|
|
listBlockedUsers: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/blocks"
|
|
},
|
|
listForAuthenticatedUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/orgs"
|
|
},
|
|
listForUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/orgs"
|
|
},
|
|
listHooks: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/orgs/:org/hooks"
|
|
},
|
|
listInstallations: {
|
|
headers: {
|
|
accept: "application/vnd.github.machine-man-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/orgs/:org/installations"
|
|
},
|
|
listInvitationTeams: {
|
|
method: "GET",
|
|
params: {
|
|
invitation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/orgs/:org/invitations/:invitation_id/teams"
|
|
},
|
|
listMembers: {
|
|
method: "GET",
|
|
params: {
|
|
filter: {
|
|
enum: ["2fa_disabled", "all"],
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
role: {
|
|
enum: ["all", "admin", "member"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/members"
|
|
},
|
|
listMemberships: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
state: {
|
|
enum: ["active", "pending"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/memberships/orgs"
|
|
},
|
|
listOutsideCollaborators: {
|
|
method: "GET",
|
|
params: {
|
|
filter: {
|
|
enum: ["2fa_disabled", "all"],
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/orgs/:org/outside_collaborators"
|
|
},
|
|
listPendingInvitations: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/orgs/:org/invitations"
|
|
},
|
|
listPublicMembers: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/orgs/:org/public_members"
|
|
},
|
|
pingHook: {
|
|
method: "POST",
|
|
params: {
|
|
hook_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/hooks/:hook_id/pings"
|
|
},
|
|
publicizeMembership: {
|
|
method: "PUT",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/public_members/:username"
|
|
},
|
|
removeMember: {
|
|
method: "DELETE",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/members/:username"
|
|
},
|
|
removeMembership: {
|
|
method: "DELETE",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/memberships/:username"
|
|
},
|
|
removeOutsideCollaborator: {
|
|
method: "DELETE",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/outside_collaborators/:username"
|
|
},
|
|
unblockUser: {
|
|
method: "DELETE",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/blocks/:username"
|
|
},
|
|
update: {
|
|
method: "PATCH",
|
|
params: {
|
|
billing_email: {
|
|
type: "string"
|
|
},
|
|
company: {
|
|
type: "string"
|
|
},
|
|
default_repository_permission: {
|
|
enum: ["read", "write", "admin", "none"],
|
|
type: "string"
|
|
},
|
|
description: {
|
|
type: "string"
|
|
},
|
|
email: {
|
|
type: "string"
|
|
},
|
|
has_organization_projects: {
|
|
type: "boolean"
|
|
},
|
|
has_repository_projects: {
|
|
type: "boolean"
|
|
},
|
|
location: {
|
|
type: "string"
|
|
},
|
|
members_allowed_repository_creation_type: {
|
|
enum: ["all", "private", "none"],
|
|
type: "string"
|
|
},
|
|
members_can_create_internal_repositories: {
|
|
type: "boolean"
|
|
},
|
|
members_can_create_private_repositories: {
|
|
type: "boolean"
|
|
},
|
|
members_can_create_public_repositories: {
|
|
type: "boolean"
|
|
},
|
|
members_can_create_repositories: {
|
|
type: "boolean"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org"
|
|
},
|
|
updateHook: {
|
|
method: "PATCH",
|
|
params: {
|
|
active: {
|
|
type: "boolean"
|
|
},
|
|
config: {
|
|
type: "object"
|
|
},
|
|
"config.content_type": {
|
|
type: "string"
|
|
},
|
|
"config.insecure_ssl": {
|
|
type: "string"
|
|
},
|
|
"config.secret": {
|
|
type: "string"
|
|
},
|
|
"config.url": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
events: {
|
|
type: "string[]"
|
|
},
|
|
hook_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/hooks/:hook_id"
|
|
},
|
|
updateMembership: {
|
|
method: "PATCH",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["active"],
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/memberships/orgs/:org"
|
|
}
|
|
},
|
|
projects: {
|
|
addCollaborator: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
permission: {
|
|
enum: ["read", "write", "admin"],
|
|
type: "string"
|
|
},
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/projects/:project_id/collaborators/:username"
|
|
},
|
|
createCard: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
column_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content_id: {
|
|
type: "integer"
|
|
},
|
|
content_type: {
|
|
type: "string"
|
|
},
|
|
note: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/projects/columns/:column_id/cards"
|
|
},
|
|
createColumn: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/projects/:project_id/columns"
|
|
},
|
|
createForAuthenticatedUser: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/projects"
|
|
},
|
|
createForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/projects"
|
|
},
|
|
createForRepo: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/projects"
|
|
},
|
|
delete: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/projects/:project_id"
|
|
},
|
|
deleteCard: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
card_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/projects/columns/cards/:card_id"
|
|
},
|
|
deleteColumn: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
column_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/projects/columns/:column_id"
|
|
},
|
|
get: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/projects/:project_id"
|
|
},
|
|
getCard: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
card_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/projects/columns/cards/:card_id"
|
|
},
|
|
getColumn: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
column_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/projects/columns/:column_id"
|
|
},
|
|
listCards: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
archived_state: {
|
|
enum: ["all", "archived", "not_archived"],
|
|
type: "string"
|
|
},
|
|
column_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/projects/columns/:column_id/cards"
|
|
},
|
|
listCollaborators: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
affiliation: {
|
|
enum: ["outside", "direct", "all"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/projects/:project_id/collaborators"
|
|
},
|
|
listColumns: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/projects/:project_id/columns"
|
|
},
|
|
listForOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed", "all"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/projects"
|
|
},
|
|
listForRepo: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed", "all"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/projects"
|
|
},
|
|
listForUser: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed", "all"],
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/projects"
|
|
},
|
|
moveCard: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
card_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
column_id: {
|
|
type: "integer"
|
|
},
|
|
position: {
|
|
required: true,
|
|
type: "string",
|
|
validation: "^(top|bottom|after:\\d+)$"
|
|
}
|
|
},
|
|
url: "/projects/columns/cards/:card_id/moves"
|
|
},
|
|
moveColumn: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
column_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
position: {
|
|
required: true,
|
|
type: "string",
|
|
validation: "^(first|last|after:\\d+)$"
|
|
}
|
|
},
|
|
url: "/projects/columns/:column_id/moves"
|
|
},
|
|
removeCollaborator: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/projects/:project_id/collaborators/:username"
|
|
},
|
|
reviewUserPermissionLevel: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/projects/:project_id/collaborators/:username/permission"
|
|
},
|
|
update: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
},
|
|
organization_permission: {
|
|
type: "string"
|
|
},
|
|
private: {
|
|
type: "boolean"
|
|
},
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/projects/:project_id"
|
|
},
|
|
updateCard: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "PATCH",
|
|
params: {
|
|
archived: {
|
|
type: "boolean"
|
|
},
|
|
card_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
note: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/projects/columns/cards/:card_id"
|
|
},
|
|
updateColumn: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "PATCH",
|
|
params: {
|
|
column_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/projects/columns/:column_id"
|
|
}
|
|
},
|
|
pulls: {
|
|
checkIfMerged: {
|
|
method: "GET",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/merge"
|
|
},
|
|
create: {
|
|
method: "POST",
|
|
params: {
|
|
base: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
body: {
|
|
type: "string"
|
|
},
|
|
draft: {
|
|
type: "boolean"
|
|
},
|
|
head: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
maintainer_can_modify: {
|
|
type: "boolean"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
title: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls"
|
|
},
|
|
createComment: {
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
commit_id: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
in_reply_to: {
|
|
deprecated: true,
|
|
description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.",
|
|
type: "integer"
|
|
},
|
|
line: {
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
path: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
position: {
|
|
type: "integer"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
side: {
|
|
enum: ["LEFT", "RIGHT"],
|
|
type: "string"
|
|
},
|
|
start_line: {
|
|
type: "integer"
|
|
},
|
|
start_side: {
|
|
enum: ["LEFT", "RIGHT", "side"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/comments"
|
|
},
|
|
createCommentReply: {
|
|
deprecated: "octokit.pulls.createCommentReply() has been renamed to octokit.pulls.createComment() (2019-09-09)",
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
commit_id: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
in_reply_to: {
|
|
deprecated: true,
|
|
description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.",
|
|
type: "integer"
|
|
},
|
|
line: {
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
path: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
position: {
|
|
type: "integer"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
side: {
|
|
enum: ["LEFT", "RIGHT"],
|
|
type: "string"
|
|
},
|
|
start_line: {
|
|
type: "integer"
|
|
},
|
|
start_side: {
|
|
enum: ["LEFT", "RIGHT", "side"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/comments"
|
|
},
|
|
createFromIssue: {
|
|
deprecated: "octokit.pulls.createFromIssue() is deprecated, see https://developer.github.com/v3/pulls/#create-a-pull-request",
|
|
method: "POST",
|
|
params: {
|
|
base: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
draft: {
|
|
type: "boolean"
|
|
},
|
|
head: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
issue: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
maintainer_can_modify: {
|
|
type: "boolean"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls"
|
|
},
|
|
createReview: {
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
comments: {
|
|
type: "object[]"
|
|
},
|
|
"comments[].body": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"comments[].path": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"comments[].position": {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
commit_id: {
|
|
type: "string"
|
|
},
|
|
event: {
|
|
enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"],
|
|
type: "string"
|
|
},
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/reviews"
|
|
},
|
|
createReviewCommentReply: {
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/comments/:comment_id/replies"
|
|
},
|
|
createReviewRequest: {
|
|
method: "POST",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
reviewers: {
|
|
type: "string[]"
|
|
},
|
|
team_reviewers: {
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers"
|
|
},
|
|
deleteComment: {
|
|
method: "DELETE",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/comments/:comment_id"
|
|
},
|
|
deletePendingReview: {
|
|
method: "DELETE",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
review_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id"
|
|
},
|
|
deleteReviewRequest: {
|
|
method: "DELETE",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
reviewers: {
|
|
type: "string[]"
|
|
},
|
|
team_reviewers: {
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers"
|
|
},
|
|
dismissReview: {
|
|
method: "PUT",
|
|
params: {
|
|
message: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
review_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/dismissals"
|
|
},
|
|
get: {
|
|
method: "GET",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number"
|
|
},
|
|
getComment: {
|
|
method: "GET",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/comments/:comment_id"
|
|
},
|
|
getCommentsForReview: {
|
|
method: "GET",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
review_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/comments"
|
|
},
|
|
getReview: {
|
|
method: "GET",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
review_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id"
|
|
},
|
|
list: {
|
|
method: "GET",
|
|
params: {
|
|
base: {
|
|
type: "string"
|
|
},
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
head: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated", "popularity", "long-running"],
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed", "all"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls"
|
|
},
|
|
listComments: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/comments"
|
|
},
|
|
listCommentsForRepo: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/comments"
|
|
},
|
|
listCommits: {
|
|
method: "GET",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/commits"
|
|
},
|
|
listFiles: {
|
|
method: "GET",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/files"
|
|
},
|
|
listReviewRequests: {
|
|
method: "GET",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers"
|
|
},
|
|
listReviews: {
|
|
method: "GET",
|
|
params: {
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/reviews"
|
|
},
|
|
merge: {
|
|
method: "PUT",
|
|
params: {
|
|
commit_message: {
|
|
type: "string"
|
|
},
|
|
commit_title: {
|
|
type: "string"
|
|
},
|
|
merge_method: {
|
|
enum: ["merge", "squash", "rebase"],
|
|
type: "string"
|
|
},
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/merge"
|
|
},
|
|
submitReview: {
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
event: {
|
|
enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
review_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/events"
|
|
},
|
|
update: {
|
|
method: "PATCH",
|
|
params: {
|
|
base: {
|
|
type: "string"
|
|
},
|
|
body: {
|
|
type: "string"
|
|
},
|
|
maintainer_can_modify: {
|
|
type: "boolean"
|
|
},
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["open", "closed"],
|
|
type: "string"
|
|
},
|
|
title: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number"
|
|
},
|
|
updateBranch: {
|
|
headers: {
|
|
accept: "application/vnd.github.lydian-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
expected_head_sha: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/update-branch"
|
|
},
|
|
updateComment: {
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/comments/:comment_id"
|
|
},
|
|
updateReview: {
|
|
method: "PUT",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
number: {
|
|
alias: "pull_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
pull_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
review_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id"
|
|
}
|
|
},
|
|
rateLimit: {
|
|
get: {
|
|
method: "GET",
|
|
params: {},
|
|
url: "/rate_limit"
|
|
}
|
|
},
|
|
reactions: {
|
|
createForCommitComment: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/comments/:comment_id/reactions"
|
|
},
|
|
createForIssue: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/reactions"
|
|
},
|
|
createForIssueComment: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions"
|
|
},
|
|
createForPullRequestReviewComment: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions"
|
|
},
|
|
createForTeamDiscussion: {
|
|
deprecated: "octokit.reactions.createForTeamDiscussion() has been renamed to octokit.reactions.createForTeamDiscussionLegacy() (2020-01-16)",
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/reactions"
|
|
},
|
|
createForTeamDiscussionComment: {
|
|
deprecated: "octokit.reactions.createForTeamDiscussionComment() has been renamed to octokit.reactions.createForTeamDiscussionCommentLegacy() (2020-01-16)",
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
|
|
},
|
|
createForTeamDiscussionCommentInOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions"
|
|
},
|
|
createForTeamDiscussionCommentLegacy: {
|
|
deprecated: "octokit.reactions.createForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-comment-legacy",
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
|
|
},
|
|
createForTeamDiscussionInOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions"
|
|
},
|
|
createForTeamDiscussionLegacy: {
|
|
deprecated: "octokit.reactions.createForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-legacy",
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/reactions"
|
|
},
|
|
delete: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
reaction_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/reactions/:reaction_id"
|
|
},
|
|
listForCommitComment: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/comments/:comment_id/reactions"
|
|
},
|
|
listForIssue: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
type: "string"
|
|
},
|
|
issue_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
number: {
|
|
alias: "issue_number",
|
|
deprecated: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/:issue_number/reactions"
|
|
},
|
|
listForIssueComment: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions"
|
|
},
|
|
listForPullRequestReviewComment: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions"
|
|
},
|
|
listForTeamDiscussion: {
|
|
deprecated: "octokit.reactions.listForTeamDiscussion() has been renamed to octokit.reactions.listForTeamDiscussionLegacy() (2020-01-16)",
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/reactions"
|
|
},
|
|
listForTeamDiscussionComment: {
|
|
deprecated: "octokit.reactions.listForTeamDiscussionComment() has been renamed to octokit.reactions.listForTeamDiscussionCommentLegacy() (2020-01-16)",
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
|
|
},
|
|
listForTeamDiscussionCommentInOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions"
|
|
},
|
|
listForTeamDiscussionCommentLegacy: {
|
|
deprecated: "octokit.reactions.listForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-comment-legacy",
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions"
|
|
},
|
|
listForTeamDiscussionInOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions"
|
|
},
|
|
listForTeamDiscussionLegacy: {
|
|
deprecated: "octokit.reactions.listForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-legacy",
|
|
headers: {
|
|
accept: "application/vnd.github.squirrel-girl-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
content: {
|
|
enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"],
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/reactions"
|
|
}
|
|
},
|
|
repos: {
|
|
acceptInvitation: {
|
|
method: "PATCH",
|
|
params: {
|
|
invitation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/repository_invitations/:invitation_id"
|
|
},
|
|
addCollaborator: {
|
|
method: "PUT",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
permission: {
|
|
enum: ["pull", "push", "admin"],
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/collaborators/:username"
|
|
},
|
|
addDeployKey: {
|
|
method: "POST",
|
|
params: {
|
|
key: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
read_only: {
|
|
type: "boolean"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
title: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/keys"
|
|
},
|
|
addProtectedBranchAdminEnforcement: {
|
|
method: "POST",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins"
|
|
},
|
|
addProtectedBranchAppRestrictions: {
|
|
method: "POST",
|
|
params: {
|
|
apps: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
|
|
},
|
|
addProtectedBranchRequiredSignatures: {
|
|
headers: {
|
|
accept: "application/vnd.github.zzzax-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures"
|
|
},
|
|
addProtectedBranchRequiredStatusChecksContexts: {
|
|
method: "POST",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
contexts: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
|
|
},
|
|
addProtectedBranchTeamRestrictions: {
|
|
method: "POST",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
teams: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
|
|
},
|
|
addProtectedBranchUserRestrictions: {
|
|
method: "POST",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
users: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
|
|
},
|
|
checkCollaborator: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/collaborators/:username"
|
|
},
|
|
checkVulnerabilityAlerts: {
|
|
headers: {
|
|
accept: "application/vnd.github.dorian-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/vulnerability-alerts"
|
|
},
|
|
compareCommits: {
|
|
method: "GET",
|
|
params: {
|
|
base: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
head: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/compare/:base...:head"
|
|
},
|
|
createCommitComment: {
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
commit_sha: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
line: {
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
path: {
|
|
type: "string"
|
|
},
|
|
position: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
alias: "commit_sha",
|
|
deprecated: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits/:commit_sha/comments"
|
|
},
|
|
createDeployment: {
|
|
method: "POST",
|
|
params: {
|
|
auto_merge: {
|
|
type: "boolean"
|
|
},
|
|
description: {
|
|
type: "string"
|
|
},
|
|
environment: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
payload: {
|
|
type: "string"
|
|
},
|
|
production_environment: {
|
|
type: "boolean"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
required_contexts: {
|
|
type: "string[]"
|
|
},
|
|
task: {
|
|
type: "string"
|
|
},
|
|
transient_environment: {
|
|
type: "boolean"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/deployments"
|
|
},
|
|
createDeploymentStatus: {
|
|
method: "POST",
|
|
params: {
|
|
auto_inactive: {
|
|
type: "boolean"
|
|
},
|
|
deployment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
description: {
|
|
type: "string"
|
|
},
|
|
environment: {
|
|
enum: ["production", "staging", "qa"],
|
|
type: "string"
|
|
},
|
|
environment_url: {
|
|
type: "string"
|
|
},
|
|
log_url: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["error", "failure", "inactive", "in_progress", "queued", "pending", "success"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
target_url: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/deployments/:deployment_id/statuses"
|
|
},
|
|
createDispatchEvent: {
|
|
method: "POST",
|
|
params: {
|
|
client_payload: {
|
|
type: "object"
|
|
},
|
|
event_type: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/dispatches"
|
|
},
|
|
createFile: {
|
|
deprecated: "octokit.repos.createFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)",
|
|
method: "PUT",
|
|
params: {
|
|
author: {
|
|
type: "object"
|
|
},
|
|
"author.email": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"author.name": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
branch: {
|
|
type: "string"
|
|
},
|
|
committer: {
|
|
type: "object"
|
|
},
|
|
"committer.email": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"committer.name": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
content: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
message: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
path: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/contents/:path"
|
|
},
|
|
createForAuthenticatedUser: {
|
|
method: "POST",
|
|
params: {
|
|
allow_merge_commit: {
|
|
type: "boolean"
|
|
},
|
|
allow_rebase_merge: {
|
|
type: "boolean"
|
|
},
|
|
allow_squash_merge: {
|
|
type: "boolean"
|
|
},
|
|
auto_init: {
|
|
type: "boolean"
|
|
},
|
|
delete_branch_on_merge: {
|
|
type: "boolean"
|
|
},
|
|
description: {
|
|
type: "string"
|
|
},
|
|
gitignore_template: {
|
|
type: "string"
|
|
},
|
|
has_issues: {
|
|
type: "boolean"
|
|
},
|
|
has_projects: {
|
|
type: "boolean"
|
|
},
|
|
has_wiki: {
|
|
type: "boolean"
|
|
},
|
|
homepage: {
|
|
type: "string"
|
|
},
|
|
is_template: {
|
|
type: "boolean"
|
|
},
|
|
license_template: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
private: {
|
|
type: "boolean"
|
|
},
|
|
team_id: {
|
|
type: "integer"
|
|
},
|
|
visibility: {
|
|
enum: ["public", "private", "visibility", "internal"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/repos"
|
|
},
|
|
createFork: {
|
|
method: "POST",
|
|
params: {
|
|
organization: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/forks"
|
|
},
|
|
createHook: {
|
|
method: "POST",
|
|
params: {
|
|
active: {
|
|
type: "boolean"
|
|
},
|
|
config: {
|
|
required: true,
|
|
type: "object"
|
|
},
|
|
"config.content_type": {
|
|
type: "string"
|
|
},
|
|
"config.insecure_ssl": {
|
|
type: "string"
|
|
},
|
|
"config.secret": {
|
|
type: "string"
|
|
},
|
|
"config.url": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
events: {
|
|
type: "string[]"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/hooks"
|
|
},
|
|
createInOrg: {
|
|
method: "POST",
|
|
params: {
|
|
allow_merge_commit: {
|
|
type: "boolean"
|
|
},
|
|
allow_rebase_merge: {
|
|
type: "boolean"
|
|
},
|
|
allow_squash_merge: {
|
|
type: "boolean"
|
|
},
|
|
auto_init: {
|
|
type: "boolean"
|
|
},
|
|
delete_branch_on_merge: {
|
|
type: "boolean"
|
|
},
|
|
description: {
|
|
type: "string"
|
|
},
|
|
gitignore_template: {
|
|
type: "string"
|
|
},
|
|
has_issues: {
|
|
type: "boolean"
|
|
},
|
|
has_projects: {
|
|
type: "boolean"
|
|
},
|
|
has_wiki: {
|
|
type: "boolean"
|
|
},
|
|
homepage: {
|
|
type: "string"
|
|
},
|
|
is_template: {
|
|
type: "boolean"
|
|
},
|
|
license_template: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
private: {
|
|
type: "boolean"
|
|
},
|
|
team_id: {
|
|
type: "integer"
|
|
},
|
|
visibility: {
|
|
enum: ["public", "private", "visibility", "internal"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/repos"
|
|
},
|
|
createOrUpdateFile: {
|
|
method: "PUT",
|
|
params: {
|
|
author: {
|
|
type: "object"
|
|
},
|
|
"author.email": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"author.name": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
branch: {
|
|
type: "string"
|
|
},
|
|
committer: {
|
|
type: "object"
|
|
},
|
|
"committer.email": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"committer.name": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
content: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
message: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
path: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/contents/:path"
|
|
},
|
|
createRelease: {
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
draft: {
|
|
type: "boolean"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
prerelease: {
|
|
type: "boolean"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
tag_name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
target_commitish: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases"
|
|
},
|
|
createStatus: {
|
|
method: "POST",
|
|
params: {
|
|
context: {
|
|
type: "string"
|
|
},
|
|
description: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
state: {
|
|
enum: ["error", "failure", "pending", "success"],
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
target_url: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/statuses/:sha"
|
|
},
|
|
createUsingTemplate: {
|
|
headers: {
|
|
accept: "application/vnd.github.baptiste-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
description: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
type: "string"
|
|
},
|
|
private: {
|
|
type: "boolean"
|
|
},
|
|
template_owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
template_repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:template_owner/:template_repo/generate"
|
|
},
|
|
declineInvitation: {
|
|
method: "DELETE",
|
|
params: {
|
|
invitation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/repository_invitations/:invitation_id"
|
|
},
|
|
delete: {
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo"
|
|
},
|
|
deleteCommitComment: {
|
|
method: "DELETE",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/comments/:comment_id"
|
|
},
|
|
deleteDownload: {
|
|
method: "DELETE",
|
|
params: {
|
|
download_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/downloads/:download_id"
|
|
},
|
|
deleteFile: {
|
|
method: "DELETE",
|
|
params: {
|
|
author: {
|
|
type: "object"
|
|
},
|
|
"author.email": {
|
|
type: "string"
|
|
},
|
|
"author.name": {
|
|
type: "string"
|
|
},
|
|
branch: {
|
|
type: "string"
|
|
},
|
|
committer: {
|
|
type: "object"
|
|
},
|
|
"committer.email": {
|
|
type: "string"
|
|
},
|
|
"committer.name": {
|
|
type: "string"
|
|
},
|
|
message: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
path: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/contents/:path"
|
|
},
|
|
deleteHook: {
|
|
method: "DELETE",
|
|
params: {
|
|
hook_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/hooks/:hook_id"
|
|
},
|
|
deleteInvitation: {
|
|
method: "DELETE",
|
|
params: {
|
|
invitation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/invitations/:invitation_id"
|
|
},
|
|
deleteRelease: {
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
release_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases/:release_id"
|
|
},
|
|
deleteReleaseAsset: {
|
|
method: "DELETE",
|
|
params: {
|
|
asset_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases/assets/:asset_id"
|
|
},
|
|
disableAutomatedSecurityFixes: {
|
|
headers: {
|
|
accept: "application/vnd.github.london-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/automated-security-fixes"
|
|
},
|
|
disablePagesSite: {
|
|
headers: {
|
|
accept: "application/vnd.github.switcheroo-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pages"
|
|
},
|
|
disableVulnerabilityAlerts: {
|
|
headers: {
|
|
accept: "application/vnd.github.dorian-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/vulnerability-alerts"
|
|
},
|
|
enableAutomatedSecurityFixes: {
|
|
headers: {
|
|
accept: "application/vnd.github.london-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/automated-security-fixes"
|
|
},
|
|
enablePagesSite: {
|
|
headers: {
|
|
accept: "application/vnd.github.switcheroo-preview+json"
|
|
},
|
|
method: "POST",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
source: {
|
|
type: "object"
|
|
},
|
|
"source.branch": {
|
|
enum: ["master", "gh-pages"],
|
|
type: "string"
|
|
},
|
|
"source.path": {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pages"
|
|
},
|
|
enableVulnerabilityAlerts: {
|
|
headers: {
|
|
accept: "application/vnd.github.dorian-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/vulnerability-alerts"
|
|
},
|
|
get: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo"
|
|
},
|
|
getAppsWithAccessToProtectedBranch: {
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
|
|
},
|
|
getArchiveLink: {
|
|
method: "GET",
|
|
params: {
|
|
archive_format: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/:archive_format/:ref"
|
|
},
|
|
getBranch: {
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch"
|
|
},
|
|
getBranchProtection: {
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection"
|
|
},
|
|
getClones: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
per: {
|
|
enum: ["day", "week"],
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/traffic/clones"
|
|
},
|
|
getCodeFrequencyStats: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/stats/code_frequency"
|
|
},
|
|
getCollaboratorPermissionLevel: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/collaborators/:username/permission"
|
|
},
|
|
getCombinedStatusForRef: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits/:ref/status"
|
|
},
|
|
getCommit: {
|
|
method: "GET",
|
|
params: {
|
|
commit_sha: {
|
|
alias: "ref",
|
|
deprecated: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
alias: "ref",
|
|
deprecated: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits/:ref"
|
|
},
|
|
getCommitActivityStats: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/stats/commit_activity"
|
|
},
|
|
getCommitComment: {
|
|
method: "GET",
|
|
params: {
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/comments/:comment_id"
|
|
},
|
|
getCommitRefSha: {
|
|
deprecated: "octokit.repos.getCommitRefSha() is deprecated, see https://developer.github.com/v3/repos/commits/#get-a-single-commit",
|
|
headers: {
|
|
accept: "application/vnd.github.v3.sha"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits/:ref"
|
|
},
|
|
getContents: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
path: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
ref: {
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/contents/:path"
|
|
},
|
|
getContributorsStats: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/stats/contributors"
|
|
},
|
|
getDeployKey: {
|
|
method: "GET",
|
|
params: {
|
|
key_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/keys/:key_id"
|
|
},
|
|
getDeployment: {
|
|
method: "GET",
|
|
params: {
|
|
deployment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/deployments/:deployment_id"
|
|
},
|
|
getDeploymentStatus: {
|
|
method: "GET",
|
|
params: {
|
|
deployment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
status_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/deployments/:deployment_id/statuses/:status_id"
|
|
},
|
|
getDownload: {
|
|
method: "GET",
|
|
params: {
|
|
download_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/downloads/:download_id"
|
|
},
|
|
getHook: {
|
|
method: "GET",
|
|
params: {
|
|
hook_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/hooks/:hook_id"
|
|
},
|
|
getLatestPagesBuild: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pages/builds/latest"
|
|
},
|
|
getLatestRelease: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases/latest"
|
|
},
|
|
getPages: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pages"
|
|
},
|
|
getPagesBuild: {
|
|
method: "GET",
|
|
params: {
|
|
build_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pages/builds/:build_id"
|
|
},
|
|
getParticipationStats: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/stats/participation"
|
|
},
|
|
getProtectedBranchAdminEnforcement: {
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins"
|
|
},
|
|
getProtectedBranchPullRequestReviewEnforcement: {
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews"
|
|
},
|
|
getProtectedBranchRequiredSignatures: {
|
|
headers: {
|
|
accept: "application/vnd.github.zzzax-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures"
|
|
},
|
|
getProtectedBranchRequiredStatusChecks: {
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks"
|
|
},
|
|
getProtectedBranchRestrictions: {
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions"
|
|
},
|
|
getPunchCardStats: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/stats/punch_card"
|
|
},
|
|
getReadme: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
ref: {
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/readme"
|
|
},
|
|
getRelease: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
release_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases/:release_id"
|
|
},
|
|
getReleaseAsset: {
|
|
method: "GET",
|
|
params: {
|
|
asset_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases/assets/:asset_id"
|
|
},
|
|
getReleaseByTag: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
tag: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases/tags/:tag"
|
|
},
|
|
getTeamsWithAccessToProtectedBranch: {
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
|
|
},
|
|
getTopPaths: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/traffic/popular/paths"
|
|
},
|
|
getTopReferrers: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/traffic/popular/referrers"
|
|
},
|
|
getUsersWithAccessToProtectedBranch: {
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
|
|
},
|
|
getViews: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
per: {
|
|
enum: ["day", "week"],
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/traffic/views"
|
|
},
|
|
list: {
|
|
method: "GET",
|
|
params: {
|
|
affiliation: {
|
|
type: "string"
|
|
},
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated", "pushed", "full_name"],
|
|
type: "string"
|
|
},
|
|
type: {
|
|
enum: ["all", "owner", "public", "private", "member"],
|
|
type: "string"
|
|
},
|
|
visibility: {
|
|
enum: ["all", "public", "private"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/repos"
|
|
},
|
|
listAppsWithAccessToProtectedBranch: {
|
|
deprecated: "octokit.repos.listAppsWithAccessToProtectedBranch() has been renamed to octokit.repos.getAppsWithAccessToProtectedBranch() (2019-09-13)",
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
|
|
},
|
|
listAssetsForRelease: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
release_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases/:release_id/assets"
|
|
},
|
|
listBranches: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
protected: {
|
|
type: "boolean"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches"
|
|
},
|
|
listBranchesForHeadCommit: {
|
|
headers: {
|
|
accept: "application/vnd.github.groot-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
commit_sha: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits/:commit_sha/branches-where-head"
|
|
},
|
|
listCollaborators: {
|
|
method: "GET",
|
|
params: {
|
|
affiliation: {
|
|
enum: ["outside", "direct", "all"],
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/collaborators"
|
|
},
|
|
listCommentsForCommit: {
|
|
method: "GET",
|
|
params: {
|
|
commit_sha: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
ref: {
|
|
alias: "commit_sha",
|
|
deprecated: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits/:commit_sha/comments"
|
|
},
|
|
listCommitComments: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/comments"
|
|
},
|
|
listCommits: {
|
|
method: "GET",
|
|
params: {
|
|
author: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
path: {
|
|
type: "string"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
type: "string"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
},
|
|
until: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits"
|
|
},
|
|
listContributors: {
|
|
method: "GET",
|
|
params: {
|
|
anon: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/contributors"
|
|
},
|
|
listDeployKeys: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/keys"
|
|
},
|
|
listDeploymentStatuses: {
|
|
method: "GET",
|
|
params: {
|
|
deployment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/deployments/:deployment_id/statuses"
|
|
},
|
|
listDeployments: {
|
|
method: "GET",
|
|
params: {
|
|
environment: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
ref: {
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
type: "string"
|
|
},
|
|
task: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/deployments"
|
|
},
|
|
listDownloads: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/downloads"
|
|
},
|
|
listForOrg: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated", "pushed", "full_name"],
|
|
type: "string"
|
|
},
|
|
type: {
|
|
enum: ["all", "public", "private", "forks", "sources", "member", "internal"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/repos"
|
|
},
|
|
listForUser: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated", "pushed", "full_name"],
|
|
type: "string"
|
|
},
|
|
type: {
|
|
enum: ["all", "owner", "member"],
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/repos"
|
|
},
|
|
listForks: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["newest", "oldest", "stargazers"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/forks"
|
|
},
|
|
listHooks: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/hooks"
|
|
},
|
|
listInvitations: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/invitations"
|
|
},
|
|
listInvitationsForAuthenticatedUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/repository_invitations"
|
|
},
|
|
listLanguages: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/languages"
|
|
},
|
|
listPagesBuilds: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pages/builds"
|
|
},
|
|
listProtectedBranchRequiredStatusChecksContexts: {
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
|
|
},
|
|
listProtectedBranchTeamRestrictions: {
|
|
deprecated: "octokit.repos.listProtectedBranchTeamRestrictions() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-09)",
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
|
|
},
|
|
listProtectedBranchUserRestrictions: {
|
|
deprecated: "octokit.repos.listProtectedBranchUserRestrictions() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-09)",
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
|
|
},
|
|
listPublic: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repositories"
|
|
},
|
|
listPullRequestsAssociatedWithCommit: {
|
|
headers: {
|
|
accept: "application/vnd.github.groot-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
commit_sha: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits/:commit_sha/pulls"
|
|
},
|
|
listReleases: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases"
|
|
},
|
|
listStatusesForRef: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
ref: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/commits/:ref/statuses"
|
|
},
|
|
listTags: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/tags"
|
|
},
|
|
listTeams: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/teams"
|
|
},
|
|
listTeamsWithAccessToProtectedBranch: {
|
|
deprecated: "octokit.repos.listTeamsWithAccessToProtectedBranch() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-13)",
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
|
|
},
|
|
listTopics: {
|
|
headers: {
|
|
accept: "application/vnd.github.mercy-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/topics"
|
|
},
|
|
listUsersWithAccessToProtectedBranch: {
|
|
deprecated: "octokit.repos.listUsersWithAccessToProtectedBranch() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-13)",
|
|
method: "GET",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
|
|
},
|
|
merge: {
|
|
method: "POST",
|
|
params: {
|
|
base: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
commit_message: {
|
|
type: "string"
|
|
},
|
|
head: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/merges"
|
|
},
|
|
pingHook: {
|
|
method: "POST",
|
|
params: {
|
|
hook_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/hooks/:hook_id/pings"
|
|
},
|
|
removeBranchProtection: {
|
|
method: "DELETE",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection"
|
|
},
|
|
removeCollaborator: {
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/collaborators/:username"
|
|
},
|
|
removeDeployKey: {
|
|
method: "DELETE",
|
|
params: {
|
|
key_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/keys/:key_id"
|
|
},
|
|
removeProtectedBranchAdminEnforcement: {
|
|
method: "DELETE",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins"
|
|
},
|
|
removeProtectedBranchAppRestrictions: {
|
|
method: "DELETE",
|
|
params: {
|
|
apps: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
|
|
},
|
|
removeProtectedBranchPullRequestReviewEnforcement: {
|
|
method: "DELETE",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews"
|
|
},
|
|
removeProtectedBranchRequiredSignatures: {
|
|
headers: {
|
|
accept: "application/vnd.github.zzzax-preview+json"
|
|
},
|
|
method: "DELETE",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures"
|
|
},
|
|
removeProtectedBranchRequiredStatusChecks: {
|
|
method: "DELETE",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks"
|
|
},
|
|
removeProtectedBranchRequiredStatusChecksContexts: {
|
|
method: "DELETE",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
contexts: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
|
|
},
|
|
removeProtectedBranchRestrictions: {
|
|
method: "DELETE",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions"
|
|
},
|
|
removeProtectedBranchTeamRestrictions: {
|
|
method: "DELETE",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
teams: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
|
|
},
|
|
removeProtectedBranchUserRestrictions: {
|
|
method: "DELETE",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
users: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
|
|
},
|
|
replaceProtectedBranchAppRestrictions: {
|
|
method: "PUT",
|
|
params: {
|
|
apps: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps"
|
|
},
|
|
replaceProtectedBranchRequiredStatusChecksContexts: {
|
|
method: "PUT",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
contexts: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts"
|
|
},
|
|
replaceProtectedBranchTeamRestrictions: {
|
|
method: "PUT",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
teams: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams"
|
|
},
|
|
replaceProtectedBranchUserRestrictions: {
|
|
method: "PUT",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
users: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users"
|
|
},
|
|
replaceTopics: {
|
|
headers: {
|
|
accept: "application/vnd.github.mercy-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
names: {
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/topics"
|
|
},
|
|
requestPageBuild: {
|
|
method: "POST",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pages/builds"
|
|
},
|
|
retrieveCommunityProfileMetrics: {
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/community/profile"
|
|
},
|
|
testPushHook: {
|
|
method: "POST",
|
|
params: {
|
|
hook_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/hooks/:hook_id/tests"
|
|
},
|
|
transfer: {
|
|
method: "POST",
|
|
params: {
|
|
new_owner: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_ids: {
|
|
type: "integer[]"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/transfer"
|
|
},
|
|
update: {
|
|
method: "PATCH",
|
|
params: {
|
|
allow_merge_commit: {
|
|
type: "boolean"
|
|
},
|
|
allow_rebase_merge: {
|
|
type: "boolean"
|
|
},
|
|
allow_squash_merge: {
|
|
type: "boolean"
|
|
},
|
|
archived: {
|
|
type: "boolean"
|
|
},
|
|
default_branch: {
|
|
type: "string"
|
|
},
|
|
delete_branch_on_merge: {
|
|
type: "boolean"
|
|
},
|
|
description: {
|
|
type: "string"
|
|
},
|
|
has_issues: {
|
|
type: "boolean"
|
|
},
|
|
has_projects: {
|
|
type: "boolean"
|
|
},
|
|
has_wiki: {
|
|
type: "boolean"
|
|
},
|
|
homepage: {
|
|
type: "string"
|
|
},
|
|
is_template: {
|
|
type: "boolean"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
private: {
|
|
type: "boolean"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
visibility: {
|
|
enum: ["public", "private", "visibility", "internal"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo"
|
|
},
|
|
updateBranchProtection: {
|
|
method: "PUT",
|
|
params: {
|
|
allow_deletions: {
|
|
type: "boolean"
|
|
},
|
|
allow_force_pushes: {
|
|
allowNull: true,
|
|
type: "boolean"
|
|
},
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
enforce_admins: {
|
|
allowNull: true,
|
|
required: true,
|
|
type: "boolean"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
required_linear_history: {
|
|
type: "boolean"
|
|
},
|
|
required_pull_request_reviews: {
|
|
allowNull: true,
|
|
required: true,
|
|
type: "object"
|
|
},
|
|
"required_pull_request_reviews.dismiss_stale_reviews": {
|
|
type: "boolean"
|
|
},
|
|
"required_pull_request_reviews.dismissal_restrictions": {
|
|
type: "object"
|
|
},
|
|
"required_pull_request_reviews.dismissal_restrictions.teams": {
|
|
type: "string[]"
|
|
},
|
|
"required_pull_request_reviews.dismissal_restrictions.users": {
|
|
type: "string[]"
|
|
},
|
|
"required_pull_request_reviews.require_code_owner_reviews": {
|
|
type: "boolean"
|
|
},
|
|
"required_pull_request_reviews.required_approving_review_count": {
|
|
type: "integer"
|
|
},
|
|
required_status_checks: {
|
|
allowNull: true,
|
|
required: true,
|
|
type: "object"
|
|
},
|
|
"required_status_checks.contexts": {
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
"required_status_checks.strict": {
|
|
required: true,
|
|
type: "boolean"
|
|
},
|
|
restrictions: {
|
|
allowNull: true,
|
|
required: true,
|
|
type: "object"
|
|
},
|
|
"restrictions.apps": {
|
|
type: "string[]"
|
|
},
|
|
"restrictions.teams": {
|
|
required: true,
|
|
type: "string[]"
|
|
},
|
|
"restrictions.users": {
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection"
|
|
},
|
|
updateCommitComment: {
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
comment_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/comments/:comment_id"
|
|
},
|
|
updateFile: {
|
|
deprecated: "octokit.repos.updateFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)",
|
|
method: "PUT",
|
|
params: {
|
|
author: {
|
|
type: "object"
|
|
},
|
|
"author.email": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"author.name": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
branch: {
|
|
type: "string"
|
|
},
|
|
committer: {
|
|
type: "object"
|
|
},
|
|
"committer.email": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
"committer.name": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
content: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
message: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
path: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sha: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/contents/:path"
|
|
},
|
|
updateHook: {
|
|
method: "PATCH",
|
|
params: {
|
|
active: {
|
|
type: "boolean"
|
|
},
|
|
add_events: {
|
|
type: "string[]"
|
|
},
|
|
config: {
|
|
type: "object"
|
|
},
|
|
"config.content_type": {
|
|
type: "string"
|
|
},
|
|
"config.insecure_ssl": {
|
|
type: "string"
|
|
},
|
|
"config.secret": {
|
|
type: "string"
|
|
},
|
|
"config.url": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
events: {
|
|
type: "string[]"
|
|
},
|
|
hook_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
remove_events: {
|
|
type: "string[]"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/hooks/:hook_id"
|
|
},
|
|
updateInformationAboutPagesSite: {
|
|
method: "PUT",
|
|
params: {
|
|
cname: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
source: {
|
|
enum: ['"gh-pages"', '"master"', '"master /docs"'],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/pages"
|
|
},
|
|
updateInvitation: {
|
|
method: "PATCH",
|
|
params: {
|
|
invitation_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
permissions: {
|
|
enum: ["read", "write", "admin"],
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/invitations/:invitation_id"
|
|
},
|
|
updateProtectedBranchPullRequestReviewEnforcement: {
|
|
method: "PATCH",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
dismiss_stale_reviews: {
|
|
type: "boolean"
|
|
},
|
|
dismissal_restrictions: {
|
|
type: "object"
|
|
},
|
|
"dismissal_restrictions.teams": {
|
|
type: "string[]"
|
|
},
|
|
"dismissal_restrictions.users": {
|
|
type: "string[]"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
require_code_owner_reviews: {
|
|
type: "boolean"
|
|
},
|
|
required_approving_review_count: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews"
|
|
},
|
|
updateProtectedBranchRequiredStatusChecks: {
|
|
method: "PATCH",
|
|
params: {
|
|
branch: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
contexts: {
|
|
type: "string[]"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
strict: {
|
|
type: "boolean"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks"
|
|
},
|
|
updateRelease: {
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
draft: {
|
|
type: "boolean"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
prerelease: {
|
|
type: "boolean"
|
|
},
|
|
release_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
tag_name: {
|
|
type: "string"
|
|
},
|
|
target_commitish: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases/:release_id"
|
|
},
|
|
updateReleaseAsset: {
|
|
method: "PATCH",
|
|
params: {
|
|
asset_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
label: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/repos/:owner/:repo/releases/assets/:asset_id"
|
|
},
|
|
uploadReleaseAsset: {
|
|
method: "POST",
|
|
params: {
|
|
data: {
|
|
mapTo: "data",
|
|
required: true,
|
|
type: "string | object"
|
|
},
|
|
file: {
|
|
alias: "data",
|
|
deprecated: true,
|
|
type: "string | object"
|
|
},
|
|
headers: {
|
|
required: true,
|
|
type: "object"
|
|
},
|
|
"headers.content-length": {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
"headers.content-type": {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
label: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
url: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: ":url"
|
|
}
|
|
},
|
|
search: {
|
|
code: {
|
|
method: "GET",
|
|
params: {
|
|
order: {
|
|
enum: ["desc", "asc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
q: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["indexed"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/search/code"
|
|
},
|
|
commits: {
|
|
headers: {
|
|
accept: "application/vnd.github.cloak-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
order: {
|
|
enum: ["desc", "asc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
q: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["author-date", "committer-date"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/search/commits"
|
|
},
|
|
issues: {
|
|
deprecated: "octokit.search.issues() has been renamed to octokit.search.issuesAndPullRequests() (2018-12-27)",
|
|
method: "GET",
|
|
params: {
|
|
order: {
|
|
enum: ["desc", "asc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
q: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/search/issues"
|
|
},
|
|
issuesAndPullRequests: {
|
|
method: "GET",
|
|
params: {
|
|
order: {
|
|
enum: ["desc", "asc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
q: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/search/issues"
|
|
},
|
|
labels: {
|
|
method: "GET",
|
|
params: {
|
|
order: {
|
|
enum: ["desc", "asc"],
|
|
type: "string"
|
|
},
|
|
q: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repository_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
sort: {
|
|
enum: ["created", "updated"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/search/labels"
|
|
},
|
|
repos: {
|
|
method: "GET",
|
|
params: {
|
|
order: {
|
|
enum: ["desc", "asc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
q: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["stars", "forks", "help-wanted-issues", "updated"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/search/repositories"
|
|
},
|
|
topics: {
|
|
method: "GET",
|
|
params: {
|
|
q: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/search/topics"
|
|
},
|
|
users: {
|
|
method: "GET",
|
|
params: {
|
|
order: {
|
|
enum: ["desc", "asc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
q: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
sort: {
|
|
enum: ["followers", "repositories", "joined"],
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/search/users"
|
|
}
|
|
},
|
|
teams: {
|
|
addMember: {
|
|
deprecated: "octokit.teams.addMember() has been renamed to octokit.teams.addMemberLegacy() (2020-01-16)",
|
|
method: "PUT",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/members/:username"
|
|
},
|
|
addMemberLegacy: {
|
|
deprecated: "octokit.teams.addMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-team-member-legacy",
|
|
method: "PUT",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/members/:username"
|
|
},
|
|
addOrUpdateMembership: {
|
|
deprecated: "octokit.teams.addOrUpdateMembership() has been renamed to octokit.teams.addOrUpdateMembershipLegacy() (2020-01-16)",
|
|
method: "PUT",
|
|
params: {
|
|
role: {
|
|
enum: ["member", "maintainer"],
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/memberships/:username"
|
|
},
|
|
addOrUpdateMembershipInOrg: {
|
|
method: "PUT",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
role: {
|
|
enum: ["member", "maintainer"],
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/memberships/:username"
|
|
},
|
|
addOrUpdateMembershipLegacy: {
|
|
deprecated: "octokit.teams.addOrUpdateMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-or-update-team-membership-legacy",
|
|
method: "PUT",
|
|
params: {
|
|
role: {
|
|
enum: ["member", "maintainer"],
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/memberships/:username"
|
|
},
|
|
addOrUpdateProject: {
|
|
deprecated: "octokit.teams.addOrUpdateProject() has been renamed to octokit.teams.addOrUpdateProjectLegacy() (2020-01-16)",
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
permission: {
|
|
enum: ["read", "write", "admin"],
|
|
type: "string"
|
|
},
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/projects/:project_id"
|
|
},
|
|
addOrUpdateProjectInOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
permission: {
|
|
enum: ["read", "write", "admin"],
|
|
type: "string"
|
|
},
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/projects/:project_id"
|
|
},
|
|
addOrUpdateProjectLegacy: {
|
|
deprecated: "octokit.teams.addOrUpdateProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-project-legacy",
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "PUT",
|
|
params: {
|
|
permission: {
|
|
enum: ["read", "write", "admin"],
|
|
type: "string"
|
|
},
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/projects/:project_id"
|
|
},
|
|
addOrUpdateRepo: {
|
|
deprecated: "octokit.teams.addOrUpdateRepo() has been renamed to octokit.teams.addOrUpdateRepoLegacy() (2020-01-16)",
|
|
method: "PUT",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
permission: {
|
|
enum: ["pull", "push", "admin"],
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/repos/:owner/:repo"
|
|
},
|
|
addOrUpdateRepoInOrg: {
|
|
method: "PUT",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
permission: {
|
|
enum: ["pull", "push", "admin"],
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo"
|
|
},
|
|
addOrUpdateRepoLegacy: {
|
|
deprecated: "octokit.teams.addOrUpdateRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-repository-legacy",
|
|
method: "PUT",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
permission: {
|
|
enum: ["pull", "push", "admin"],
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/repos/:owner/:repo"
|
|
},
|
|
checkManagesRepo: {
|
|
deprecated: "octokit.teams.checkManagesRepo() has been renamed to octokit.teams.checkManagesRepoLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/repos/:owner/:repo"
|
|
},
|
|
checkManagesRepoInOrg: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo"
|
|
},
|
|
checkManagesRepoLegacy: {
|
|
deprecated: "octokit.teams.checkManagesRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#check-if-a-team-manages-a-repository-legacy",
|
|
method: "GET",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/repos/:owner/:repo"
|
|
},
|
|
create: {
|
|
method: "POST",
|
|
params: {
|
|
description: {
|
|
type: "string"
|
|
},
|
|
maintainers: {
|
|
type: "string[]"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
parent_team_id: {
|
|
type: "integer"
|
|
},
|
|
permission: {
|
|
enum: ["pull", "push", "admin"],
|
|
type: "string"
|
|
},
|
|
privacy: {
|
|
enum: ["secret", "closed"],
|
|
type: "string"
|
|
},
|
|
repo_names: {
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams"
|
|
},
|
|
createDiscussion: {
|
|
deprecated: "octokit.teams.createDiscussion() has been renamed to octokit.teams.createDiscussionLegacy() (2020-01-16)",
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
private: {
|
|
type: "boolean"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
title: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions"
|
|
},
|
|
createDiscussionComment: {
|
|
deprecated: "octokit.teams.createDiscussionComment() has been renamed to octokit.teams.createDiscussionCommentLegacy() (2020-01-16)",
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments"
|
|
},
|
|
createDiscussionCommentInOrg: {
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments"
|
|
},
|
|
createDiscussionCommentLegacy: {
|
|
deprecated: "octokit.teams.createDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#create-a-comment-legacy",
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments"
|
|
},
|
|
createDiscussionInOrg: {
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
private: {
|
|
type: "boolean"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
title: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions"
|
|
},
|
|
createDiscussionLegacy: {
|
|
deprecated: "octokit.teams.createDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#create-a-discussion-legacy",
|
|
method: "POST",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
private: {
|
|
type: "boolean"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
title: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions"
|
|
},
|
|
delete: {
|
|
deprecated: "octokit.teams.delete() has been renamed to octokit.teams.deleteLegacy() (2020-01-16)",
|
|
method: "DELETE",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id"
|
|
},
|
|
deleteDiscussion: {
|
|
deprecated: "octokit.teams.deleteDiscussion() has been renamed to octokit.teams.deleteDiscussionLegacy() (2020-01-16)",
|
|
method: "DELETE",
|
|
params: {
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number"
|
|
},
|
|
deleteDiscussionComment: {
|
|
deprecated: "octokit.teams.deleteDiscussionComment() has been renamed to octokit.teams.deleteDiscussionCommentLegacy() (2020-01-16)",
|
|
method: "DELETE",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
|
|
},
|
|
deleteDiscussionCommentInOrg: {
|
|
method: "DELETE",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number"
|
|
},
|
|
deleteDiscussionCommentLegacy: {
|
|
deprecated: "octokit.teams.deleteDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#delete-a-comment-legacy",
|
|
method: "DELETE",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
|
|
},
|
|
deleteDiscussionInOrg: {
|
|
method: "DELETE",
|
|
params: {
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number"
|
|
},
|
|
deleteDiscussionLegacy: {
|
|
deprecated: "octokit.teams.deleteDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#delete-a-discussion-legacy",
|
|
method: "DELETE",
|
|
params: {
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number"
|
|
},
|
|
deleteInOrg: {
|
|
method: "DELETE",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug"
|
|
},
|
|
deleteLegacy: {
|
|
deprecated: "octokit.teams.deleteLegacy() is deprecated, see https://developer.github.com/v3/teams/#delete-team-legacy",
|
|
method: "DELETE",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id"
|
|
},
|
|
get: {
|
|
deprecated: "octokit.teams.get() has been renamed to octokit.teams.getLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id"
|
|
},
|
|
getByName: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug"
|
|
},
|
|
getDiscussion: {
|
|
deprecated: "octokit.teams.getDiscussion() has been renamed to octokit.teams.getDiscussionLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number"
|
|
},
|
|
getDiscussionComment: {
|
|
deprecated: "octokit.teams.getDiscussionComment() has been renamed to octokit.teams.getDiscussionCommentLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
|
|
},
|
|
getDiscussionCommentInOrg: {
|
|
method: "GET",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number"
|
|
},
|
|
getDiscussionCommentLegacy: {
|
|
deprecated: "octokit.teams.getDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#get-a-single-comment-legacy",
|
|
method: "GET",
|
|
params: {
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
|
|
},
|
|
getDiscussionInOrg: {
|
|
method: "GET",
|
|
params: {
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number"
|
|
},
|
|
getDiscussionLegacy: {
|
|
deprecated: "octokit.teams.getDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#get-a-single-discussion-legacy",
|
|
method: "GET",
|
|
params: {
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number"
|
|
},
|
|
getLegacy: {
|
|
deprecated: "octokit.teams.getLegacy() is deprecated, see https://developer.github.com/v3/teams/#get-team-legacy",
|
|
method: "GET",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id"
|
|
},
|
|
getMember: {
|
|
deprecated: "octokit.teams.getMember() has been renamed to octokit.teams.getMemberLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/members/:username"
|
|
},
|
|
getMemberLegacy: {
|
|
deprecated: "octokit.teams.getMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-member-legacy",
|
|
method: "GET",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/members/:username"
|
|
},
|
|
getMembership: {
|
|
deprecated: "octokit.teams.getMembership() has been renamed to octokit.teams.getMembershipLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/memberships/:username"
|
|
},
|
|
getMembershipInOrg: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/memberships/:username"
|
|
},
|
|
getMembershipLegacy: {
|
|
deprecated: "octokit.teams.getMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-membership-legacy",
|
|
method: "GET",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/memberships/:username"
|
|
},
|
|
list: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams"
|
|
},
|
|
listChild: {
|
|
deprecated: "octokit.teams.listChild() has been renamed to octokit.teams.listChildLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/teams"
|
|
},
|
|
listChildInOrg: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/teams"
|
|
},
|
|
listChildLegacy: {
|
|
deprecated: "octokit.teams.listChildLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-child-teams-legacy",
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/teams"
|
|
},
|
|
listDiscussionComments: {
|
|
deprecated: "octokit.teams.listDiscussionComments() has been renamed to octokit.teams.listDiscussionCommentsLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments"
|
|
},
|
|
listDiscussionCommentsInOrg: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments"
|
|
},
|
|
listDiscussionCommentsLegacy: {
|
|
deprecated: "octokit.teams.listDiscussionCommentsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#list-comments-legacy",
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments"
|
|
},
|
|
listDiscussions: {
|
|
deprecated: "octokit.teams.listDiscussions() has been renamed to octokit.teams.listDiscussionsLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions"
|
|
},
|
|
listDiscussionsInOrg: {
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions"
|
|
},
|
|
listDiscussionsLegacy: {
|
|
deprecated: "octokit.teams.listDiscussionsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#list-discussions-legacy",
|
|
method: "GET",
|
|
params: {
|
|
direction: {
|
|
enum: ["asc", "desc"],
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions"
|
|
},
|
|
listForAuthenticatedUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/teams"
|
|
},
|
|
listMembers: {
|
|
deprecated: "octokit.teams.listMembers() has been renamed to octokit.teams.listMembersLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
role: {
|
|
enum: ["member", "maintainer", "all"],
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/members"
|
|
},
|
|
listMembersInOrg: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
role: {
|
|
enum: ["member", "maintainer", "all"],
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/members"
|
|
},
|
|
listMembersLegacy: {
|
|
deprecated: "octokit.teams.listMembersLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-team-members-legacy",
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
role: {
|
|
enum: ["member", "maintainer", "all"],
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/members"
|
|
},
|
|
listPendingInvitations: {
|
|
deprecated: "octokit.teams.listPendingInvitations() has been renamed to octokit.teams.listPendingInvitationsLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/invitations"
|
|
},
|
|
listPendingInvitationsInOrg: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/invitations"
|
|
},
|
|
listPendingInvitationsLegacy: {
|
|
deprecated: "octokit.teams.listPendingInvitationsLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-pending-team-invitations-legacy",
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/invitations"
|
|
},
|
|
listProjects: {
|
|
deprecated: "octokit.teams.listProjects() has been renamed to octokit.teams.listProjectsLegacy() (2020-01-16)",
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/projects"
|
|
},
|
|
listProjectsInOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/projects"
|
|
},
|
|
listProjectsLegacy: {
|
|
deprecated: "octokit.teams.listProjectsLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-projects-legacy",
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/projects"
|
|
},
|
|
listRepos: {
|
|
deprecated: "octokit.teams.listRepos() has been renamed to octokit.teams.listReposLegacy() (2020-01-16)",
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/repos"
|
|
},
|
|
listReposInOrg: {
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/repos"
|
|
},
|
|
listReposLegacy: {
|
|
deprecated: "octokit.teams.listReposLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-repos-legacy",
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/repos"
|
|
},
|
|
removeMember: {
|
|
deprecated: "octokit.teams.removeMember() has been renamed to octokit.teams.removeMemberLegacy() (2020-01-16)",
|
|
method: "DELETE",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/members/:username"
|
|
},
|
|
removeMemberLegacy: {
|
|
deprecated: "octokit.teams.removeMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-member-legacy",
|
|
method: "DELETE",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/members/:username"
|
|
},
|
|
removeMembership: {
|
|
deprecated: "octokit.teams.removeMembership() has been renamed to octokit.teams.removeMembershipLegacy() (2020-01-16)",
|
|
method: "DELETE",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/memberships/:username"
|
|
},
|
|
removeMembershipInOrg: {
|
|
method: "DELETE",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/memberships/:username"
|
|
},
|
|
removeMembershipLegacy: {
|
|
deprecated: "octokit.teams.removeMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-membership-legacy",
|
|
method: "DELETE",
|
|
params: {
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/memberships/:username"
|
|
},
|
|
removeProject: {
|
|
deprecated: "octokit.teams.removeProject() has been renamed to octokit.teams.removeProjectLegacy() (2020-01-16)",
|
|
method: "DELETE",
|
|
params: {
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/projects/:project_id"
|
|
},
|
|
removeProjectInOrg: {
|
|
method: "DELETE",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/projects/:project_id"
|
|
},
|
|
removeProjectLegacy: {
|
|
deprecated: "octokit.teams.removeProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-project-legacy",
|
|
method: "DELETE",
|
|
params: {
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/projects/:project_id"
|
|
},
|
|
removeRepo: {
|
|
deprecated: "octokit.teams.removeRepo() has been renamed to octokit.teams.removeRepoLegacy() (2020-01-16)",
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/repos/:owner/:repo"
|
|
},
|
|
removeRepoInOrg: {
|
|
method: "DELETE",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo"
|
|
},
|
|
removeRepoLegacy: {
|
|
deprecated: "octokit.teams.removeRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-repository-legacy",
|
|
method: "DELETE",
|
|
params: {
|
|
owner: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
repo: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/repos/:owner/:repo"
|
|
},
|
|
reviewProject: {
|
|
deprecated: "octokit.teams.reviewProject() has been renamed to octokit.teams.reviewProjectLegacy() (2020-01-16)",
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/projects/:project_id"
|
|
},
|
|
reviewProjectInOrg: {
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/projects/:project_id"
|
|
},
|
|
reviewProjectLegacy: {
|
|
deprecated: "octokit.teams.reviewProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#review-a-team-project-legacy",
|
|
headers: {
|
|
accept: "application/vnd.github.inertia-preview+json"
|
|
},
|
|
method: "GET",
|
|
params: {
|
|
project_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/projects/:project_id"
|
|
},
|
|
update: {
|
|
deprecated: "octokit.teams.update() has been renamed to octokit.teams.updateLegacy() (2020-01-16)",
|
|
method: "PATCH",
|
|
params: {
|
|
description: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
parent_team_id: {
|
|
type: "integer"
|
|
},
|
|
permission: {
|
|
enum: ["pull", "push", "admin"],
|
|
type: "string"
|
|
},
|
|
privacy: {
|
|
enum: ["secret", "closed"],
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id"
|
|
},
|
|
updateDiscussion: {
|
|
deprecated: "octokit.teams.updateDiscussion() has been renamed to octokit.teams.updateDiscussionLegacy() (2020-01-16)",
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
title: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number"
|
|
},
|
|
updateDiscussionComment: {
|
|
deprecated: "octokit.teams.updateDiscussionComment() has been renamed to octokit.teams.updateDiscussionCommentLegacy() (2020-01-16)",
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
|
|
},
|
|
updateDiscussionCommentInOrg: {
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number"
|
|
},
|
|
updateDiscussionCommentLegacy: {
|
|
deprecated: "octokit.teams.updateDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#edit-a-comment-legacy",
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
comment_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number"
|
|
},
|
|
updateDiscussionInOrg: {
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
title: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number"
|
|
},
|
|
updateDiscussionLegacy: {
|
|
deprecated: "octokit.teams.updateDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#edit-a-discussion-legacy",
|
|
method: "PATCH",
|
|
params: {
|
|
body: {
|
|
type: "string"
|
|
},
|
|
discussion_number: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
},
|
|
title: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/teams/:team_id/discussions/:discussion_number"
|
|
},
|
|
updateInOrg: {
|
|
method: "PATCH",
|
|
params: {
|
|
description: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
org: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
parent_team_id: {
|
|
type: "integer"
|
|
},
|
|
permission: {
|
|
enum: ["pull", "push", "admin"],
|
|
type: "string"
|
|
},
|
|
privacy: {
|
|
enum: ["secret", "closed"],
|
|
type: "string"
|
|
},
|
|
team_slug: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/orgs/:org/teams/:team_slug"
|
|
},
|
|
updateLegacy: {
|
|
deprecated: "octokit.teams.updateLegacy() is deprecated, see https://developer.github.com/v3/teams/#edit-team-legacy",
|
|
method: "PATCH",
|
|
params: {
|
|
description: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
parent_team_id: {
|
|
type: "integer"
|
|
},
|
|
permission: {
|
|
enum: ["pull", "push", "admin"],
|
|
type: "string"
|
|
},
|
|
privacy: {
|
|
enum: ["secret", "closed"],
|
|
type: "string"
|
|
},
|
|
team_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/teams/:team_id"
|
|
}
|
|
},
|
|
users: {
|
|
addEmails: {
|
|
method: "POST",
|
|
params: {
|
|
emails: {
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/user/emails"
|
|
},
|
|
block: {
|
|
method: "PUT",
|
|
params: {
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/blocks/:username"
|
|
},
|
|
checkBlocked: {
|
|
method: "GET",
|
|
params: {
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/blocks/:username"
|
|
},
|
|
checkFollowing: {
|
|
method: "GET",
|
|
params: {
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/following/:username"
|
|
},
|
|
checkFollowingForUser: {
|
|
method: "GET",
|
|
params: {
|
|
target_user: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/following/:target_user"
|
|
},
|
|
createGpgKey: {
|
|
method: "POST",
|
|
params: {
|
|
armored_public_key: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/gpg_keys"
|
|
},
|
|
createPublicKey: {
|
|
method: "POST",
|
|
params: {
|
|
key: {
|
|
type: "string"
|
|
},
|
|
title: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/keys"
|
|
},
|
|
deleteEmails: {
|
|
method: "DELETE",
|
|
params: {
|
|
emails: {
|
|
required: true,
|
|
type: "string[]"
|
|
}
|
|
},
|
|
url: "/user/emails"
|
|
},
|
|
deleteGpgKey: {
|
|
method: "DELETE",
|
|
params: {
|
|
gpg_key_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/gpg_keys/:gpg_key_id"
|
|
},
|
|
deletePublicKey: {
|
|
method: "DELETE",
|
|
params: {
|
|
key_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/keys/:key_id"
|
|
},
|
|
follow: {
|
|
method: "PUT",
|
|
params: {
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/following/:username"
|
|
},
|
|
getAuthenticated: {
|
|
method: "GET",
|
|
params: {},
|
|
url: "/user"
|
|
},
|
|
getByUsername: {
|
|
method: "GET",
|
|
params: {
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username"
|
|
},
|
|
getContextForUser: {
|
|
method: "GET",
|
|
params: {
|
|
subject_id: {
|
|
type: "string"
|
|
},
|
|
subject_type: {
|
|
enum: ["organization", "repository", "issue", "pull_request"],
|
|
type: "string"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/hovercard"
|
|
},
|
|
getGpgKey: {
|
|
method: "GET",
|
|
params: {
|
|
gpg_key_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/gpg_keys/:gpg_key_id"
|
|
},
|
|
getPublicKey: {
|
|
method: "GET",
|
|
params: {
|
|
key_id: {
|
|
required: true,
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/keys/:key_id"
|
|
},
|
|
list: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
since: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users"
|
|
},
|
|
listBlocked: {
|
|
method: "GET",
|
|
params: {},
|
|
url: "/user/blocks"
|
|
},
|
|
listEmails: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/emails"
|
|
},
|
|
listFollowersForAuthenticatedUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/followers"
|
|
},
|
|
listFollowersForUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/followers"
|
|
},
|
|
listFollowingForAuthenticatedUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/following"
|
|
},
|
|
listFollowingForUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/following"
|
|
},
|
|
listGpgKeys: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/gpg_keys"
|
|
},
|
|
listGpgKeysForUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/gpg_keys"
|
|
},
|
|
listPublicEmails: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/public_emails"
|
|
},
|
|
listPublicKeys: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
}
|
|
},
|
|
url: "/user/keys"
|
|
},
|
|
listPublicKeysForUser: {
|
|
method: "GET",
|
|
params: {
|
|
page: {
|
|
type: "integer"
|
|
},
|
|
per_page: {
|
|
type: "integer"
|
|
},
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/users/:username/keys"
|
|
},
|
|
togglePrimaryEmailVisibility: {
|
|
method: "PATCH",
|
|
params: {
|
|
email: {
|
|
required: true,
|
|
type: "string"
|
|
},
|
|
visibility: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/email/visibility"
|
|
},
|
|
unblock: {
|
|
method: "DELETE",
|
|
params: {
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/blocks/:username"
|
|
},
|
|
unfollow: {
|
|
method: "DELETE",
|
|
params: {
|
|
username: {
|
|
required: true,
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user/following/:username"
|
|
},
|
|
updateAuthenticated: {
|
|
method: "PATCH",
|
|
params: {
|
|
bio: {
|
|
type: "string"
|
|
},
|
|
blog: {
|
|
type: "string"
|
|
},
|
|
company: {
|
|
type: "string"
|
|
},
|
|
email: {
|
|
type: "string"
|
|
},
|
|
hireable: {
|
|
type: "boolean"
|
|
},
|
|
location: {
|
|
type: "string"
|
|
},
|
|
name: {
|
|
type: "string"
|
|
}
|
|
},
|
|
url: "/user"
|
|
}
|
|
}
|
|
};
|
|
|
|
// Version of the bundled @octokit/plugin-rest-endpoint-methods build;
// exposed to callers as `restEndpointMethods.VERSION`.
const VERSION = "2.4.0";
|
|
|
|
/**
 * Attaches every route definition in `routes` to the `octokit` instance as
 * `octokit.<namespace>.<apiName>(params)` request methods.
 *
 * For routes with deprecated parameters, the method (and its `.endpoint` /
 * `.endpoint.merge` helpers) is wrapped via `patchForDeprecation` so old
 * parameter names are aliased with a warning. For routes that are deprecated
 * as a whole, the method warns once on first call and then replaces itself
 * with the plain request method.
 *
 * @param {object} octokit - Octokit instance to augment (mutated in place).
 * @param {object} routes  - Map of namespace -> apiName -> route options
 *                           ({ method, url, headers, params, deprecated }).
 */
function registerEndpoints(octokit, routes) {
  Object.keys(routes).forEach(namespaceName => {
    if (!octokit[namespaceName]) {
      octokit[namespaceName] = {};
    }

    Object.keys(routes[namespaceName]).forEach(apiName => {
      const apiOptions = routes[namespaceName][apiName];

      // Collect only the static request defaults that are present on the
      // route definition; `params` becomes the request validation schema.
      const endpointDefaults = ["method", "url", "headers"].reduce((map, key) => {
        if (typeof apiOptions[key] !== "undefined") {
          map[key] = apiOptions[key];
        }

        return map;
      }, {});
      endpointDefaults.request = {
        validate: apiOptions.params
      };
      let request = octokit.request.defaults(endpointDefaults);

      // patch request & endpoint methods to support deprecated parameters.
      // Not the most elegant solution, but we don’t want to move deprecation
      // logic into octokit/endpoint.js as it’s out of scope
      const hasDeprecatedParam = Object.keys(apiOptions.params || {}).find(key => apiOptions.params[key].deprecated);

      if (hasDeprecatedParam) {
        const patch = patchForDeprecation.bind(null, octokit, apiOptions);
        request = patch(octokit.request.defaults(endpointDefaults), `.${namespaceName}.${apiName}()`);
        request.endpoint = patch(request.endpoint, `.${namespaceName}.${apiName}.endpoint()`);
        request.endpoint.merge = patch(request.endpoint.merge, `.${namespaceName}.${apiName}.endpoint.merge()`);
      }

      if (apiOptions.deprecated) {
        // Warn-once wrapper: the first call logs the deprecation notice
        // (`deprecation` is required elsewhere in this bundle) and then swaps
        // the plain `request` back onto the namespace, so subsequent calls
        // skip the warning entirely.
        octokit[namespaceName][apiName] = Object.assign(function deprecatedEndpointMethod() {
          octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] ${apiOptions.deprecated}`));
          octokit[namespaceName][apiName] = request;
          return request.apply(null, arguments);
        }, request);
        return;
      }

      octokit[namespaceName][apiName] = request;
    });
  });
}
|
|
|
|
/**
 * Wraps `method` so deprecated parameter names (per `apiOptions.params`) are
 * mapped onto their `alias` replacement before delegating, emitting a
 * deprecation warning for each use.
 *
 * @param {object} octokit    - Instance whose `log.warn` receives warnings.
 * @param {object} apiOptions - Route options; `params[key].deprecated`/`alias` drive the mapping.
 * @param {Function} method   - The real request method to delegate to.
 * @param {string} methodName - Human-readable method name used in warnings.
 * @returns {Function} Patched method carrying over `method`'s own properties.
 */
function patchForDeprecation(octokit, apiOptions, method, methodName) {
  const patchedMethod = options => {
    // Work on a shallow copy so the caller's options object is not mutated.
    const normalized = Object.assign({}, options);

    for (const key of Object.keys(normalized)) {
      const paramSpec = apiOptions.params[key];

      if (!paramSpec || !paramSpec.deprecated) {
        continue;
      }

      const aliasKey = paramSpec.alias;
      octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] "${key}" parameter is deprecated for "${methodName}". Use "${aliasKey}" instead`));

      // The explicitly-passed alias wins over the deprecated name.
      if (!(aliasKey in normalized)) {
        normalized[aliasKey] = normalized[key];
      }

      delete normalized[key];
    }

    return method(normalized);
  };

  // Carry over own enumerable properties (e.g. `endpoint`) so the patched
  // function is a drop-in replacement for `method`.
  for (const key of Object.keys(method)) {
    patchedMethod[key] = method[key];
  }

  return patchedMethod;
}
|
|
|
|
/**
|
|
* This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary
|
|
* goal is to rebuild @octokit/rest on top of @octokit/core. Once that is
|
|
* done, we will remove the registerEndpoints methods and return the methods
|
|
* directly as with the other plugins. At that point we will also remove the
|
|
* legacy workarounds and deprecations.
|
|
*
|
|
* See the plan at
|
|
* https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1
|
|
*/
|
|
|
|
/**
 * Plugin entry point: registers every REST endpoint method on `octokit` and
 * installs deprecated scope aliases (gitdata/authorization/pullRequests) that
 * warn on each access before forwarding to the renamed scope.
 *
 * @param {object} octokit - Octokit instance to augment (mutated in place).
 * @returns {object} Empty object — nothing is exposed via the plugin return.
 */
function restEndpointMethods(octokit) {
  // Expose `registerEndpoints` pre-bound to this instance so consumers can
  // register additional custom routes later.
  // @ts-ignore
  octokit.registerEndpoints = registerEndpoints.bind(null, octokit);
  registerEndpoints(octokit, endpointsByScope);

  // Aliasing scopes for backward compatibility
  // See https://github.com/octokit/rest.js/pull/1134
  const scopeAliases = [["gitdata", "git"], ["authorization", "oauthAuthorizations"], ["pullRequests", "pulls"]];

  for (const [deprecatedScope, scope] of scopeAliases) {
    Object.defineProperty(octokit, deprecatedScope, {
      get() {
        // @ts-ignore
        octokit.log.warn(new deprecation.Deprecation(`[@octokit/plugin-rest-endpoint-methods] "octokit.${deprecatedScope}.*" methods are deprecated, use "octokit.${scope}.*" instead`));
        // @ts-ignore
        return octokit[scope];
      }
    });
  }

  return {};
}
|
|
// Surface the bundled plugin version on the exported plugin function.
restEndpointMethods.VERSION = VERSION;
|
|
|
|
exports.restEndpointMethods = restEndpointMethods;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 850:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = paginationMethodsPlugin

/**
 * Octokit plugin installing the legacy pagination helpers
 * (octokit-pagination-methods).
 *
 * The get* helpers are bound to this octokit instance — presumably so they
 * can issue the follow-up page requests through it (TODO confirm against the
 * required modules); the has* helpers are attached unbound. The numeric
 * arguments are webpack module ids assigned at bundle time and must stay
 * literal.
 *
 * @param {object} octokit - Octokit instance to augment (mutated in place).
 */
function paginationMethodsPlugin (octokit) {
  octokit.getFirstPage = __webpack_require__(777).bind(null, octokit)
  octokit.getLastPage = __webpack_require__(649).bind(null, octokit)
  octokit.getNextPage = __webpack_require__(550).bind(null, octokit)
  octokit.getPreviousPage = __webpack_require__(563).bind(null, octokit)
  octokit.hasFirstPage = __webpack_require__(536)
  octokit.hasLastPage = __webpack_require__(336)
  octokit.hasNextPage = __webpack_require__(929)
  octokit.hasPreviousPage = __webpack_require__(558)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 854:
|
|
/***/ (function(module) {
|
|
|
|
/**
|
|
* lodash (Custom Build) <https://lodash.com/>
|
|
* Build: `lodash modularize exports="npm" -o ./`
|
|
* Copyright jQuery Foundation and other contributors <https://jquery.org/>
|
|
* Released under MIT license <https://lodash.com/license>
|
|
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
|
|
* Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
|
|
*/
|
|
|
|
// ---- Module-scope constants for this lodash custom build (get/memoize) ----

/** Used as the `TypeError` message for "Functions" methods. */
var FUNC_ERROR_TEXT = 'Expected a function';

/** Used to stand-in for `undefined` hash values. */
var HASH_UNDEFINED = '__lodash_hash_undefined__';

/** Used as references for various `Number` constants. */
var INFINITY = 1 / 0;

/** `Object#toString` result references. */
var funcTag = '[object Function]',
    genTag = '[object GeneratorFunction]',
    symbolTag = '[object Symbol]';

/** Used to match property names within property paths. */
var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,
    reIsPlainProp = /^\w*$/,
    reLeadingDot = /^\./,
    rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g;

/**
 * Used to match `RegExp`
 * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
 */
var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;

/** Used to match backslashes in property paths. */
var reEscapeChar = /\\(\\)?/g;

/** Used to detect host constructors (Safari). */
var reIsHostCtor = /^\[object .+?Constructor\]$/;

/** Detect free variable `global` from Node.js. */
var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;

/** Detect free variable `self` (browsers / web workers). */
var freeSelf = typeof self == 'object' && self && self.Object === Object && self;

/**
 * Used as a reference to the global object: Node `global`, browser `self`,
 * or — as a last resort — the result of `Function('return this')()`.
 */
var root = freeGlobal || freeSelf || Function('return this')();
|
|
|
|
/**
|
|
* Gets the value at `key` of `object`.
|
|
*
|
|
* @private
|
|
* @param {Object} [object] The object to query.
|
|
* @param {string} key The key of the property to get.
|
|
* @returns {*} Returns the property value.
|
|
*/
|
|
// Null-safe property read: yields `undefined` when `object` is null/undefined.
function getValue(object, key) {
  if (object == null) {
    return undefined;
  }
  return object[key];
}
|
|
|
|
/**
|
|
* Checks if `value` is a host object in IE < 9.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is a host object, else `false`.
|
|
*/
|
|
// Detects IE < 9 host objects: values that coerce to a string despite having
// an improperly defined (non-callable) `toString`. Ordinary values, and any
// value whose coercion throws, report `false`.
function isHostObject(value) {
  if (value == null || typeof value.toString == 'function') {
    return false;
  }
  try {
    return !!(value + '');
  } catch (e) {
    return false;
  }
}
|
|
|
|
/** Used for built-in method references. */
var arrayProto = Array.prototype,
    funcProto = Function.prototype,
    objectProto = Object.prototype;

/** Used to detect overreaching core-js shims. */
var coreJsData = root['__core-js_shared__'];

/**
 * Used to detect methods masquerading as native — presumably the key that
 * core-js stamps onto its shimmed functions, derived from the core-js
 * `IE_PROTO` uid (empty string when core-js is absent).
 */
var maskSrcKey = (function() {
  var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
  return uid ? ('Symbol(src)_1.' + uid) : '';
}());

/** Used to resolve the decompiled source of functions. */
var funcToString = funcProto.toString;

/** Used to check objects for own properties. */
var hasOwnProperty = objectProto.hasOwnProperty;

/**
 * Used to resolve the
 * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
 * of values.
 */
var objectToString = objectProto.toString;

/**
 * Used to detect if a method is native: built from this engine's own
 * rendering of `Object.prototype.hasOwnProperty`, with the function-name
 * position generalized so any native function source matches.
 */
var reIsNative = RegExp('^' +
  funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
  .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
);

/** Built-in value references. NOTE: `Symbol` shadows the global within this module. */
var Symbol = root.Symbol,
    splice = arrayProto.splice;

/* Built-in method references that are verified to be native. */
var Map = getNative(root, 'Map'),
    nativeCreate = getNative(Object, 'create');

/** Used to convert symbols to primitives and strings. */
var symbolProto = Symbol ? Symbol.prototype : undefined,
    symbolToString = symbolProto ? symbolProto.toString : undefined;
|
|
|
|
/**
|
|
* Creates a hash object.
|
|
*
|
|
* @private
|
|
* @constructor
|
|
* @param {Array} [entries] The key-value pairs to cache.
|
|
*/
|
|
/**
 * Hash — internal string-keyed cache.
 * Backed by `nativeCreate(null)` (a prototype-less object) when available,
 * so keys like "constructor" cannot collide with Object.prototype members.
 *
 * @private
 * @constructor
 * @param {Array} [entries] The key-value pairs to cache.
 */
function Hash(entries) {
  this.clear();
  var length = entries ? entries.length : 0;
  for (var index = 0; index < length; index++) {
    var entry = entries[index];
    this.set(entry[0], entry[1]);
  }
}

/** Removes all key-value entries from the hash. */
function hashClear() {
  if (nativeCreate) {
    this.__data__ = nativeCreate(null);
  } else {
    this.__data__ = {};
  }
}

/**
 * Removes `key` and its value from the hash.
 * @returns {boolean} `true` if the entry was removed, else `false`.
 */
function hashDelete(key) {
  if (!this.has(key)) {
    return false;
  }
  return delete this.__data__[key];
}

/**
 * Gets the hash value for `key`, translating the HASH_UNDEFINED sentinel back
 * to `undefined` (the sentinel is how a stored `undefined` is distinguished
 * from an absent key on a prototype-less store).
 */
function hashGet(key) {
  var data = this.__data__;
  if (!nativeCreate) {
    return hasOwnProperty.call(data, key) ? data[key] : undefined;
  }
  var result = data[key];
  return result === HASH_UNDEFINED ? undefined : result;
}

/** Checks whether a hash entry for `key` exists. */
function hashHas(key) {
  var data = this.__data__;
  if (nativeCreate) {
    return data[key] !== undefined;
  }
  return hasOwnProperty.call(data, key);
}

/**
 * Sets the hash `key` to `value`; `undefined` values are stored as the
 * HASH_UNDEFINED sentinel so `get`/`has` stay consistent.
 * @returns {Object} The hash instance (chainable).
 */
function hashSet(key, value) {
  var data = this.__data__;
  if (nativeCreate && value === undefined) {
    data[key] = HASH_UNDEFINED;
  } else {
    data[key] = value;
  }
  return this;
}

// Wire the methods onto the prototype.
Hash.prototype.clear = hashClear;
Hash.prototype['delete'] = hashDelete;
Hash.prototype.get = hashGet;
Hash.prototype.has = hashHas;
Hash.prototype.set = hashSet;
|
|
|
|
/**
|
|
* Creates an list cache object.
|
|
*
|
|
* @private
|
|
* @constructor
|
|
* @param {Array} [entries] The key-value pairs to cache.
|
|
*/
|
|
/**
 * ListCache — internal cache backed by an array of [key, value] pairs.
 * Lookups are linear via `assocIndexOf`; used for small maps and as the
 * fallback store when a native `Map` is unavailable.
 *
 * @private
 * @constructor
 * @param {Array} [entries] The key-value pairs to cache.
 */
function ListCache(entries) {
  this.clear();
  var length = entries ? entries.length : 0;
  for (var index = 0; index < length; index++) {
    var entry = entries[index];
    this.set(entry[0], entry[1]);
  }
}

/** Removes all key-value entries from the list cache. */
function listCacheClear() {
  this.__data__ = [];
}

/**
 * Removes `key` and its value from the list cache.
 * @returns {boolean} `true` if the entry was removed, else `false`.
 */
function listCacheDelete(key) {
  var data = this.__data__;
  var index = assocIndexOf(data, key);

  if (index < 0) {
    return false;
  }
  if (index === data.length - 1) {
    // Fast path: removing the last pair needs no element shifting.
    data.pop();
  } else {
    splice.call(data, index, 1);
  }
  return true;
}

/** Gets the value stored for `key`, or `undefined` when absent. */
function listCacheGet(key) {
  var data = this.__data__;
  var index = assocIndexOf(data, key);
  return index < 0 ? undefined : data[index][1];
}

/** Checks whether a list cache entry for `key` exists. */
function listCacheHas(key) {
  return assocIndexOf(this.__data__, key) >= 0;
}

/**
 * Sets the list cache `key` to `value`, overwriting an existing pair in place.
 * @returns {Object} The list cache instance (chainable).
 */
function listCacheSet(key, value) {
  var data = this.__data__;
  var index = assocIndexOf(data, key);

  if (index < 0) {
    data.push([key, value]);
  } else {
    data[index][1] = value;
  }
  return this;
}

// Wire the methods onto the prototype.
ListCache.prototype.clear = listCacheClear;
ListCache.prototype['delete'] = listCacheDelete;
ListCache.prototype.get = listCacheGet;
ListCache.prototype.has = listCacheHas;
ListCache.prototype.set = listCacheSet;
|
|
|
|
/**
|
|
* Creates a map cache object to store key-value pairs.
|
|
*
|
|
* @private
|
|
* @constructor
|
|
* @param {Array} [entries] The key-value pairs to cache.
|
|
*/
|
|
/**
 * MapCache — internal cache that routes each key to one of three backing
 * stores (see `getMapData`): a Hash for string keys, a Hash for other
 * primitive keys, and a native `Map` (or ListCache fallback) for the rest.
 *
 * @private
 * @constructor
 * @param {Array} [entries] The key-value pairs to cache.
 */
function MapCache(entries) {
  this.clear();
  var length = entries ? entries.length : 0;
  for (var index = 0; index < length; index++) {
    var entry = entries[index];
    this.set(entry[0], entry[1]);
  }
}

/** Resets all three backing stores. */
function mapCacheClear() {
  this.__data__ = {
    'hash': new Hash(),
    'map': new (Map || ListCache)(),
    'string': new Hash()
  };
}

/**
 * Removes `key` and its value from its backing store.
 * @returns {boolean} `true` if the entry was removed, else `false`.
 */
function mapCacheDelete(key) {
  return getMapData(this, key)['delete'](key);
}

/** Gets the value stored for `key`, or `undefined` when absent. */
function mapCacheGet(key) {
  return getMapData(this, key).get(key);
}

/** Checks whether a map cache entry for `key` exists. */
function mapCacheHas(key) {
  return getMapData(this, key).has(key);
}

/**
 * Sets the map cache `key` to `value` in the appropriate backing store.
 * @returns {Object} The map cache instance (chainable).
 */
function mapCacheSet(key, value) {
  var bucket = getMapData(this, key);
  bucket.set(key, value);
  return this;
}

// Wire the methods onto the prototype.
MapCache.prototype.clear = mapCacheClear;
MapCache.prototype['delete'] = mapCacheDelete;
MapCache.prototype.get = mapCacheGet;
MapCache.prototype.has = mapCacheHas;
MapCache.prototype.set = mapCacheSet;
|
|
|
|
/**
|
|
* Gets the index at which the `key` is found in `array` of key-value pairs.
|
|
*
|
|
* @private
|
|
* @param {Array} array The array to inspect.
|
|
* @param {*} key The key to search for.
|
|
* @returns {number} Returns the index of the matched value, else `-1`.
|
|
*/
|
|
/**
 * Finds the index of `key` in an `array` of [key, value] pairs, comparing
 * with SameValueZero (`eq`), scanning from the back like the original.
 *
 * @private
 * @param {Array} array The array to inspect.
 * @param {*} key The key to search for.
 * @returns {number} The index of the matched pair, else `-1`.
 */
function assocIndexOf(array, key) {
  for (var index = array.length - 1; index >= 0; index--) {
    if (eq(array[index][0], key)) {
      return index;
    }
  }
  return -1;
}
|
|
|
|
/**
|
|
* The base implementation of `_.get` without support for default values.
|
|
*
|
|
* @private
|
|
* @param {Object} object The object to query.
|
|
* @param {Array|string} path The path of the property to get.
|
|
* @returns {*} Returns the resolved value.
|
|
*/
|
|
/**
 * The base implementation of `_.get` without support for default values:
 * walks `path` one key at a time, bailing out with `undefined` as soon as an
 * intermediate value is nullish.
 *
 * @private
 * @param {Object} object The object to query.
 * @param {Array|string} path The path of the property to get.
 * @returns {*} Returns the resolved value.
 */
function baseGet(object, path) {
  var parts = isKey(path, object) ? [path] : castPath(path);
  var length = parts.length;
  var index = 0;

  while (object != null && index < length) {
    object = object[toKey(parts[index])];
    index += 1;
  }

  // Only a fully-consumed, non-empty path counts as a successful resolution.
  return index > 0 && index == length ? object : undefined;
}
|
|
|
|
/**
|
|
* The base implementation of `_.isNative` without bad shim checks.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is a native function,
|
|
* else `false`.
|
|
*/
|
|
/**
 * The base implementation of `_.isNative` without bad shim checks: a value is
 * native when its source text matches the engine's native-function rendering
 * and it is not a core-js masked shim.
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} `true` if `value` is a native function, else `false`.
 */
function baseIsNative(value) {
  if (!isObject(value) || isMasked(value)) {
    return false;
  }
  // Functions (and IE host objects) are matched against the strict native
  // pattern; anything else against the looser host-constructor pattern.
  var pattern = reIsHostCtor;
  if (isFunction(value) || isHostObject(value)) {
    pattern = reIsNative;
  }
  return pattern.test(toSource(value));
}
|
|
|
|
/**
|
|
* The base implementation of `_.toString` which doesn't convert nullish
|
|
* values to empty strings.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to process.
|
|
* @returns {string} Returns the string.
|
|
*/
|
|
/**
 * The base implementation of `_.toString` which doesn't convert nullish
 * values to empty strings.
 *
 * @private
 * @param {*} value The value to process.
 * @returns {string} Returns the string.
 */
function baseToString(value) {
  // Exit early for strings to avoid a performance hit in some environments.
  if (typeof value == 'string') {
    return value;
  }
  if (isSymbol(value)) {
    return symbolToString ? symbolToString.call(value) : '';
  }
  var result = '' + value;
  // Preserve the sign of -0, which plain string coercion would drop.
  return result == '0' && 1 / value == -INFINITY ? '-0' : result;
}
|
|
|
|
/**
|
|
* Casts `value` to a path array if it's not one.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to inspect.
|
|
* @returns {Array} Returns the cast property path array.
|
|
*/
|
|
// Casts `value` to a path array if it's not one already (arrays pass through
// untouched; strings are parsed by `stringToPath`).
function castPath(value) {
  if (isArray(value)) {
    return value;
  }
  return stringToPath(value);
}
|
|
|
|
/**
|
|
* Gets the data for `map`.
|
|
*
|
|
* @private
|
|
* @param {Object} map The map to query.
|
|
* @param {string} key The reference key.
|
|
* @returns {*} Returns the map data.
|
|
*/
|
|
/**
 * Picks the backing store of a MapCache for `key`: non-keyable (object-like)
 * keys go to the `.map` store; string keys to `.string`; other primitives to
 * `.hash`.
 *
 * @private
 * @param {Object} map The map to query.
 * @param {string} key The reference key.
 * @returns {*} Returns the map data.
 */
function getMapData(map, key) {
  var data = map.__data__;
  if (!isKeyable(key)) {
    return data.map;
  }
  return typeof key == 'string' ? data.string : data.hash;
}
|
|
|
|
/**
|
|
* Gets the native function at `key` of `object`.
|
|
*
|
|
* @private
|
|
* @param {Object} object The object to query.
|
|
* @param {string} key The key of the method to get.
|
|
* @returns {*} Returns the function if it's native, else `undefined`.
|
|
*/
|
|
// Gets the function at `key` of `object`, but only if it passes the
// native-function check; shimmed or missing members yield `undefined`.
function getNative(object, key) {
  var value = getValue(object, key);
  if (baseIsNative(value)) {
    return value;
  }
  return undefined;
}
|
|
|
|
/**
|
|
* Checks if `value` is a property name and not a property path.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to check.
|
|
* @param {Object} [object] The object to query keys on.
|
|
* @returns {boolean} Returns `true` if `value` is a property name, else `false`.
|
|
*/
|
|
/**
 * Checks if `value` is a property name and not a property path.
 *
 * @private
 * @param {*} value The value to check.
 * @param {Object} [object] The object to query keys on.
 * @returns {boolean} `true` if `value` is a property name, else `false`.
 */
function isKey(value, object) {
  if (isArray(value)) {
    return false;
  }
  var type = typeof value;
  // Non-string primitives can never encode a nested path.
  if (type == 'number' || type == 'symbol' || type == 'boolean' ||
      value == null || isSymbol(value)) {
    return true;
  }
  // Plain identifiers, or strings with no path syntax, are single keys.
  if (reIsPlainProp.test(value) || !reIsDeepProp.test(value)) {
    return true;
  }
  // A path-looking string still counts as one key if it is literally an own
  // key of `object` (e.g. {'a.b': 1}).
  return object != null && value in Object(object);
}
|
|
|
|
/**
|
|
* Checks if `value` is suitable for use as unique object key.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is suitable, else `false`.
|
|
*/
|
|
// Checks if `value` is suitable for use as a unique object key: primitives
// other than the dangerous '__proto__' string qualify, as does `null`.
function isKeyable(value) {
  var type = typeof value;
  if (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') {
    return value !== '__proto__';
  }
  return value === null;
}
|
|
|
|
/**
|
|
* Checks if `func` has its source masked.
|
|
*
|
|
* @private
|
|
* @param {Function} func The function to check.
|
|
* @returns {boolean} Returns `true` if `func` is masked, else `false`.
|
|
*/
|
|
// Checks if `func` carries the core-js mask key, i.e. whether its source has
// been masked by a shim. Always `false` when core-js is not present
// (maskSrcKey is then the empty string).
function isMasked(func) {
  if (!maskSrcKey) {
    return false;
  }
  return maskSrcKey in func;
}
|
|
|
|
/**
|
|
* Converts `string` to a property path array.
|
|
*
|
|
* @private
|
|
* @param {string} string The string to convert.
|
|
* @returns {Array} Returns the property path array.
|
|
*/
|
|
/**
 * Converts `string` to a property path array, e.g. 'a[0].b' -> ['a', '0', 'b'].
 * Memoized: repeated parses of the same path string reuse the cached array,
 * so callers must not mutate the result.
 *
 * @private
 * @param {string} string The string to convert.
 * @returns {Array} Returns the property path array.
 */
var stringToPath = memoize(function(string) {
  // NOTE(review): `toString` here is presumably lodash's own string-coercion
  // helper defined later in this module, not Object#toString — confirm.
  string = toString(string);

  var result = [];
  if (reLeadingDot.test(string)) {
    // A leading '.' denotes an implicit empty first key.
    result.push('');
  }
  // rePropName yields one match per path segment; quoted bracket segments use
  // the captured inner string (with escapes collapsed), numeric bracket
  // segments use the captured number, everything else uses the raw match.
  string.replace(rePropName, function(match, number, quote, string) {
    result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match));
  });
  return result;
});
|
|
|
|
/**
|
|
* Converts `value` to a string key if it's not a string or symbol.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to inspect.
|
|
* @returns {string|symbol} Returns the key.
|
|
*/
|
|
// Converts `value` to a string key if it's not already a string or symbol;
// the sign of -0 is preserved as '-0'.
function toKey(value) {
  if (typeof value == 'string' || isSymbol(value)) {
    return value;
  }
  var result = '' + value;
  return result == '0' && 1 / value == -INFINITY ? '-0' : result;
}
|
|
|
|
/**
|
|
* Converts `func` to its source code.
|
|
*
|
|
* @private
|
|
* @param {Function} func The function to process.
|
|
* @returns {string} Returns the source code.
|
|
*/
|
|
/**
 * Converts `func` to its source code string, falling back from
 * `Function.prototype.toString` to plain coercion, and finally to '' when
 * both fail (or when `func` is nullish).
 *
 * @private
 * @param {Function} func The function to process.
 * @returns {string} Returns the source code.
 */
function toSource(func) {
  if (func == null) {
    return '';
  }
  try {
    return funcToString.call(func);
  } catch (e) {}
  try {
    return func + '';
  } catch (e) {}
  return '';
}
|
|
|
|
/**
|
|
* Creates a function that memoizes the result of `func`. If `resolver` is
|
|
* provided, it determines the cache key for storing the result based on the
|
|
* arguments provided to the memoized function. By default, the first argument
|
|
* provided to the memoized function is used as the map cache key. The `func`
|
|
* is invoked with the `this` binding of the memoized function.
|
|
*
|
|
* **Note:** The cache is exposed as the `cache` property on the memoized
|
|
* function. Its creation may be customized by replacing the `_.memoize.Cache`
|
|
* constructor with one whose instances implement the
|
|
* [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object)
|
|
* method interface of `delete`, `get`, `has`, and `set`.
|
|
*
|
|
* @static
|
|
* @memberOf _
|
|
* @since 0.1.0
|
|
* @category Function
|
|
* @param {Function} func The function to have its output memoized.
|
|
* @param {Function} [resolver] The function to resolve the cache key.
|
|
* @returns {Function} Returns the new memoized function.
|
|
* @example
|
|
*
|
|
* var object = { 'a': 1, 'b': 2 };
|
|
* var other = { 'c': 3, 'd': 4 };
|
|
*
|
|
* var values = _.memoize(_.values);
|
|
* values(object);
|
|
* // => [1, 2]
|
|
*
|
|
* values(other);
|
|
* // => [3, 4]
|
|
*
|
|
* object.a = 2;
|
|
* values(object);
|
|
* // => [1, 2]
|
|
*
|
|
* // Modify the result cache.
|
|
* values.cache.set(object, ['a', 'b']);
|
|
* values(object);
|
|
* // => ['a', 'b']
|
|
*
|
|
* // Replace `_.memoize.Cache`.
|
|
* _.memoize.Cache = WeakMap;
|
|
*/
|
|
/**
 * Creates a function that memoizes the result of `func`, keyed by the first
 * argument or by `resolver(...args)` when provided. The cache is exposed as
 * `memoized.cache` and its constructor is swappable via `memoize.Cache`
 * (anything implementing `delete`/`get`/`has`/`set`).
 *
 * @param {Function} func The function to have its output memoized.
 * @param {Function} [resolver] The function to resolve the cache key.
 * @returns {Function} Returns the new memoized function.
 * @throws {TypeError} When `func` (or a supplied `resolver`) is not a function.
 */
function memoize(func, resolver) {
  if (typeof func != 'function' || (resolver && typeof resolver != 'function')) {
    throw new TypeError(FUNC_ERROR_TEXT);
  }
  var memoized = function() {
    var args = arguments;
    var key = resolver ? resolver.apply(this, args) : args[0];
    var cache = memoized.cache;

    if (cache.has(key)) {
      return cache.get(key);
    }
    var result = func.apply(this, args);
    // `set` may return a replacement cache per the Map-interface contract,
    // so always re-assign.
    memoized.cache = cache.set(key, result);
    return result;
  };
  memoized.cache = new (memoize.Cache || MapCache);
  return memoized;
}
|
|
|
|
// Assign cache to `_.memoize`: exposes the constructor used for new memoized
// functions' caches, so callers can replace it (e.g. `memoize.Cache = WeakMap`).
memoize.Cache = MapCache;
|
|
|
|
/**
 * SameValueZero comparison
 * ([spec](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)):
 * behaves like `===` except that `NaN` is considered equal to itself.
 *
 * @static
 * @memberOf _
 * @since 4.0.0
 * @category Lang
 * @param {*} value The value to compare.
 * @param {*} other The other value to compare.
 * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
 * @example
 *
 * _.eq('a', 'a');      // => true
 * _.eq('a', Object('a')); // => false
 * _.eq(NaN, NaN);      // => true
 */
function eq(value, other) {
  if (value === other) {
    return true;
  }
  // Only NaN compares unequal to itself, so this matches NaN on both sides.
  return value !== value && other !== other;
}
|
|
|
|
/**
 * Checks if `value` is classified as an `Array` object.
 *
 * @static
 * @memberOf _
 * @since 0.1.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is an array, else `false`.
 * @example
 *
 * _.isArray([1, 2, 3]);
 * // => true
 *
 * _.isArray(document.body.children);
 * // => false
 *
 * _.isArray('abc');
 * // => false
 *
 * _.isArray(_.noop);
 * // => false
 */
// Direct alias of the native check; no fallback is needed on supported runtimes.
var isArray = Array.isArray;
|
|
|
|
/**
 * Checks if `value` is classified as a `Function` object (including
 * generator functions).
 *
 * @static
 * @memberOf _
 * @since 0.1.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a function, else `false`.
 * @example
 *
 * _.isFunction(_);    // => true
 * _.isFunction(/abc/); // => false
 */
function isFunction(value) {
  // Inspect the `Object#toString` tag rather than trusting `typeof`:
  // Safari 8-9 reports 'object' for typed-array and some other constructors.
  if (!isObject(value)) {
    return false;
  }
  var tag = objectToString.call(value);
  return tag == funcTag || tag == genTag;
}
|
|
|
|
/**
 * Checks if `value` is the
 * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
 * of `Object` — i.e. arrays, plain objects, functions, regexes, and boxed
 * primitives, but not `null` or unboxed primitives.
 *
 * @static
 * @memberOf _
 * @since 0.1.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is an object, else `false`.
 * @example
 *
 * _.isObject({});        // => true
 * _.isObject([1, 2, 3]); // => true
 * _.isObject(_.noop);    // => true
 * _.isObject(null);      // => false
 */
function isObject(value) {
  // `null` is typeof 'object' but is not an object here.
  if (!value) {
    return false;
  }
  var kind = typeof value;
  return kind == 'object' || kind == 'function';
}
|
|
|
|
/**
 * Checks if `value` is object-like: not `null` and with a `typeof` result
 * of exactly "object" (so functions are NOT object-like).
 *
 * @static
 * @memberOf _
 * @since 4.0.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is object-like, else `false`.
 * @example
 *
 * _.isObjectLike({});        // => true
 * _.isObjectLike([1, 2, 3]); // => true
 * _.isObjectLike(_.noop);    // => false
 * _.isObjectLike(null);      // => false
 */
function isObjectLike(value) {
  if (!value) {
    return false;
  }
  return typeof value == 'object';
}
|
|
|
|
/**
 * Checks if `value` is classified as a `Symbol` primitive or a boxed
 * `Symbol` object.
 *
 * @static
 * @memberOf _
 * @since 4.0.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a symbol, else `false`.
 * @example
 *
 * _.isSymbol(Symbol.iterator); // => true
 * _.isSymbol('abc');           // => false
 */
function isSymbol(value) {
  // Fast path for symbol primitives.
  if (typeof value == 'symbol') {
    return true;
  }
  // Boxed symbols are objects whose toString tag is '[object Symbol]'.
  return isObjectLike(value) && objectToString.call(value) == symbolTag;
}
|
|
|
|
/**
 * Converts `value` to a string. `null` and `undefined` become the empty
 * string; other conversions (including `-0` sign preservation) are
 * delegated to `baseToString`.
 *
 * @static
 * @memberOf _
 * @since 4.0.0
 * @category Lang
 * @param {*} value The value to process.
 * @returns {string} Returns the string.
 * @example
 *
 * _.toString(null);      // => ''
 * _.toString(-0);        // => '-0'
 * _.toString([1, 2, 3]); // => '1,2,3'
 */
function toString(value) {
  // Loose == deliberately matches both null and undefined.
  if (value == null) {
    return '';
  }
  return baseToString(value);
}
|
|
|
|
/**
 * Gets the value at `path` of `object`. If the resolved value is
 * `undefined`, the `defaultValue` is returned in its place.
 *
 * @static
 * @memberOf _
 * @since 3.7.0
 * @category Object
 * @param {Object} object The object to query.
 * @param {Array|string} path The path of the property to get.
 * @param {*} [defaultValue] The value returned for `undefined` resolved values.
 * @returns {*} Returns the resolved value.
 * @example
 *
 * var object = { 'a': [{ 'b': { 'c': 3 } }] };
 *
 * _.get(object, 'a[0].b.c');            // => 3
 * _.get(object, ['a', '0', 'b', 'c']);  // => 3
 * _.get(object, 'a.b.c', 'default');    // => 'default'
 */
function get(object, path, defaultValue) {
  // No object at all: resolve straight to the default.
  if (object == null) {
    return defaultValue;
  }
  var result = baseGet(object, path);
  return result === undefined ? defaultValue : result;
}
|
|
|
|
module.exports = get;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 855:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = registerPlugin;
|
|
|
|
const factory = __webpack_require__(47);
|
|
|
|
// Returns a new Octokit factory whose plugin list additionally contains
// `pluginFunction`; registering the same plugin twice is a no-op.
function registerPlugin(plugins, pluginFunction) {
  const nextPlugins = plugins.includes(pluginFunction)
    ? plugins
    : plugins.concat(pluginFunction);
  return factory(nextPlugins);
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 856:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = __webpack_require__(141);
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 863:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = authenticationBeforeRequest;
|
|
|
|
const btoa = __webpack_require__(675);
|
|
|
|
const withAuthorizationPrefix = __webpack_require__(143);
|
|
|
|
// Request hook: injects authentication into `options` before a request is
// sent. `state.auth` may be a token string, a {username, password} pair,
// a {clientId, clientSecret} OAuth app, or an async token factory.
function authenticationBeforeRequest(state, options) {
  // Plain token: normalized into an authorization header.
  if (typeof state.auth === "string") {
    options.headers.authorization = withAuthorizationPrefix(state.auth);
    return;
  }

  // Basic auth, plus the optional GitHub one-time password header.
  if (state.auth.username) {
    const credentials = btoa(`${state.auth.username}:${state.auth.password}`);
    options.headers.authorization = `Basic ${credentials}`;
    if (state.otp) {
      options.headers["x-github-otp"] = state.otp;
    }
    return;
  }

  if (state.auth.clientId) {
    // There is a special case for OAuth applications, when `clientId` and `clientSecret` is passed as
    // Basic Authorization instead of query parameters. The only routes where that applies share the same
    // URL though: `/applications/:client_id/tokens/:access_token`.
    //
    // 1. [Check an authorization](https://developer.github.com/v3/oauth_authorizations/#check-an-authorization)
    // 2. [Reset an authorization](https://developer.github.com/v3/oauth_authorizations/#reset-an-authorization)
    // 3. [Revoke an authorization for an application](https://developer.github.com/v3/oauth_authorizations/#revoke-an-authorization-for-an-application)
    //
    // We identify by checking the URL. It must merge both "/applications/:client_id/tokens/:access_token"
    // as well as "/applications/123/tokens/token456"
    if (/\/applications\/:?[\w_]+\/tokens\/:?[\w_]+($|\?)/.test(options.url)) {
      const credentials = btoa(`${state.auth.clientId}:${state.auth.clientSecret}`);
      options.headers.authorization = `Basic ${credentials}`;
      return;
    }
    // Every other route: client id/secret travel as query parameters.
    const separator = options.url.indexOf("?") === -1 ? "?" : "&";
    options.url += `${separator}client_id=${state.auth.clientId}&client_secret=${state.auth.clientSecret}`;
    return;
  }

  // Otherwise `state.auth` is a (possibly async) token factory.
  return Promise.resolve()
    .then(() => state.auth())
    .then(authorization => {
      options.headers.authorization = withAuthorizationPrefix(authorization);
    });
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 866:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
var shebangRegex = __webpack_require__(816);
|
|
|
|
module.exports = function (str) {
|
|
var match = str.match(shebangRegex);
|
|
|
|
if (!match) {
|
|
return null;
|
|
}
|
|
|
|
var arr = match[0].replace(/#! ?/, '').split(' ');
|
|
var bin = arr[0].split('/').pop();
|
|
var arg = arr[1];
|
|
|
|
return (bin === 'env' ?
|
|
arg :
|
|
bin + (arg ? ' ' + arg : '')
|
|
);
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 874:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// Copyright (c) Microsoft. All rights reserved.
|
|
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
|
|
// TypeScript's emitted `__awaiter` helper (reused if one is already present
// on `this`): drives a generator function as if it were an async function,
// resolving each yielded value through the supplied Promise constructor `P`.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    return new (P || (P = Promise))(function (resolve, reject) {
        // `fulfilled`/`rejected` feed await results (or errors) back into the generator.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // When the generator finishes, settle the outer promise; otherwise chain the next step.
        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
// Bundled Node built-ins: 'url', 'http', 'https'.
const url = __webpack_require__(835);
const http = __webpack_require__(605);
const https = __webpack_require__(34);
// Loaded lazily: `fs` only when a cert config is supplied, `tunnel` only
// when a proxy is actually used (see HttpClient constructor / _getAgent).
let fs;
let tunnel;
|
|
// HTTP status codes used by this client; built as a TypeScript-style enum
// (numeric value -> name reverse mapping included).
var HttpCodes;
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
|
|
// Status codes that HttpClient.request() follows as redirects.
const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
// Status codes eligible for retry (gateway/availability failures only).
const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
// Only idempotent read verbs are retried; writes may not be idempotent.
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
// Backoff delay is ExponentialBackoffTimeSlice * 2^n ms, with n capped at
// ExponentialBackoffCeiling (see _performExponentialBackoff).
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
|
|
// Wraps an incoming response message and exposes its body as a Promise.
class HttpClientResponse {
    constructor(message) {
        // Raw message; callers read statusCode/headers from it directly.
        this.message = message;
    }
    // Buffers the entire response stream and resolves it as one string.
    readBody() {
        return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
            let buffered = '';
            this.message.on('end', () => {
                resolve(buffered);
            });
            this.message.on('data', (piece) => {
                buffered += piece;
            });
        }));
    }
}
|
|
exports.HttpClientResponse = HttpClientResponse;
|
|
// Reports whether `requestUrl` uses the `https:` scheme.
function isHttps(requestUrl) {
    const protocol = url.parse(requestUrl).protocol;
    return protocol === 'https:';
}
|
|
exports.isHttps = isHttps;
|
|
// Names of the proxy environment variables consulted by _getProxy
// (TypeScript string-enum emit).
var EnvironmentVariables;
(function (EnvironmentVariables) {
    EnvironmentVariables["HTTP_PROXY"] = "HTTP_PROXY";
    EnvironmentVariables["HTTPS_PROXY"] = "HTTPS_PROXY";
})(EnvironmentVariables || (EnvironmentVariables = {}));
|
|
/**
 * HTTP/HTTPS client with optional proxy tunneling, redirect following,
 * retry with exponential backoff, keep-alive agents, client certificates,
 * and pluggable authentication handlers.
 *
 * NOTE(review): on redirect, the same prepared headers (including any
 * authorization header) are re-sent to the new location, even when the
 * redirect crosses to a different host — confirm this is acceptable for the
 * URLs this client is used with.
 */
class HttpClient {
    /**
     * @param userAgent      value sent as the `user-agent` header
     * @param handlers       optional auth handlers, consulted on 401 responses
     * @param requestOptions optional settings: ignoreSslError, socketTimeout,
     *                       proxy (+ proxyBypassHosts), cert, allowRedirects,
     *                       maxRedirects, keepAlive, allowRetries, maxRetries
     */
    constructor(userAgent, handlers, requestOptions) {
        // Defaults; overridden from requestOptions below when provided.
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            this._httpProxy = requestOptions.proxy;
            if (requestOptions.proxy && requestOptions.proxy.proxyBypassHosts) {
                // Pre-compile bypass patterns; matched case-insensitively in _isBypassProxy.
                this._httpProxyBypassHosts = [];
                requestOptions.proxy.proxyBypassHosts.forEach(bypass => {
                    this._httpProxyBypassHosts.push(new RegExp(bypass, 'i'));
                });
            }
            this._certConfig = requestOptions.cert;
            if (this._certConfig) {
                // If using cert, need fs
                fs = __webpack_require__(747);
                // cache the cert content into memory, so we don't have to read it from disk every time
                if (this._certConfig.caFile && fs.existsSync(this._certConfig.caFile)) {
                    this._ca = fs.readFileSync(this._certConfig.caFile, 'utf8');
                }
                if (this._certConfig.certFile && fs.existsSync(this._certConfig.certFile)) {
                    this._cert = fs.readFileSync(this._certConfig.certFile, 'utf8');
                }
                if (this._certConfig.keyFile && fs.existsSync(this._certConfig.keyFile)) {
                    this._key = fs.readFileSync(this._certConfig.keyFile, 'utf8');
                }
            }
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.maxRedirects != null) {
                // Negative values are clamped to zero (no redirects).
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    // --- Convenience wrappers around request() for each HTTP verb ---
    options(requestUrl, additionalHeaders) {
        return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
    }
    get(requestUrl, additionalHeaders) {
        return this.request('GET', requestUrl, null, additionalHeaders || {});
    }
    del(requestUrl, additionalHeaders) {
        return this.request('DELETE', requestUrl, null, additionalHeaders || {});
    }
    post(requestUrl, data, additionalHeaders) {
        return this.request('POST', requestUrl, data, additionalHeaders || {});
    }
    patch(requestUrl, data, additionalHeaders) {
        return this.request('PATCH', requestUrl, data, additionalHeaders || {});
    }
    put(requestUrl, data, additionalHeaders) {
        return this.request('PUT', requestUrl, data, additionalHeaders || {});
    }
    head(requestUrl, additionalHeaders) {
        return this.request('HEAD', requestUrl, null, additionalHeaders || {});
    }
    // Sends a readable stream as the request body.
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return this.request(verb, requestUrl, stream, additionalHeaders);
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     *
     * Handles, in order: a 401 authentication challenge via the registered
     * handlers, redirect following (up to _maxRedirects per attempt), and
     * retry with exponential backoff for 502/503/504 on idempotent verbs.
     */
    request(verb, requestUrl, data, headers) {
        return __awaiter(this, void 0, void 0, function* () {
            if (this._disposed) {
                throw new Error("Client has already been disposed.");
            }
            let info = this._prepareRequest(verb, requestUrl, headers);
            // Only perform retries on reads since writes may not be idempotent.
            let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
            let numTries = 0;
            let response;
            while (numTries < maxTries) {
                response = yield this.requestRaw(info, data);
                // Check if it's an authentication challenge
                if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
                    let authenticationHandler;
                    for (let i = 0; i < this.handlers.length; i++) {
                        if (this.handlers[i].canHandleAuthentication(response)) {
                            authenticationHandler = this.handlers[i];
                            break;
                        }
                    }
                    if (authenticationHandler) {
                        return authenticationHandler.handleAuthentication(this, info, data);
                    }
                    else {
                        // We have received an unauthorized response but have no handlers to handle it.
                        // Let the response return to the caller.
                        return response;
                    }
                }
                let redirectsRemaining = this._maxRedirects;
                while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
                    && this._allowRedirects
                    && redirectsRemaining > 0) {
                    const redirectUrl = response.message.headers["location"];
                    if (!redirectUrl) {
                        // if there's no location to redirect to, we won't
                        break;
                    }
                    // we need to finish reading the response before reassigning response
                    // which will leak the open socket.
                    yield response.readBody();
                    // let's make the request with the new redirectUrl
                    info = this._prepareRequest(verb, redirectUrl, headers);
                    response = yield this.requestRaw(info, data);
                    redirectsRemaining--;
                }
                if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
                    // If not a retry code, return immediately instead of retrying
                    return response;
                }
                numTries += 1;
                if (numTries < maxTries) {
                    // Drain the failed response (frees the socket) before backing off.
                    yield response.readBody();
                    yield this._performExponentialBackoff(numTries);
                }
            }
            return response;
        });
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     * Destroys the cached keep-alive agent and marks the client unusable.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info
     * @param data
     */
    requestRaw(info, data) {
        return new Promise((resolve, reject) => {
            let callbackForResult = function (err, res) {
                if (err) {
                    reject(err);
                }
                // NOTE(review): no early return after reject — resolve(res) also
                // runs on the error path. Harmless (a settled promise ignores
                // later calls) but worth tidying upstream.
                resolve(res);
            };
            this.requestRawWithCallback(info, data, callbackForResult);
        });
    }
    /**
     * Raw request with callback.
     * @param info
     * @param data
     * @param onResult
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        // (isDataString is computed but unused below; kept as-is.)
        let isDataString = typeof (data) === 'string';
        if (typeof (data) === 'string') {
            info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
        }
        // Guard so onResult fires at most once (error and timeout can race).
        let callbackCalled = false;
        let handleResult = (err, res) => {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        };
        let req = info.httpModule.request(info.options, (msg) => {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', (sock) => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error('Request timeout: ' + info.options.path), null);
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof (data) === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof (data) !== 'string') {
            // Stream payload: end the request when the source stream closes.
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
    // Builds the per-request info object: parsed URL, http/https module, and
    // node request options; registered handlers may decorate the options.
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = url.parse(requestUrl);
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
        info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        info.options.headers["user-agent"] = this.userAgent;
        info.options.agent = this._getAgent(requestUrl);
        // gives handlers an opportunity to participate
        if (this.handlers && !this._isPresigned(requestUrl)) {
            this.handlers.forEach((handler) => {
                handler.prepareRequest(info.options);
            });
        }
        return info;
    }
    // True when requestUrl matches one of requestOptions.presignedUrlPatterns;
    // auth handlers are skipped for pre-signed URLs.
    _isPresigned(requestUrl) {
        if (this.requestOptions && this.requestOptions.presignedUrlPatterns) {
            const patterns = this.requestOptions.presignedUrlPatterns;
            for (let i = 0; i < patterns.length; i++) {
                if (requestUrl.match(patterns[i])) {
                    return true;
                }
            }
        }
        return false;
    }
    // Merges default headers (from requestOptions) with per-request headers.
    // All header names are lower-cased; per-request values win.
    _mergeHeaders(headers) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
        }
        return lowercaseKeys(headers || {});
    }
    // Selects the agent for a request: a cached keep-alive agent, a proxy
    // tunnel agent, a fresh keep-alive agent, or the module's global agent.
    _getAgent(requestUrl) {
        let agent;
        let proxy = this._getProxy(requestUrl);
        let useProxy = proxy.proxyUrl && proxy.proxyUrl.hostname && !this._isBypassProxy(requestUrl);
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (!!agent) {
            return agent;
        }
        let parsedUrl = url.parse(requestUrl);
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (!!this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        if (useProxy) {
            // If using proxy, need tunnel
            if (!tunnel) {
                tunnel = __webpack_require__(856);
            }
            const agentOptions = {
                maxSockets: maxSockets,
                keepAlive: this._keepAlive,
                proxy: {
                    proxyAuth: proxy.proxyAuth,
                    host: proxy.proxyUrl.hostname,
                    port: proxy.proxyUrl.port
                },
            };
            let tunnelAgent;
            // Pick the tunnel variant by (request scheme) x (proxy scheme).
            const overHttps = proxy.proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
        }
        if (usingSsl && this._certConfig) {
            agent.options = Object.assign(agent.options || {}, { ca: this._ca, cert: this._cert, key: this._key, passphrase: this._certConfig.passphrase });
        }
        return agent;
    }
    // Resolves proxy settings from requestOptions.proxy, falling back to the
    // HTTPS_PROXY / HTTP_PROXY environment variables.
    _getProxy(requestUrl) {
        const parsedUrl = url.parse(requestUrl);
        let usingSsl = parsedUrl.protocol === 'https:';
        let proxyConfig = this._httpProxy;
        // fallback to http_proxy and https_proxy env
        let https_proxy = process.env[EnvironmentVariables.HTTPS_PROXY];
        let http_proxy = process.env[EnvironmentVariables.HTTP_PROXY];
        if (!proxyConfig) {
            if (https_proxy && usingSsl) {
                proxyConfig = {
                    proxyUrl: https_proxy
                };
            }
            else if (http_proxy) {
                proxyConfig = {
                    proxyUrl: http_proxy
                };
            }
        }
        let proxyUrl;
        let proxyAuth;
        if (proxyConfig) {
            // NOTE(review): assumes proxyConfig.proxyUrl is always a string when a
            // proxy config object is supplied — an undefined proxyUrl would throw
            // on `.length` here; confirm upstream callers.
            if (proxyConfig.proxyUrl.length > 0) {
                proxyUrl = url.parse(proxyConfig.proxyUrl);
            }
            if (proxyConfig.proxyUsername || proxyConfig.proxyPassword) {
                proxyAuth = proxyConfig.proxyUsername + ":" + proxyConfig.proxyPassword;
            }
        }
        return { proxyUrl: proxyUrl, proxyAuth: proxyAuth };
    }
    // True when requestUrl matches any configured proxy-bypass pattern.
    _isBypassProxy(requestUrl) {
        if (!this._httpProxyBypassHosts) {
            return false;
        }
        let bypass = false;
        this._httpProxyBypassHosts.forEach(bypassHost => {
            if (bypassHost.test(requestUrl)) {
                bypass = true;
            }
        });
        return bypass;
    }
    // Waits ExponentialBackoffTimeSlice * 2^retryNumber ms, with retryNumber
    // capped at ExponentialBackoffCeiling.
    _performExponentialBackoff(retryNumber) {
        retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
        const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
        return new Promise(resolve => setTimeout(() => resolve(), ms));
    }
}
|
|
exports.HttpClient = HttpClient;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 881:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
|
|
// The ENOENT emulation below only applies on Windows.
const isWin = process.platform === 'win32';
|
|
|
|
// Builds an ENOENT Error shaped like the one Node itself raises when a
// spawned command does not exist (code/errno/syscall/path/spawnargs).
function notFoundError(original, syscall) {
    const err = new Error(`${syscall} ${original.command} ENOENT`);
    err.code = 'ENOENT';
    err.errno = 'ENOENT';
    err.syscall = `${syscall} ${original.command}`;
    err.path = original.command;
    err.spawnargs = original.args;
    return err;
}
|
|
|
|
// Patches a child process's `emit` so that, on Windows, a spurious
// exit-code-1 for a missing command is surfaced as an 'error' event —
// mirroring POSIX ENOENT behaviour.
// See https://github.com/IndigoUnited/node-cross-spawn/issues/16
function hookChildProcess(cp, parsed) {
    // Nothing to emulate outside Windows.
    if (!isWin) {
        return;
    }

    const rawEmit = cp.emit;

    cp.emit = function (name, arg1) {
        if (name === 'exit') {
            const enoent = verifyENOENT(arg1, parsed, 'spawn');

            if (enoent) {
                return rawEmit.call(cp, 'error', enoent);
            }
        }

        return rawEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
    };
}
|
|
|
|
// On Windows, exit status 1 with no resolved file means the command was not
// found; synthesize the corresponding 'spawn' ENOENT error.
function verifyENOENT(status, parsed) {
    const commandMissing = isWin && status === 1 && !parsed.file;
    return commandMissing ? notFoundError(parsed.original, 'spawn') : null;
}
|
|
|
|
// Synchronous variant of verifyENOENT: same detection, 'spawnSync' syscall.
function verifyENOENTSync(status, parsed) {
    const commandMissing = isWin && status === 1 && !parsed.file;
    return commandMissing ? notFoundError(parsed.original, 'spawnSync') : null;
}
|
|
|
|
// Public surface of the cross-spawn ENOENT helpers.
module.exports = {
    hookChildProcess,
    verifyENOENT,
    verifyENOENTSync,
    notFoundError,
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 883:
|
|
/***/ (function(module) {
|
|
|
|
/**
|
|
* lodash (Custom Build) <https://lodash.com/>
|
|
* Build: `lodash modularize exports="npm" -o ./`
|
|
* Copyright jQuery Foundation and other contributors <https://jquery.org/>
|
|
* Released under MIT license <https://lodash.com/license>
|
|
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
|
|
* Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
|
|
*/
|
|
|
|
/** Used as the `TypeError` message for "Functions" methods. */
var FUNC_ERROR_TEXT = 'Expected a function';

/** Sentinel stored in hashes to stand in for `undefined` values, so that a
    stored `undefined` can be told apart from a missing key. */
var HASH_UNDEFINED = '__lodash_hash_undefined__';

/** Used as references for various `Number` constants. */
var INFINITY = 1 / 0,
    MAX_SAFE_INTEGER = 9007199254740991;

/** `Object#toString` result references (type tags). */
var funcTag = '[object Function]',
    genTag = '[object GeneratorFunction]',
    symbolTag = '[object Symbol]';

/** Used to match property names within property paths (e.g. `a[0].b.c`). */
var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,
    reIsPlainProp = /^\w*$/,
    reLeadingDot = /^\./,
    rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g;

/**
 * Used to match `RegExp`
 * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns)
 * so they can be escaped.
 */
var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;

/** Used to match backslashes in property paths. */
var reEscapeChar = /\\(\\)?/g;

/** Used to detect host constructors (Safari). */
var reIsHostCtor = /^\[object .+?Constructor\]$/;

/** Used to detect unsigned integer values. */
var reIsUint = /^(?:0|[1-9]\d*)$/;

/** Detect free variable `global` from Node.js. */
var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;

/** Detect free variable `self` (browsers / web workers). */
var freeSelf = typeof self == 'object' && self && self.Object === Object && self;

/** Used as a reference to the global object, whichever environment we run in. */
var root = freeGlobal || freeSelf || Function('return this')();
|
|
|
|
/**
 * Gets the value at `key` of `object`, tolerating a nullish object.
 *
 * @private
 * @param {Object} [object] The object to query.
 * @param {string} key The key of the property to get.
 * @returns {*} Returns the property value, or `undefined` when `object`
 *  is `null`/`undefined`.
 */
function getValue(object, key) {
  if (object == null) {
    return undefined;
  }
  return object[key];
}
|
|
|
|
/**
 * Checks if `value` is a host object in IE < 9: such objects coerce to a
 * string even though their own `toString` is not a function.
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a host object, else `false`.
 */
function isHostObject(value) {
  // Normal values either are nullish or expose a callable `toString`.
  if (value == null || typeof value.toString == 'function') {
    return false;
  }
  try {
    // Host objects still coerce despite the improperly defined `toString`.
    return !!(value + '');
  } catch (e) {
    return false;
  }
}
|
|
|
|
/** Used for built-in method references. */
var arrayProto = Array.prototype,
    funcProto = Function.prototype,
    objectProto = Object.prototype;

/** Used to detect overreaching core-js shims. */
var coreJsData = root['__core-js_shared__'];

/** Used to detect methods masquerading as native (core-js marks itself here). */
var maskSrcKey = (function() {
  var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
  return uid ? ('Symbol(src)_1.' + uid) : '';
}());

/** Used to resolve the decompiled source of functions. */
var funcToString = funcProto.toString;

/** Used to check objects for own properties. */
var hasOwnProperty = objectProto.hasOwnProperty;

/**
 * Used to resolve the
 * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
 * of values.
 */
var objectToString = objectProto.toString;

/** Used to detect if a method is native: matches the decompiled source shape
    of a built-in function (derived from `hasOwnProperty`'s own source). */
var reIsNative = RegExp('^' +
  funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
  .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
);

/** Built-in value references. */
var Symbol = root.Symbol,
    splice = arrayProto.splice;

/* Built-in method references that are verified to be native. */
var Map = getNative(root, 'Map'),
    nativeCreate = getNative(Object, 'create');

/** Used to convert symbols to primitives and strings (absent when the
    runtime has no `Symbol`). */
var symbolProto = Symbol ? Symbol.prototype : undefined,
    symbolToString = symbolProto ? symbolProto.toString : undefined;
|
|
|
|
/**
 * Creates a hash object backed by a (preferably prototype-free) plain
 * object. Used by `MapCache` for string/primitive keys.
 *
 * @private
 * @constructor
 * @param {Array} [entries] The key-value pairs to cache.
 */
function Hash(entries) {
  var index = -1,
      length = entries ? entries.length : 0;

  this.clear();
  while (++index < length) {
    var entry = entries[index];
    this.set(entry[0], entry[1]);
  }
}

/**
 * Removes all key-value entries from the hash.
 *
 * @private
 * @name clear
 * @memberOf Hash
 */
function hashClear() {
  // `Object.create(null)` (when native) avoids inherited keys like
  // 'constructor' colliding with stored keys.
  this.__data__ = nativeCreate ? nativeCreate(null) : {};
}

/**
 * Removes `key` and its value from the hash.
 *
 * @private
 * @name delete
 * @memberOf Hash
 * @param {string} key The key of the value to remove.
 * @returns {boolean} Returns `true` if the entry was removed, else `false`.
 */
function hashDelete(key) {
  return this.has(key) && delete this.__data__[key];
}

/**
 * Gets the hash value for `key`.
 *
 * @private
 * @name get
 * @memberOf Hash
 * @param {string} key The key of the value to get.
 * @returns {*} Returns the entry value.
 */
function hashGet(key) {
  var data = this.__data__;
  if (nativeCreate) {
    // With a prototype-free store, a sentinel distinguishes a stored
    // `undefined` from an absent key.
    var result = data[key];
    return result === HASH_UNDEFINED ? undefined : result;
  }
  return hasOwnProperty.call(data, key) ? data[key] : undefined;
}

/**
 * Checks if a hash value for `key` exists.
 *
 * @private
 * @name has
 * @memberOf Hash
 * @param {string} key The key of the entry to check.
 * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
 */
function hashHas(key) {
  var data = this.__data__;
  return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key);
}

/**
 * Sets the hash `key` to `value`.
 *
 * @private
 * @name set
 * @memberOf Hash
 * @param {string} key The key of the value to set.
 * @param {*} value The value to set.
 * @returns {Object} Returns the hash instance.
 */
function hashSet(key, value) {
  var data = this.__data__;
  // Store the sentinel for `undefined` so `hashHas`/`hashGet` can tell
  // "present but undefined" apart from "absent".
  data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
  return this;
}

// Add methods to `Hash`.
Hash.prototype.clear = hashClear;
Hash.prototype['delete'] = hashDelete;
Hash.prototype.get = hashGet;
Hash.prototype.has = hashHas;
Hash.prototype.set = hashSet;
|
|
|
|
/**
 * Creates an list cache object: an array of `[key, value]` pairs with a
 * Map-like interface. Used as the `Map` fallback in `MapCache`.
 *
 * @private
 * @constructor
 * @param {Array} [entries] The key-value pairs to cache.
 */
function ListCache(entries) {
  var index = -1,
      length = entries ? entries.length : 0;

  this.clear();
  while (++index < length) {
    var entry = entries[index];
    this.set(entry[0], entry[1]);
  }
}

/**
 * Removes all key-value entries from the list cache.
 *
 * @private
 * @name clear
 * @memberOf ListCache
 */
function listCacheClear() {
  this.__data__ = [];
}

/**
 * Removes `key` and its value from the list cache.
 *
 * @private
 * @name delete
 * @memberOf ListCache
 * @param {string} key The key of the value to remove.
 * @returns {boolean} Returns `true` if the entry was removed, else `false`.
 */
function listCacheDelete(key) {
  var data = this.__data__,
      index = assocIndexOf(data, key);

  if (index < 0) {
    return false;
  }
  var lastIndex = data.length - 1;
  // `pop` is cheaper than `splice` when removing the last entry.
  if (index == lastIndex) {
    data.pop();
  } else {
    splice.call(data, index, 1);
  }
  return true;
}

/**
 * Gets the list cache value for `key`.
 *
 * @private
 * @name get
 * @memberOf ListCache
 * @param {string} key The key of the value to get.
 * @returns {*} Returns the entry value.
 */
function listCacheGet(key) {
  var data = this.__data__,
      index = assocIndexOf(data, key);

  return index < 0 ? undefined : data[index][1];
}

/**
 * Checks if a list cache value for `key` exists.
 *
 * @private
 * @name has
 * @memberOf ListCache
 * @param {string} key The key of the entry to check.
 * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
 */
function listCacheHas(key) {
  return assocIndexOf(this.__data__, key) > -1;
}

/**
 * Sets the list cache `key` to `value`.
 *
 * @private
 * @name set
 * @memberOf ListCache
 * @param {string} key The key of the value to set.
 * @param {*} value The value to set.
 * @returns {Object} Returns the list cache instance.
 */
function listCacheSet(key, value) {
  var data = this.__data__,
      index = assocIndexOf(data, key);

  if (index < 0) {
    data.push([key, value]);
  } else {
    data[index][1] = value;
  }
  return this;
}

// Add methods to `ListCache`.
ListCache.prototype.clear = listCacheClear;
ListCache.prototype['delete'] = listCacheDelete;
ListCache.prototype.get = listCacheGet;
ListCache.prototype.has = listCacheHas;
ListCache.prototype.set = listCacheSet;
|
|
|
|
/**
 * Creates a map cache object to store key-value pairs. Keys are routed
 * by type (see `getMapData`) into one of three backing stores.
 *
 * @private
 * @constructor
 * @param {Array} [entries] The key-value pairs to cache.
 */
function MapCache(entries) {
  var index = -1,
      length = entries ? entries.length : 0;

  this.clear();
  while (++index < length) {
    var entry = entries[index];
    this.set(entry[0], entry[1]);
  }
}

/**
 * Removes all key-value entries from the map.
 *
 * @private
 * @name clear
 * @memberOf MapCache
 */
function mapCacheClear() {
  // Strings and other keyable primitives use `Hash` stores; everything
  // else uses native `Map` when available, `ListCache` otherwise.
  this.__data__ = {
    'hash': new Hash,
    'map': new (Map || ListCache),
    'string': new Hash
  };
}

/**
 * Removes `key` and its value from the map.
 *
 * @private
 * @name delete
 * @memberOf MapCache
 * @param {string} key The key of the value to remove.
 * @returns {boolean} Returns `true` if the entry was removed, else `false`.
 */
function mapCacheDelete(key) {
  return getMapData(this, key)['delete'](key);
}

/**
 * Gets the map value for `key`.
 *
 * @private
 * @name get
 * @memberOf MapCache
 * @param {string} key The key of the value to get.
 * @returns {*} Returns the entry value.
 */
function mapCacheGet(key) {
  return getMapData(this, key).get(key);
}

/**
 * Checks if a map value for `key` exists.
 *
 * @private
 * @name has
 * @memberOf MapCache
 * @param {string} key The key of the entry to check.
 * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
 */
function mapCacheHas(key) {
  return getMapData(this, key).has(key);
}

/**
 * Sets the map `key` to `value`.
 *
 * @private
 * @name set
 * @memberOf MapCache
 * @param {string} key The key of the value to set.
 * @param {*} value The value to set.
 * @returns {Object} Returns the map cache instance.
 */
function mapCacheSet(key, value) {
  getMapData(this, key).set(key, value);
  return this;
}

// Add methods to `MapCache`.
MapCache.prototype.clear = mapCacheClear;
MapCache.prototype['delete'] = mapCacheDelete;
MapCache.prototype.get = mapCacheGet;
MapCache.prototype.has = mapCacheHas;
MapCache.prototype.set = mapCacheSet;
|
|
|
|
/**
 * Assigns `value` to `key` of `object` unless the existing own value is
 * already equivalent under
 * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero).
 *
 * @private
 * @param {Object} object The object to modify.
 * @param {string} key The key of the property to assign.
 * @param {*} value The value to assign.
 */
function assignValue(object, key, value) {
  var current = object[key];
  var alreadyEqual = hasOwnProperty.call(object, key) && eq(current, value);
  var missingUndefined = value === undefined && !(key in object);

  if (!alreadyEqual || missingUndefined) {
    object[key] = value;
  }
}
|
|
|
|
/**
 * Gets the index at which `key` is found in `array` of key-value pairs,
 * scanning from the end (matches insertion-order expectations of the
 * list cache, which stores unique keys).
 *
 * @private
 * @param {Array} array The array to inspect.
 * @param {*} key The key to search for.
 * @returns {number} Returns the index of the matched value, else `-1`.
 */
function assocIndexOf(array, key) {
  for (var index = array.length - 1; index >= 0; index--) {
    if (eq(array[index][0], key)) {
      return index;
    }
  }
  return -1;
}
|
|
|
|
/**
 * The base implementation of `_.isNative` without bad shim checks:
 * rejects non-objects and core-js-masked functions, then matches the
 * value's decompiled source against the native-function pattern.
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a native function,
 *  else `false`.
 */
function baseIsNative(value) {
  if (!isObject(value) || isMasked(value)) {
    return false;
  }
  var callableOrHost = isFunction(value) || isHostObject(value);
  var pattern = callableOrHost ? reIsNative : reIsHostCtor;
  return pattern.test(toSource(value));
}
|
|
|
|
/**
 * The base implementation of `_.set`: walks (and lazily creates) the
 * property chain described by `path`, assigning `value` at the last key.
 *
 * @private
 * @param {Object} object The object to modify.
 * @param {Array|string} path The path of the property to set.
 * @param {*} value The value to set.
 * @param {Function} [customizer] The function to customize path creation.
 * @returns {Object} Returns `object` (mutated in place).
 */
function baseSet(object, path, value, customizer) {
  if (!isObject(object)) {
    return object;
  }
  // A plain property name becomes a single-element path; everything
  // else is parsed into segments.
  path = isKey(path, object) ? [path] : castPath(path);

  var index = -1,
      length = path.length,
      lastIndex = length - 1,
      nested = object;

  while (nested != null && ++index < length) {
    var key = toKey(path[index]),
        newValue = value;

    if (index != lastIndex) {
      // Intermediate segment: reuse an existing object, or create a
      // container — an array when the NEXT key looks like an index,
      // a plain object otherwise.
      var objValue = nested[key];
      newValue = customizer ? customizer(objValue, key, nested) : undefined;
      if (newValue === undefined) {
        newValue = isObject(objValue)
          ? objValue
          : (isIndex(path[index + 1]) ? [] : {});
      }
    }
    assignValue(nested, key, newValue);
    nested = nested[key];
  }
  return object;
}
|
|
|
|
/**
 * The base implementation of `_.toString` which doesn't convert nullish
 * values to empty strings. Preserves the sign of `-0` and stringifies
 * symbols via `Symbol.prototype.toString`.
 *
 * @private
 * @param {*} value The value to process.
 * @returns {string} Returns the string.
 */
function baseToString(value) {
  // Exit early for strings to avoid a performance hit in some environments.
  if (typeof value == 'string') {
    return value;
  }
  if (isSymbol(value)) {
    return symbolToString ? symbolToString.call(value) : '';
  }
  var result = '' + value;
  var negativeZero = result == '0' && (1 / value) == -INFINITY;
  return negativeZero ? '-0' : result;
}
|
|
|
|
/**
 * Casts `value` to a path array if it's not one already.
 *
 * @private
 * @param {*} value The value to inspect.
 * @returns {Array} Returns the cast property path array.
 */
function castPath(value) {
  if (isArray(value)) {
    return value;
  }
  return stringToPath(value);
}
|
|
|
|
/**
 * Gets the backing store of `map` responsible for `key`: the string or
 * hash store for keyable primitives, the map store for everything else.
 *
 * @private
 * @param {Object} map The map to query.
 * @param {string} key The reference key.
 * @returns {*} Returns the map data.
 */
function getMapData(map, key) {
  var data = map.__data__;
  if (!isKeyable(key)) {
    return data.map;
  }
  return typeof key == 'string' ? data.string : data.hash;
}
|
|
|
|
/**
 * Gets the native function at `key` of `object`, or `undefined` when
 * the value there is absent or not a genuine native.
 *
 * @private
 * @param {Object} object The object to query.
 * @param {string} key The key of the method to get.
 * @returns {*} Returns the function if it's native, else `undefined`.
 */
function getNative(object, key) {
  var candidate = getValue(object, key);
  return baseIsNative(candidate) ? candidate : undefined;
}
|
|
|
|
/**
 * Checks if `value` is a valid array-like index: a non-negative integer
 * (or unsigned-integer-looking string) below `length`.
 *
 * @private
 * @param {*} value The value to check.
 * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index.
 * @returns {boolean} Returns `true` if `value` is a valid index, else `false`.
 */
function isIndex(value, length) {
  if (length == null) {
    length = MAX_SAFE_INTEGER;
  }
  if (!length) {
    return false;
  }
  var numericLooking = typeof value == 'number' || reIsUint.test(value);
  return numericLooking && value > -1 && value % 1 == 0 && value < length;
}
|
|
|
|
/**
 * Checks if `value` is a property name and not a property path.
 *
 * @private
 * @param {*} value The value to check.
 * @param {Object} [object] The object to query keys on.
 * @returns {boolean} Returns `true` if `value` is a property name, else `false`.
 */
function isKey(value, object) {
  if (isArray(value)) {
    return false;
  }
  var type = typeof value;
  // Non-string primitives can never be paths.
  if (type == 'number' || type == 'symbol' || type == 'boolean' ||
      value == null || isSymbol(value)) {
    return true;
  }
  // Plain identifiers, strings without path syntax, and strings that are
  // literally a key on `object` count as names.
  if (reIsPlainProp.test(value) || !reIsDeepProp.test(value)) {
    return true;
  }
  return object != null && value in Object(object);
}
|
|
|
|
/**
 * Checks if `value` is suitable for use as unique object key (i.e. can
 * be stored in a `Hash` rather than the map store). `'__proto__'` is
 * excluded to avoid prototype pollution of the backing object.
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is suitable, else `false`.
 */
function isKeyable(value) {
  switch (typeof value) {
    case 'string':
    case 'number':
    case 'symbol':
    case 'boolean':
      return value !== '__proto__';
    default:
      return value === null;
  }
}
|
|
|
|
/**
 * Checks if `func` has its source masked (i.e. carries the core-js
 * marker key detected at module load).
 *
 * @private
 * @param {Function} func The function to check.
 * @returns {boolean} Returns `true` if `func` is masked, else `false`.
 */
function isMasked(func) {
  if (!maskSrcKey) {
    return false;
  }
  return maskSrcKey in func;
}
|
|
|
|
/**
 * Converts `string` to a property path array. Memoized, since path
 * strings are typically reused across `_.set` calls.
 *
 * @private
 * @param {string} string The string to convert.
 * @returns {Array} Returns the property path array.
 */
var stringToPath = memoize(function(string) {
  string = toString(string);

  var result = [];
  // A leading dot denotes an implicit empty first key (e.g. '.a').
  if (reLeadingDot.test(string)) {
    result.push('');
  }
  // `rePropName` matches one segment at a time: a bracketed number, a
  // bracketed quoted string (unescaped via `reEscapeChar`), or a plain
  // dot-separated name. `replace` is used purely for its iteration.
  string.replace(rePropName, function(match, number, quote, string) {
    result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match));
  });
  return result;
});
|
|
|
|
/**
 * Converts `value` to a string key if it's not a string or symbol,
 * preserving the sign of `-0`.
 *
 * @private
 * @param {*} value The value to inspect.
 * @returns {string|symbol} Returns the key.
 */
function toKey(value) {
  if (typeof value == 'string' || isSymbol(value)) {
    return value;
  }
  var key = '' + value;
  var negativeZero = key == '0' && (1 / value) == -INFINITY;
  return negativeZero ? '-0' : key;
}
|
|
|
|
/**
 * Converts `func` to its source code, falling back from
 * `Function#toString` to plain string coercion, and to `''` when both
 * fail (e.g. for revoked proxies or hostile host objects).
 *
 * @private
 * @param {Function} func The function to process.
 * @returns {string} Returns the source code.
 */
function toSource(func) {
  if (func == null) {
    return '';
  }
  try {
    return funcToString.call(func);
  } catch (e) {}
  try {
    return '' + func;
  } catch (e) {}
  return '';
}
|
|
|
|
/**
 * Creates a function that memoizes the result of `func`. If `resolver`
 * is provided, it determines the cache key from the arguments; by
 * default the first argument is the key. `func` is invoked with the
 * `this` binding of the memoized function.
 *
 * **Note:** The cache is exposed as the `cache` property on the
 * memoized function. Its constructor may be replaced via
 * `_.memoize.Cache` with any implementation of the `Map` method
 * interface of `delete`, `get`, `has`, and `set`
 * (e.g. `_.memoize.Cache = WeakMap`).
 *
 * @static
 * @memberOf _
 * @since 0.1.0
 * @category Function
 * @param {Function} func The function to have its output memoized.
 * @param {Function} [resolver] The function to resolve the cache key.
 * @returns {Function} Returns the new memoized function.
 * @throws {TypeError} When `func` (or a provided `resolver`) is not callable.
 */
function memoize(func, resolver) {
  var funcCallable = typeof func == 'function';
  var resolverOk = !resolver || typeof resolver == 'function';
  if (!funcCallable || !resolverOk) {
    throw new TypeError(FUNC_ERROR_TEXT);
  }
  var memoized = function() {
    var args = arguments;
    var key = resolver ? resolver.apply(this, args) : args[0];
    var cache = memoized.cache;

    if (cache.has(key)) {
      return cache.get(key);
    }
    var result = func.apply(this, args);
    // `set` may return a new cache instance; keep the reference fresh.
    memoized.cache = cache.set(key, result);
    return result;
  };
  memoized.cache = new (memoize.Cache || MapCache);
  return memoized;
}

// Assign cache to `_.memoize`.
memoize.Cache = MapCache;
|
|
|
|
/**
 * Performs a
 * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
 * comparison between two values: like `===`, except `NaN` is considered
 * equal to itself (and `0` equal to `-0`).
 *
 * @static
 * @memberOf _
 * @since 4.0.0
 * @category Lang
 * @param {*} value The value to compare.
 * @param {*} other The other value to compare.
 * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
 * @example
 *
 * _.eq(NaN, NaN);
 * // => true
 *
 * _.eq('a', Object('a'));
 * // => false
 */
function eq(value, other) {
  if (value === other) {
    return true;
  }
  // Only `NaN` is not strictly equal to itself.
  return value !== value && other !== other;
}
|
|
|
|
/**
 * Checks if `value` is classified as an `Array` object. Direct alias of
 * the native `Array.isArray`.
 *
 * @static
 * @memberOf _
 * @since 0.1.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is an array, else `false`.
 * @example
 *
 * _.isArray([1, 2, 3]);
 * // => true
 *
 * _.isArray('abc');
 * // => false
 */
var isArray = Array.isArray;
|
|
|
|
/**
 * Checks if `value` is classified as a `Function` object.
 *
 * @static
 * @memberOf _
 * @since 0.1.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a function, else `false`.
 */
function isFunction(value) {
  if (!isObject(value)) {
    return false;
  }
  // `Object#toString` sidesteps Safari 8-9, where `typeof` reports
  // 'object' for typed array and other constructors.
  var tag = objectToString.call(value);
  return tag == funcTag || tag == genTag;
}
|
|
|
|
/**
 * Checks if `value` is the
 * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
 * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`,
 * and `new String('')`)
 *
 * @static
 * @memberOf _
 * @since 0.1.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is an object, else `false`.
 * @example
 *
 * _.isObject({});
 * // => true
 *
 * _.isObject(null);
 * // => false
 */
function isObject(value) {
  if (value == null) {
    return false;
  }
  var type = typeof value;
  return type == 'object' || type == 'function';
}
|
|
|
|
/**
 * Checks if `value` is object-like. A value is object-like if it's not
 * `null` and has a `typeof` result of "object" (so functions are
 * excluded, unlike `isObject`).
 *
 * @static
 * @memberOf _
 * @since 4.0.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is object-like, else `false`.
 * @example
 *
 * _.isObjectLike({});
 * // => true
 *
 * _.isObjectLike(_.noop);
 * // => false
 */
function isObjectLike(value) {
  return value != null && typeof value == 'object';
}
|
|
|
|
/**
 * Checks if `value` is classified as a `Symbol` primitive or object.
 *
 * @static
 * @memberOf _
 * @since 4.0.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a symbol, else `false`.
 */
function isSymbol(value) {
  if (typeof value == 'symbol') {
    return true;
  }
  // Boxed symbols (`Object(Symbol())`) are detected via their tag.
  return isObjectLike(value) && objectToString.call(value) == symbolTag;
}
|
|
|
|
/**
 * Converts `value` to a string. An empty string is returned for `null`
 * and `undefined` values. The sign of `-0` is preserved.
 *
 * @static
 * @memberOf _
 * @since 4.0.0
 * @category Lang
 * @param {*} value The value to process.
 * @returns {string} Returns the string.
 * @example
 *
 * _.toString(null);
 * // => ''
 *
 * _.toString(-0);
 * // => '-0'
 */
function toString(value) {
  if (value == null) {
    return '';
  }
  return baseToString(value);
}
|
|
|
|
/**
 * Sets the value at `path` of `object`. If a portion of `path` doesn't
 * exist, it's created: arrays for missing index-like properties, plain
 * objects for all other missing properties. Use `_.setWith` to
 * customize `path` creation.
 *
 * **Note:** This method mutates `object`.
 *
 * @static
 * @memberOf _
 * @since 3.7.0
 * @category Object
 * @param {Object} object The object to modify.
 * @param {Array|string} path The path of the property to set.
 * @param {*} value The value to set.
 * @returns {Object} Returns `object`.
 * @example
 *
 * var object = { 'a': [{ 'b': { 'c': 3 } }] };
 * _.set(object, 'a[0].b.c', 4);
 * // object.a[0].b.c => 4
 */
function set(object, path, value) {
  if (object == null) {
    return object;
  }
  return baseSet(object, path, value);
}

module.exports = set;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 898:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
var request = __webpack_require__(753);
|
|
var universalUserAgent = __webpack_require__(796);
|
|
|
|
const VERSION = "4.3.1";
|
|
|
|
/**
 * Error thrown when a GraphQL response carries an `errors` array.
 * The message is taken from the first error entry, and all properties
 * of `response.data` (including `errors`) are copied onto the instance.
 */
class GraphqlError extends Error {
  constructor(request, response) {
    const message = response.data.errors[0].message;
    super(message);
    Object.assign(this, response.data);
    this.name = "GraphqlError";
    // The request options that produced this error, for debugging.
    this.request = request;

    // Maintains proper stack trace (only available on V8).
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }

}
|
|
|
|
// Option keys passed through to the HTTP transport; all other keys are
// treated as GraphQL variables.
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query"];

/**
 * Sends a GraphQL request through `request` and resolves with the
 * `data` payload of the response.
 *
 * @param {Function} request request function used to perform the HTTP call.
 * @param {string|object} query GraphQL query string, or an options object
 *   that already contains a `query` property.
 * @param {object} [options] transport options (see NON_VARIABLE_OPTIONS)
 *   mixed with GraphQL variables.
 * @returns {Promise<object>} resolves with `response.data.data`.
 * @throws {GraphqlError} when the response carries an `errors` array.
 */
function graphql(request, query, options) {
  // Accept both graphql(request, "query…", options) and
  // graphql(request, { query: "query…", … }) call styles.
  // (Fixed: the original redundantly re-assigned `options` inside both
  // ternary branches.)
  options = typeof query === "string" ? Object.assign({
    query
  }, options) : query;

  const requestOptions = Object.keys(options).reduce((result, key) => {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      result[key] = options[key];
      return result;
    }

    // Everything else becomes a GraphQL variable.
    if (!result.variables) {
      result.variables = {};
    }

    result.variables[key] = options[key];
    return result;
  }, {});

  return request(requestOptions).then(response => {
    if (response.data.errors) {
      throw new GraphqlError(requestOptions, {
        data: response.data
      });
    }

    return response.data.data;
  });
}
|
|
|
|
/**
 * Builds a `graphql` API function bound to
 * `request$1.defaults(newDefaults)`. The returned function also exposes
 * `defaults` (for further chaining) and the module-level request
 * `endpoint` parser.
 *
 * @param {Function} request$1 request function to derive from.
 * @param {object} newDefaults defaults merged into every request.
 * @returns {Function} graphql API function.
 */
function withDefaults(request$1, newDefaults) {
  const newRequest = request$1.defaults(newDefaults);

  const newApi = (query, options) => {
    return graphql(newRequest, query, options);
  };

  return Object.assign(newApi, {
    defaults: withDefaults.bind(null, newRequest),
    // NOTE: always the shared @octokit/request endpoint, not the
    // custom request's own endpoint.
    endpoint: request.request.endpoint
  });
}
|
|
|
|
// Default export: a graphql client POSTing to /graphql with a
// version-stamped user agent.
const graphql$1 = withDefaults(request.request, {
  headers: {
    "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  },
  method: "POST",
  url: "/graphql"
});

/**
 * Builds a graphql API on top of a caller-supplied request function
 * (e.g. an authenticated octokit request).
 *
 * @param {Function} customRequest request function to use.
 * @returns {Function} graphql API function.
 */
function withCustomRequest(customRequest) {
  return withDefaults(customRequest, {
    method: "POST",
    url: "/graphql"
  });
}

exports.graphql = graphql$1;
exports.withCustomRequest = withCustomRequest;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 916:
|
|
/***/ (function(__unusedmodule, exports) {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
const VERSION = "1.0.0";
|
|
|
|
/**
 * Octokit plugin that logs every request: `debug` before dispatch, and
 * `info` with method, path, status and elapsed milliseconds after the
 * request settles (errors are logged the same way, then rethrown).
 *
 * @param octokit Octokit instance
 */

function requestLog(octokit) {
  octokit.hook.wrap("request", (request, options) => {
    octokit.log.debug("request", options);
    const start = Date.now();
    const requestOptions = octokit.request.endpoint.parse(options);
    // Log the path relative to baseUrl to keep lines short.
    const path = requestOptions.url.replace(options.baseUrl, "");
    return request(options).then(response => {
      octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`);
      return response;
    }).catch(error => {
      octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`);
      throw error;
    });
  });
}
requestLog.VERSION = VERSION;

exports.requestLog = requestLog;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 929:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = hasNextPage

const deprecate = __webpack_require__(370)
const getPageLinks = __webpack_require__(577)

/**
 * Deprecated octokit helper: returns the `next` page URL parsed from a
 * Link header (or a value `getPageLinks` accepts), or `undefined` when
 * there is no next page. Emits a deprecation warning on every call.
 *
 * @param {string|object} link Link header value or response carrying one.
 * @returns {string|undefined} URL of the next page, if any.
 */
function hasNextPage (link) {
  deprecate(`octokit.hasNextPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
  return getPageLinks(link).next
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 948:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
|
|
/**
|
|
* Tries to execute a function and discards any error that occurs.
|
|
* @param {Function} fn - Function that might or might not throw an error.
|
|
* @returns {?*} Return-value of the function when no error occurred.
|
|
*/
|
|
module.exports = function(fn) {
|
|
|
|
try { return fn() } catch (e) {}
|
|
|
|
}
|
|
|
|
/***/ }),
|
|
|
|
/***/ 950:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const url = __webpack_require__(835);
|
|
/**
 * Resolves the proxy URL to use for `reqUrl` from the conventional
 * environment variables — `https_proxy`/`HTTPS_PROXY` for https
 * requests, `http_proxy`/`HTTP_PROXY` otherwise — honoring the
 * `no_proxy` list via `checkBypass`. Returns `undefined` when no proxy
 * applies.
 *
 * @param {object} reqUrl parsed request URL (needs protocol/hostname/port).
 * @returns {object|undefined} parsed proxy URL, if one is configured.
 */
function getProxyUrl(reqUrl) {
    let usingSsl = reqUrl.protocol === 'https:';
    let proxyUrl;
    if (checkBypass(reqUrl)) {
        // Host is on the no_proxy list: return undefined.
        return proxyUrl;
    }
    let proxyVar;
    if (usingSsl) {
        proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
    }
    else {
        proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
    }
    if (proxyVar) {
        // NOTE(review): `url.parse` is Node's legacy API; callers depend
        // on its parsed shape, so it is intentionally left as-is.
        proxyUrl = url.parse(proxyVar);
    }
    return proxyUrl;
}
exports.getProxyUrl = getProxyUrl;
|
|
/**
 * Decides whether the proxy should be bypassed for `reqUrl` based on
 * the `no_proxy`/`NO_PROXY` environment variable (a comma-separated,
 * case-insensitive list of hosts, optionally with `:port`).
 *
 * @param {object} reqUrl parsed request URL (hostname/port/protocol).
 * @returns {boolean} `true` when the request should skip the proxy.
 */
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the effective request port: explicit, or by scheme.
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate forms of the request host: bare, and host:port when the
    // port could be determined.
    const candidates = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        candidates.push(`${candidates[0]}:${reqPort}`);
    }
    // Compare each no_proxy entry against the candidates.
    const entries = noProxy
        .split(',')
        .map(entry => entry.trim().toUpperCase())
        .filter(entry => entry);
    return entries.some(entry => candidates.includes(entry));
}
|
|
exports.checkBypass = checkBypass;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 954:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = validateAuth;
|
|
|
|
/**
 * Validates an octokit `auth` option. Accepted forms: a token string, an
 * auth function, `{username, password}`, or `{clientId, clientSecret}`.
 *
 * @param {string|Function|object} auth value to validate.
 * @throws {Error} when none of the accepted shapes match.
 */
function validateAuth(auth) {
  const type = typeof auth;
  if (type === "string" || type === "function") {
    return;
  }

  const isBasic = auth.username && auth.password;
  const isOAuthApp = auth.clientId && auth.clientSecret;
  if (isBasic || isOAuthApp) {
    return;
  }

  throw new Error(`Invalid "auth" option: ${JSON.stringify(auth)}`);
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 955:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const path = __webpack_require__(622);
|
|
const childProcess = __webpack_require__(129);
|
|
const crossSpawn = __webpack_require__(20);
|
|
const stripEof = __webpack_require__(768);
|
|
const npmRunPath = __webpack_require__(621);
|
|
const isStream = __webpack_require__(323);
|
|
const _getStream = __webpack_require__(145);
|
|
const pFinally = __webpack_require__(697);
|
|
const onExit = __webpack_require__(260);
|
|
const errname = __webpack_require__(427);
|
|
const stdio = __webpack_require__(168);
|
|
|
|
const TEN_MEGABYTES = 1000 * 1000 * 10;
|
|
|
|
/**
 * Normalizes execa's (cmd, args, opts) into spawn-ready parameters:
 * merges option defaults, extends the environment, resolves the command
 * (via cross-spawn, or verbatim for the internal Windows-shell path),
 * and computes the stdio configuration.
 *
 * @param {string} cmd command to run.
 * @param {string[]} args command arguments.
 * @param {object} [opts] execa options.
 * @returns {{cmd: string, args: string[], opts: object, parsed: object}}
 */
function handleArgs(cmd, args, opts) {
	let parsed;

	opts = Object.assign({
		extendEnv: true,
		env: {}
	}, opts);

	if (opts.extendEnv) {
		opts.env = Object.assign({}, process.env, opts.env);
	}

	if (opts.__winShell === true) {
		// Internal flag set by handleShell: the command is already a
		// complete shell invocation, so bypass cross-spawn parsing.
		delete opts.__winShell;
		parsed = {
			command: cmd,
			args,
			options: opts,
			file: cmd,
			original: {
				cmd,
				args
			}
		};
	} else {
		parsed = crossSpawn._parse(cmd, args, opts);
	}

	opts = Object.assign({
		maxBuffer: TEN_MEGABYTES,
		buffer: true,
		stripEof: true,
		preferLocal: true,
		localDir: parsed.options.cwd || process.cwd(),
		encoding: 'utf8',
		reject: true,
		cleanup: true
	}, parsed.options);

	opts.stdio = stdio(opts);

	if (opts.preferLocal) {
		// Prepend node_modules/.bin of localDir to PATH.
		opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir}));
	}

	if (opts.detached) {
		// #115
		opts.cleanup = false;
	}

	if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') {
		// #116
		parsed.args.unshift('/q');
	}

	return {
		cmd: parsed.command,
		args: parsed.args,
		opts,
		parsed
	};
}
|
|
|
|
// Feed the `input` option into the child's stdin: pipe when it is a
// stream, otherwise write the value and close stdin. No-op when input
// is null or undefined.
function handleInput(spawned, input) {
  if (input == null) {
    return;
  }

  if (!isStream(input)) {
    spawned.stdin.end(input);
    return;
  }

  input.pipe(spawned.stdin);
}
|
|
|
|
// Post-process a captured output value: strip the trailing newline when
// the `stripEof` option is enabled. Falsy values pass through untouched.
function handleOutput(opts, val) {
  const shouldStrip = Boolean(val) && opts.stripEof;
  return shouldStrip ? stripEof(val) : val;
}
|
|
|
|
// Run `cmd` through a system shell by delegating to `fn` (the async or
// sync execa entry point) with a platform-appropriate shell invocation.
// An explicit `opts.shell` overrides the default shell binary.
function handleShell(fn, cmd, opts) {
  // Copy so the caller's options object is never mutated.
  opts = Object.assign({}, opts);

  // POSIX defaults; replaced below on Windows or when opts.shell is set.
  let file = '/bin/sh';
  let args = ['-c', cmd];

  if (process.platform === 'win32') {
    opts.__winShell = true;
    file = process.env.comspec || 'cmd.exe';
    args = ['/s', '/c', `"${cmd}"`];
    opts.windowsVerbatimArguments = true;
  }

  if (opts.shell) {
    file = opts.shell;
    delete opts.shell;
  }

  return fn(file, args, opts);
}
|
|
|
|
// Collect (or just wait out) one stdio stream of the child process.
// Returns null when the stream does not exist; otherwise a promise that
// resolves with the captured output (or undefined when buffering is off).
// Errors are re-thrown tagged with the stream name.
function getStream(process, stream, {encoding, buffer, maxBuffer}) {
  const source = process[stream];

  if (!source) {
    return null;
  }

  let ret;

  if (!buffer) {
    // Not capturing output: just wait for the stream to finish.
    // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10
    ret = new Promise((resolve, reject) => {
      source.once('end', resolve).once('error', reject);
    });
  } else {
    ret = encoding ?
      _getStream(source, {encoding, maxBuffer}) :
      _getStream.buffer(source, {maxBuffer});
  }

  return ret.catch(err => {
    err.stream = stream;
    err.message = `${stream} ${err.message}`;
    throw err;
  });
}
|
|
|
|
// Build the Error reported for a failed child process. `result` carries
// {stdout, stderr, code, signal, error?}; `options` carries
// {parsed, joinedCmd, timedOut?}. Reuses result.error when spawn itself
// failed, otherwise synthesizes a "Command failed" error, and always
// decorates it with stdout/stderr/failed/signal/cmd/timedOut.
function makeError(result, options) {
  const {stdout, stderr} = result;

  let err = result.error;
  const {code, signal} = result;

  const {parsed, joinedCmd} = options;
  const timedOut = options.timedOut || false;

  if (!err) {
    // Assemble captured output for the message, skipping any stdio fd
    // that was 'inherit' (already shown on the parent's console).
    let output = '';

    if (Array.isArray(parsed.opts.stdio)) {
      if (parsed.opts.stdio[2] !== 'inherit') {
        // NOTE(review): `output` is always empty at this point, so the
        // first ternary branch looks unreachable — preserved as-is.
        output += output.length > 0 ? stderr : `\n${stderr}`;
      }

      if (parsed.opts.stdio[1] !== 'inherit') {
        output += `\n${stdout}`;
      }
    } else if (parsed.opts.stdio !== 'inherit') {
      output = `\n${stderr}${stdout}`;
    }

    err = new Error(`Command failed: ${joinedCmd}${output}`);
    // Negative codes are libuv error numbers; map them to names (e.g. ENOENT).
    err.code = code < 0 ? errname(code) : code;
  }

  err.stdout = stdout;
  err.stderr = stderr;
  err.failed = true;
  err.signal = signal || null;
  err.cmd = joinedCmd;
  err.timedOut = timedOut;

  return err;
}
|
|
|
|
// Render the command plus its arguments as a single display string
// (used for error messages and the `cmd` result property).
function joinCmd(cmd, args) {
  const hasArgs = Array.isArray(args) && args.length > 0;
  return hasArgs ? `${cmd} ${args.join(' ')}` : cmd;
}
|
|
|
|
// Async entry point: spawn `cmd` and return the ChildProcess, augmented
// with then/catch so it can also be awaited for a {stdout, stderr, code,
// failed, killed, signal, cmd, timedOut} result (or a decorated Error
// when the command fails and opts.reject is true).
module.exports = (cmd, args, opts) => {
  const parsed = handleArgs(cmd, args, opts);
  const {encoding, buffer, maxBuffer} = parsed.opts;
  const joinedCmd = joinCmd(cmd, args);

  let spawned;
  try {
    spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts);
  } catch (err) {
    // spawn() can throw synchronously (e.g. invalid options); surface
    // that as a rejected promise instead of an exception.
    return Promise.reject(err);
  }

  let removeExitHandler;
  if (parsed.opts.cleanup) {
    // Kill the child if the parent process exits first.
    removeExitHandler = onExit(() => {
      spawned.kill();
    });
  }

  let timeoutId = null;
  let timedOut = false;

  // Clears the kill timer and the parent-exit handler; safe to call twice.
  const cleanup = () => {
    if (timeoutId) {
      clearTimeout(timeoutId);
      timeoutId = null;
    }

    if (removeExitHandler) {
      removeExitHandler();
    }
  };

  if (parsed.opts.timeout > 0) {
    timeoutId = setTimeout(() => {
      timeoutId = null;
      timedOut = true;
      spawned.kill(parsed.opts.killSignal);
    }, parsed.opts.timeout);
  }

  // Resolves (never rejects) once the child exits or errors, so failures
  // are carried as data and turned into errors later by makeError().
  const processDone = new Promise(resolve => {
    spawned.on('exit', (code, signal) => {
      cleanup();
      resolve({code, signal});
    });

    spawned.on('error', err => {
      cleanup();
      resolve({error: err});
    });

    if (spawned.stdin) {
      spawned.stdin.on('error', err => {
        cleanup();
        resolve({error: err});
      });
    }
  });

  // Tear down stdio streams when the result promise settles early.
  function destroy() {
    if (spawned.stdout) {
      spawned.stdout.destroy();
    }

    if (spawned.stderr) {
      spawned.stderr.destroy();
    }
  }

  // Lazily build the result promise: exit status plus captured stdio.
  const handlePromise = () => pFinally(Promise.all([
    processDone,
    getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}),
    getStream(spawned, 'stderr', {encoding, buffer, maxBuffer})
  ]).then(arr => {
    const result = arr[0];
    result.stdout = arr[1];
    result.stderr = arr[2];

    if (result.error || result.code !== 0 || result.signal !== null) {
      const err = makeError(result, {
        joinedCmd,
        parsed,
        timedOut
      });

      // TODO: missing some timeout logic for killed
      // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203
      // err.killed = spawned.killed || killed;
      err.killed = err.killed || spawned.killed;

      if (!parsed.opts.reject) {
        // reject:false => resolve with the error object instead of throwing.
        return err;
      }

      throw err;
    }

    return {
      stdout: handleOutput(parsed.opts, result.stdout),
      stderr: handleOutput(parsed.opts, result.stderr),
      code: 0,
      failed: false,
      killed: false,
      signal: null,
      cmd: joinedCmd,
      timedOut: false
    };
  }), destroy);

  // cross-spawn hook (presumably normalizes missing-command/ENOENT
  // reporting — see cross-spawn docs to confirm).
  crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed);

  handleInput(spawned, parsed.opts.input);

  // Make the returned ChildProcess awaitable/thenable.
  spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected);
  spawned.catch = onrejected => handlePromise().catch(onrejected);

  return spawned;
};
|
|
|
|
// Convenience wrappers that resolve with a single captured stream.
// TODO: set `stderr: 'ignore'` when that option is implemented
module.exports.stdout = function (...args) {
  return module.exports(...args).then(result => result.stdout);
};

// TODO: set `stdout: 'ignore'` when that option is implemented
module.exports.stderr = function (...args) {
  return module.exports(...args).then(result => result.stderr);
};

// Run a command through the system shell (async variant).
module.exports.shell = function (cmd, opts) {
  return handleShell(module.exports, cmd, opts);
};
|
|
|
|
// Synchronous entry point built on child_process.spawnSync. Mirrors the
// async API's result/error shape (minus `killed`/timeout handling).
module.exports.sync = (cmd, args, opts) => {
  const parsed = handleArgs(cmd, args, opts);
  const joinedCmd = joinCmd(cmd, args);

  if (isStream(parsed.opts.input)) {
    throw new TypeError('The `input` option cannot be a stream in sync mode');
  }

  const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts);
  // Expose the exit status under the same property name the async API uses.
  result.code = result.status;

  const failed =
    Boolean(result.error) || result.status !== 0 || result.signal !== null;

  if (failed) {
    const err = makeError(result, {
      joinedCmd,
      parsed
    });

    if (!parsed.opts.reject) {
      // reject:false => hand back the error object instead of throwing.
      return err;
    }

    throw err;
  }

  return {
    stdout: handleOutput(parsed.opts, result.stdout),
    stderr: handleOutput(parsed.opts, result.stderr),
    code: 0,
    failed: false,
    signal: null,
    cmd: joinedCmd,
    timedOut: false
  };
};

// Run a command through the system shell (sync variant).
module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts);
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 966:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const {PassThrough} = __webpack_require__(413);
|
|
|
|
module.exports = options => {
|
|
options = Object.assign({}, options);
|
|
|
|
const {array} = options;
|
|
let {encoding} = options;
|
|
const buffer = encoding === 'buffer';
|
|
let objectMode = false;
|
|
|
|
if (array) {
|
|
objectMode = !(encoding || buffer);
|
|
} else {
|
|
encoding = encoding || 'utf8';
|
|
}
|
|
|
|
if (buffer) {
|
|
encoding = null;
|
|
}
|
|
|
|
let len = 0;
|
|
const ret = [];
|
|
const stream = new PassThrough({objectMode});
|
|
|
|
if (encoding) {
|
|
stream.setEncoding(encoding);
|
|
}
|
|
|
|
stream.on('data', chunk => {
|
|
ret.push(chunk);
|
|
|
|
if (objectMode) {
|
|
len = ret.length;
|
|
} else {
|
|
len += chunk.length;
|
|
}
|
|
});
|
|
|
|
stream.getBufferedValue = () => {
|
|
if (array) {
|
|
return ret;
|
|
}
|
|
|
|
return buffer ? Buffer.concat(ret, len) : ret.join('');
|
|
};
|
|
|
|
stream.getBufferedLength = () => len;
|
|
|
|
return stream;
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 969:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
var wrappy = __webpack_require__(11)
// wrappy preserves the wrapped function's own properties on the wrapper.
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)

// Calling `once.proto()` patches Function.prototype with `.once()` and
// `.onceStrict()` helpers; the patching function is itself wrapped in
// once() so it only ever runs one time.
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      return once(this)
    },
    configurable: true
  })

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this)
    },
    configurable: true
  })
})
|
|
|
|
// Wrap `fn` so it runs at most once; subsequent calls return the first
// result. The wrapper exposes `called` and `value` for inspection.
function once (fn) {
  var f = function () {
    if (!f.called) {
      f.called = true
      f.value = fn.apply(this, arguments)
    }
    return f.value
  }
  f.called = false
  return f
}
|
|
|
|
// Like once(), but a second invocation throws instead of returning the
// cached value. The error message includes the wrapped function's name.
function onceStrict (fn) {
  var f = function () {
    if (f.called) {
      throw new Error(f.onceError)
    }
    f.called = true
    f.value = fn.apply(this, arguments)
    return f.value
  }
  f.onceError = (fn.name || 'Function wrapped with `once`') +
    " shouldn't be called more than once"
  f.called = false
  return f
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 986:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript's compiled __awaiter helper: drives a generator function to
// completion, adopting each yielded value into a promise of type P
// (defaulting to Promise), so the generator body behaves like an async
// function. Reuses a pre-existing global __awaiter when one is present.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const tr = __webpack_require__(9);
|
|
/**
 * Exec a command.
 * Output will be streamed to the live console.
 * Returns promise with return code
 *
 * @param commandLine command to execute (can include additional args). Must be correctly escaped.
 * @param args optional arguments for tool. Escaping is handled by the lib.
 * @param options optional exec options. See ExecOptions
 * @returns Promise<number> exit code
 */
function exec(commandLine, args, options) {
    return __awaiter(this, void 0, void 0, function* () {
        // Split the command line; the first token is the tool itself.
        const argv = tr.argStringToArray(commandLine);
        if (argv.length === 0) {
            throw new Error(`Parameter 'commandLine' cannot be null or empty.`);
        }
        const toolPath = argv[0];
        // Remaining tokens become leading arguments, followed by `args`.
        const toolArgs = argv.slice(1).concat(args || []);
        const runner = new tr.ToolRunner(toolPath, toolArgs, options);
        return runner.exec();
    });
}
exports.exec = exec;
|
|
//# sourceMappingURL=exec.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 991:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
module.exports = authenticationRequestError;
|
|
|
|
const { RequestError } = __webpack_require__(497);
|
|
|
|
// Retry hook for requests rejected with a "2FA required" 401: obtain a
// one-time password via state.auth.on2fa() and replay the request with
// an `x-github-otp` header. Any other error is re-thrown untouched.
function authenticationRequestError(state, error, options) {
  if (!error.headers) throw error;

  const otpRequired = /required/.test(error.headers["x-github-otp"] || "");
  // handle "2FA required" error only
  if (error.status !== 401 || !otpRequired) {
    throw error;
  }

  // A 401 on a request that already carried an OTP means the stored OTP
  // is stale (drop it and ask again) or the fresh one was rejected.
  const otpWasSent = error.request && error.request.headers["x-github-otp"];
  if (otpWasSent) {
    if (state.otp) {
      delete state.otp; // no longer valid, request again
    } else {
      throw new RequestError(
        "Invalid one-time password for two-factor authentication",
        401,
        {
          headers: error.headers,
          request: options
        }
      );
    }
  }

  if (typeof state.auth.on2fa !== "function") {
    throw new RequestError(
      "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication",
      401,
      {
        headers: error.headers,
        request: options
      }
    );
  }

  return Promise.resolve()
    .then(() => state.auth.on2fa())
    .then(oneTimePassword => {
      const newOptions = Object.assign(options, {
        headers: Object.assign(options.headers, {
          "x-github-otp": oneTimePassword
        })
      });
      return state.octokit.request(newOptions).then(response => {
        // If OTP still valid, then persist it for following requests
        state.otp = oneTimePassword;
        return response;
      });
    });
}
|
|
|
|
|
|
/***/ })
|
|
|
|
/******/ }); |