diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..70bf8bad --- /dev/null +++ b/.editorconfig @@ -0,0 +1,5 @@ +root = true + +[*] +insert_final_newline = true +trim_trailing_whitespace = true diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..25f6404d --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @heroku/languages diff --git a/.github/actions/generate-buildpack-matrix/action.yml b/.github/actions/generate-buildpack-matrix/action.yml new file mode 100644 index 00000000..b51eef64 --- /dev/null +++ b/.github/actions/generate-buildpack-matrix/action.yml @@ -0,0 +1,10 @@ +name: Generate Buildpack Matrix +description: "Generates a list of buildpacks containing id and path keys for use in a matrix strategy `include` list" + +outputs: + buildpacks: + description: The list of buildpack (id, path) keys formatted as a JSON array + +runs: + using: node16 + main: index.js diff --git a/.github/actions/generate-buildpack-matrix/index.js b/.github/actions/generate-buildpack-matrix/index.js new file mode 100644 index 00000000..f2e72e3d --- /dev/null +++ b/.github/actions/generate-buildpack-matrix/index.js @@ -0,0 +1,3 @@ +require('../../bootstrap').invokeWith(() => { + return ['generate-buildpack-matrix'] +}) diff --git a/.github/actions/generate-changelog/action.yml b/.github/actions/generate-changelog/action.yml new file mode 100644 index 00000000..704889b5 --- /dev/null +++ b/.github/actions/generate-changelog/action.yml @@ -0,0 +1,18 @@ +name: Generate Changelog +description: "Generates a changelog from one or more buildpacks in a project" + +inputs: + unreleased: + description: If the changelog should be generated from the unreleased section + required: false + version: + description: If the changelog should be generated from a version section + required: false + +outputs: + changelog: + description: Markdown content listing the changes + +runs: + using: node16 + main: index.js diff --git a/.github/actions/generate-changelog/index.js b/.github/actions/generate-changelog/index.js new file mode 100644 index 00000000..ae9917e3 --- /dev/null +++ b/.github/actions/generate-changelog/index.js @@ -0,0 +1,12 @@ +require('../../bootstrap').invokeWith(({ getInput }) => { + const args = ['generate-changelog']; + + if (getInput('unreleased')) { + args.push('--unreleased') + } else if (getInput('version')) { + args.push('--version') + args.push(getInput('version')) + } + + return args +}) diff --git a/.github/actions/prepare-release/action.yml b/.github/actions/prepare-release/action.yml new file mode 100644 index 00000000..1286f398 --- /dev/null +++ b/.github/actions/prepare-release/action.yml @@ -0,0 +1,20 @@ +name: Prepare Buildpack Release +description: "Prepares a buildpack release by bumping the fixed version and updating changelogs" + +inputs: + bump: + description: Which coordinate should be incremented? 
(major, minor, patch) + required: true + repository_url: + description: The URL of the repository (e.g.; https://github.com/octocat/Hello-World) + default: https://github.com/${{ github.repository }} + +outputs: + from_version: + description: The previous version + to_version: + description: The next version + +runs: + using: node16 + main: index.js diff --git a/.github/actions/prepare-release/index.js b/.github/actions/prepare-release/index.js new file mode 100644 index 00000000..d99cb743 --- /dev/null +++ b/.github/actions/prepare-release/index.js @@ -0,0 +1,11 @@ +require('../../bootstrap').invokeWith(({ getInput }) => { + return [ + 'prepare-release', + + '--bump', + getInput('bump', { required: true }), + + '--repository-url', + getInput('repository_url') + ] +}) diff --git a/.github/actions/update-builder/action.yml b/.github/actions/update-builder/action.yml new file mode 100644 index 00000000..e7c4c7f1 --- /dev/null +++ b/.github/actions/update-builder/action.yml @@ -0,0 +1,22 @@ +name: Update Builder +description: "Prepares a buildpack release by bumping the fixed version and updating changelogs" + +inputs: + buildpack_id: + description: The id of the buildpack + required: true + buildpack_version: + description: The version of the buildpack + required: true + buildpack_uri: + description: The URI of the published buildpack + required: true + builders: + description: A comma-separated list of builders to update + required: true + path: + description: Relative path under $GITHUB_WORKSPACE to execute in + +runs: + using: node16 + main: index.js diff --git a/.github/actions/update-builder/index.js b/.github/actions/update-builder/index.js new file mode 100644 index 00000000..53074c05 --- /dev/null +++ b/.github/actions/update-builder/index.js @@ -0,0 +1,20 @@ +require('../../bootstrap').invokeWith(({ getInput }) => { + return [ + 'update-builder', + + '--path', + getInput('path', { required: true }), + + '--buildpack-id', + getInput('buildpack_id', { required: true }), + + '--buildpack-version', + getInput('buildpack_version', { required: true }), + + '--buildpack-uri', + getInput('buildpack_uri', { required: true }), + + '--builders', + getInput('builders', { required: true }), + ] +}) diff --git a/.github/bootstrap/bootstrap.ts b/.github/bootstrap/bootstrap.ts new file mode 100644 index 00000000..6022cc8d --- /dev/null +++ b/.github/bootstrap/bootstrap.ts @@ -0,0 +1,85 @@ +/** + * IMPORTANT! 
If you change this file be sure to regenerate the compiled version with `npm run build` + */ + +"use strict"; + +import { readFileSync } from "node:fs" +import { join } from "node:path" +import { parse as urlParse } from "node:url" + +import { setFailed, getInput, getBooleanInput, getMultilineInput, startGroup, endGroup, info } from "@actions/core" +import { exec } from "@actions/exec" +import { find, cacheFile, downloadTool, extractTar } from "@actions/tool-cache" +import { parse as tomlParse } from 'toml' + +type GetArguments = (inputs: { + getInput: typeof getInput, + getBooleanInput: typeof getBooleanInput, + getMultilineInput: typeof getMultilineInput +}) => string[] + +export function invokeWith(getArgs: GetArguments) { + executeRustBinaryAction(getArgs).catch(e => { + if (e instanceof Error) { + setFailed(e.message) + } + }) +} + +async function executeRustBinaryAction(getArgs: GetArguments) { + startGroup('Bootstrapping'); + + const { platform, env } = process + const tempDirectory = env.RUNNER_TEMP + + if (platform !== 'win32' && platform !== 'darwin' && platform !== 'linux') { + throw new Error(`Unsupported platform: ${platform}`) + } + + const toml = tomlParse(readFileSync(join(__dirname, "../../Cargo.toml"), 'utf-8')) + + const { name } = toml.bin[0] + info(`name: ${name}`) + + const { repository, version } = toml.package + info(`version: ${version}\nrepository: ${repository}`) + + const binaryName = platform === 'win32' ? `${name}.exe` : name; + info(`binaryName: ${binaryName}`) + + const githubOrgAndName = urlParse(repository).pathname + .replace(/^\//, '') + .replace(/\.git$/, '') + + // now we should be able to build up our download url which looks something like this: + // https://github.com/heroku/languages-github-actions/releases/download/v0.0.0/actions-v0.0.0-darwin-x64.tar.gz + const releaseUrl = `https://github.com/${githubOrgAndName}/releases/download/v${version}/${name}-v${version}-${platform}-x64.tar.gz`; + info(`releaseUrl: ${releaseUrl}`) + + let cachedPath = find(githubOrgAndName, version) + info(`is cached: ${cachedPath ? true : false}`) + if (!cachedPath) { + const downloadPath = await downloadTool(releaseUrl) + info(`downloadPath: ${downloadPath}`) + + const extractPath = await extractTar(downloadPath, tempDirectory) + info(`extractPath: ${extractPath}`) + + const extractedFile = join(extractPath, binaryName) + info(`extractedFile: ${extractedFile}`) + + cachedPath = await cacheFile(extractedFile, binaryName, githubOrgAndName, version) + } + info(`using cache path: ${cachedPath}`) + + const rustBinary = join(cachedPath, name); + info(`using binary: ${rustBinary}`) + + const args = getArgs({ getInput, getBooleanInput, getMultilineInput }); + info(`using args: ${args.join(" ")}`) + + endGroup() + + await exec(rustBinary, args); +} diff --git a/.github/bootstrap/index.js b/.github/bootstrap/index.js new file mode 100644 index 00000000..f90d6888 --- /dev/null +++ b/.github/bootstrap/index.js @@ -0,0 +1,10875 @@ +/******/ (() => { // webpackBootstrap +/******/ var __webpack_modules__ = ({ + +/***/ 7351: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__nccwpck_require__(2037)); +const utils_1 = __nccwpck_require__(5278); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 2186: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = __nccwpck_require__(7351); +const file_command_1 = __nccwpck_require__(717); +const utils_1 = __nccwpck_require__(5278); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const oidc_utils_1 = __nccwpck_require__(8041); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); + } + command_1.issueCommand('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueFileCommand('PATH', inputPath); + } + else { + command_1.issueCommand('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + } + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. 
+ */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(1327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(1327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(2981); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 717: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__nccwpck_require__(7147)); +const os = __importStar(__nccwpck_require__(2037)); +const uuid_1 = __nccwpck_require__(5840); +const utils_1 = __nccwpck_require__(5278); +function issueFileCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map + +/***/ }), + +/***/ 8041: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OidcClient = void 0; +const http_client_1 = __nccwpck_require__(6255); +const auth_1 = __nccwpck_require__(5526); +const core_1 = __nccwpck_require__(2186); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.result.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map + +/***/ }), + +/***/ 2981: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(1017)); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map + +/***/ }), + +/***/ 1327: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(2037); +const fs_1 = __nccwpck_require__(7147); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. 
Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map + +/***/ }), + +/***/ 5278: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 1514: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getExecOutput = exports.exec = void 0; +const string_decoder_1 = __nccwpck_require__(1576); +const tr = __importStar(__nccwpck_require__(8159)); +/** + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code + */ +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); +} +exports.exec = exec; +/** + * Exec a command and get the output. + * Output will be streamed to the live console. + * Returns promise with the exit code and collected stdout and stderr + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code, stdout, and stderr + */ +function getExecOutput(commandLine, args, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + let stdout = ''; + let stderr = ''; + //Using string decoder covers the case where a mult-byte character is split + const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); + const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); + const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? 
void 0 : _b.stderr; + const stdErrListener = (data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); + } + }; + const stdOutListener = (data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); + } + }; + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + //flush any remaining characters + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; + }); +} +exports.getExecOutput = getExecOutput; +//# sourceMappingURL=exec.js.map + +/***/ }), + +/***/ 8159: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.argStringToArray = exports.ToolRunner = void 0; +const os = __importStar(__nccwpck_require__(2037)); +const events = __importStar(__nccwpck_require__(2361)); +const child = __importStar(__nccwpck_require__(2081)); +const path = __importStar(__nccwpck_require__(1017)); +const io = __importStar(__nccwpck_require__(7436)); +const ioUtil = __importStar(__nccwpck_require__(1962)); +const timers_1 = __nccwpck_require__(9512); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. 
+ */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + return s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + return ''; + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. 
+ // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. 
+ let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. 
+ return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + let stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + let errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? 
optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); + } + cp.stdin.end(this.options.input); + } + })); + }); + } +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. + if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } +} +//# sourceMappingURL=toolrunner.js.map + +/***/ }), + +/***/ 5526: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + 
options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 6255: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +const pm = __importStar(__nccwpck_require__(9835)); +const tunnel = __importStar(__nccwpck_require__(4294)); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders 
|| {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
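+        // summary of the fallthrough below: when a cached agent was picked up above it is
+        // returned as-is; otherwise a tunnel agent is built when a proxy applies to this
+        // URL, a dedicated keep-alive agent when keepAlive is enabled, and the shared
+        // global http/https agent is used as the last resort.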
+ if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
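+                // summary of the handling below: status codes above 299 reject the promise
+                // with an HttpClientError whose message comes from the parsed body (or the
+                // raw body text) and whose `result` mirrors the parsed response; anything
+                // else resolves with { statusCode, result, headers }.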
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 9835: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + return new URL(proxyVar); + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} +//# sourceMappingURL=proxy.js.map + +/***/ }), + +/***/ 1962: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; +const fs = __importStar(__nccwpck_require__(7147)); +const path = __importStar(__nccwpck_require__(1017)); +_a = fs.promises +// export const {open} = 'fs' +, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +// export const {open} = 'fs' +exports.IS_WINDOWS = process.platform === 'win32'; +// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 +exports.UV_FS_O_EXLOCK = 0x10000000; +exports.READONLY = fs.constants.O_RDONLY; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). + */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Best effort attempt to determine whether a file exists and is executable. 
+ * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. + */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +// Get the path of cmd.exe in windows +function getCmdPath() { + var _a; + return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; +} +exports.getCmdPath = getCmdPath; +//# sourceMappingURL=io-util.js.map + +/***/ }), + +/***/ 7436: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; +const assert_1 = __nccwpck_require__(9491); +const path = __importStar(__nccwpck_require__(1017)); +const ioUtil = __importStar(__nccwpck_require__(1962)); +/** + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive, copySourceDirectory } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() && copySourceDirectory + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. 
+ */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Check for invalid characters + // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); + } + } + try { + // note if path does not exist, error is silent + yield ioUtil.rm(inputPath, { + force: true, + maxRetries: 3, + recursive: true, + retryDelay: 300 + }); + } + catch (err) { + throw new Error(`File was unable to be removed ${err}`); + } + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + yield ioUtil.mkdir(fsPath, { recursive: true }); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + } + } + return result; + } + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ''; + }); +} +exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. 
+ * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; + } + return []; + } + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; + }); +} +exports.findInPath = findInPath; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + const copySourceDirectory = options.copySourceDirectory == null + ? true + : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } + else { + yield copyFile(srcFile, destFile, force); + } + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); +} +//# sourceMappingURL=io.js.map + +/***/ }), + +/***/ 2473: +/***/ (function(module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports._readLinuxVersionFile = exports._getOsVersion = exports._findMatch = void 0; +const semver = __importStar(__nccwpck_require__(5911)); +const core_1 = __nccwpck_require__(2186); +// needs to be require for core node modules to be mocked +/* eslint @typescript-eslint/no-require-imports: 0 */ +const os = __nccwpck_require__(2037); +const cp = __nccwpck_require__(2081); +const fs = __nccwpck_require__(7147); +function _findMatch(versionSpec, stable, candidates, archFilter) { + return __awaiter(this, void 0, void 0, function* () { + const platFilter = os.platform(); + let result; + let match; + let file; + for (const candidate of candidates) { + const version = candidate.version; + core_1.debug(`check ${version} satisfies ${versionSpec}`); + if (semver.satisfies(version, versionSpec) && + (!stable || candidate.stable === stable)) { + file = candidate.files.find(item => { + core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`); + let chk = item.arch === archFilter && item.platform === platFilter; + if (chk && item.platform_version) { + const osVersion = module.exports._getOsVersion(); + if (osVersion === item.platform_version) { + chk = true; + } + else { + chk = semver.satisfies(osVersion, item.platform_version); + } + } + return chk; + }); + if (file) { + core_1.debug(`matched ${candidate.version}`); + match = candidate; + break; + } + } + } + if (match && file) { + // clone since we're mutating the file list to be only the file that matches + result = Object.assign({}, match); + result.files = [file]; + } + return result; + }); +} +exports._findMatch = _findMatch; +function _getOsVersion() { + // TODO: add windows and other linux, arm variants + // right now 
filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python) + const plat = os.platform(); + let version = ''; + if (plat === 'darwin') { + version = cp.execSync('sw_vers -productVersion').toString(); + } + else if (plat === 'linux') { + // lsb_release process not in some containers, readfile + // Run cat /etc/lsb-release + // DISTRIB_ID=Ubuntu + // DISTRIB_RELEASE=18.04 + // DISTRIB_CODENAME=bionic + // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS" + const lsbContents = module.exports._readLinuxVersionFile(); + if (lsbContents) { + const lines = lsbContents.split('\n'); + for (const line of lines) { + const parts = line.split('='); + if (parts.length === 2 && + (parts[0].trim() === 'VERSION_ID' || + parts[0].trim() === 'DISTRIB_RELEASE')) { + version = parts[1] + .trim() + .replace(/^"/, '') + .replace(/"$/, ''); + break; + } + } + } + } + return version; +} +exports._getOsVersion = _getOsVersion; +function _readLinuxVersionFile() { + const lsbReleaseFile = '/etc/lsb-release'; + const osReleaseFile = '/etc/os-release'; + let contents = ''; + if (fs.existsSync(lsbReleaseFile)) { + contents = fs.readFileSync(lsbReleaseFile).toString(); + } + else if (fs.existsSync(osReleaseFile)) { + contents = fs.readFileSync(osReleaseFile).toString(); + } + return contents; +} +exports._readLinuxVersionFile = _readLinuxVersionFile; +//# sourceMappingURL=manifest.js.map + +/***/ }), + +/***/ 8279: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RetryHelper = void 0; +const core = __importStar(__nccwpck_require__(2186)); +/** + * Internal class for retries + */ +class RetryHelper { + constructor(maxAttempts, minSeconds, maxSeconds) { + if (maxAttempts < 1) { + throw new Error('max attempts should be greater than or equal to 1'); + } + this.maxAttempts = maxAttempts; + this.minSeconds = Math.floor(minSeconds); + this.maxSeconds = Math.floor(maxSeconds); + if (this.minSeconds > this.maxSeconds) { + throw new Error('min seconds should be less than or equal to max seconds'); + } + } + execute(action, isRetryable) { + return __awaiter(this, void 0, void 0, function* () { + let attempt = 1; + while (attempt < this.maxAttempts) { + // Try + try { + return yield action(); + } + catch (err) { + if (isRetryable && !isRetryable(err)) { + throw err; + } + core.info(err.message); + } + // Sleep + const seconds = this.getSleepAmount(); + core.info(`Waiting ${seconds} seconds before trying again`); + yield this.sleep(seconds); + attempt++; + } + // Last attempt + return yield action(); + }); + } + getSleepAmount() { + return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + + this.minSeconds); + } + sleep(seconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, seconds * 1000)); + }); + } +} +exports.RetryHelper = RetryHelper; +//# sourceMappingURL=retry-helper.js.map + +/***/ }), + +/***/ 7784: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateVersions = exports.isExplicitVersion = exports.findFromManifest = exports.getManifestFromRepo = exports.findAllVersions = exports.find = exports.cacheFile = exports.cacheDir = exports.extractZip = exports.extractXar = exports.extractTar = exports.extract7z = exports.downloadTool = exports.HTTPError = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const io = __importStar(__nccwpck_require__(7436)); +const fs = __importStar(__nccwpck_require__(7147)); +const mm = __importStar(__nccwpck_require__(2473)); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const httpm = __importStar(__nccwpck_require__(6255)); +const semver = __importStar(__nccwpck_require__(5911)); +const stream = __importStar(__nccwpck_require__(2781)); +const util = __importStar(__nccwpck_require__(3837)); +const assert_1 = __nccwpck_require__(9491); +const v4_1 = __importDefault(__nccwpck_require__(7468)); +const exec_1 = __nccwpck_require__(1514); +const retry_helper_1 = __nccwpck_require__(8279); +class HTTPError extends Error { + constructor(httpStatusCode) { + super(`Unexpected HTTP response: ${httpStatusCode}`); + this.httpStatusCode = httpStatusCode; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.HTTPError = HTTPError; +const IS_WINDOWS = process.platform === 'win32'; +const IS_MAC = process.platform === 'darwin'; +const userAgent = 'actions/tool-cache'; +/** + * Download a tool from an url and stream it into a file + * + * @param url url of tool to download + * @param dest path to download tool + * @param auth authorization header + * @param headers other headers + * @returns path to downloaded tool + */ +function downloadTool(url, dest, auth, headers) { + return __awaiter(this, void 0, void 0, function* () { + dest = dest || path.join(_getTempDirectory(), v4_1.default()); + yield io.mkdirP(path.dirname(dest)); + core.debug(`Downloading ${url}`); + core.debug(`Destination ${dest}`); + const maxAttempts = 3; + const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10); + const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20); + const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds); + return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () { + return yield downloadToolAttempt(url, dest || '', auth, headers); + }), (err) => { + if (err instanceof HTTPError && err.httpStatusCode) { + // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests + if (err.httpStatusCode < 500 && + err.httpStatusCode !== 408 && + err.httpStatusCode !== 429) { + return false; + } + } + // Otherwise retry + return true; + }); + }); +} +exports.downloadTool = downloadTool; +function downloadToolAttempt(url, dest, auth, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(dest)) { + throw new Error(`Destination file path ${dest} already exists`); + } + // Get the response headers + const http = new httpm.HttpClient(userAgent, [], { + allowRetries: false + }); + if (auth) { + core.debug('set auth'); + if (headers === undefined) { + headers = {}; + } + headers.authorization = auth; + } + const response = yield http.get(url, headers); + if (response.message.statusCode !== 200) { + const err = new HTTPError(response.message.statusCode); + core.debug(`Failed to download from "${url}". 
Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + throw err; + } + // Download the response body + const pipeline = util.promisify(stream.pipeline); + const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message); + const readStream = responseMessageFactory(); + let succeeded = false; + try { + yield pipeline(readStream, fs.createWriteStream(dest)); + core.debug('download complete'); + succeeded = true; + return dest; + } + finally { + // Error, delete dest before retry + if (!succeeded) { + core.debug('download failed'); + try { + yield io.rmRF(dest); + } + catch (err) { + core.debug(`Failed to delete '${dest}'. ${err.message}`); + } + } + } + }); +} +/** + * Extract a .7z file + * + * @param file path to the .7z file + * @param dest destination directory. Optional. + * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this + * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will + * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is + * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line + * interface, it is smaller than the full command line interface, and it does support long paths. At the + * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website. + * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path + * to 7zr.exe can be pass to this function. + * @returns path to the destination directory + */ +function extract7z(file, dest, _7zPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS'); + assert_1.ok(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + const originalCwd = process.cwd(); + process.chdir(dest); + if (_7zPath) { + try { + const logLevel = core.isDebug() ? '-bb1' : '-bb0'; + const args = [ + 'x', + logLevel, + '-bd', + '-sccUTF-8', + file + ]; + const options = { + silent: true + }; + yield exec_1.exec(`"${_7zPath}"`, args, options); + } + finally { + process.chdir(originalCwd); + } + } + else { + const escapedScript = path + .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1') + .replace(/'/g, "''") + .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; + const args = [ + '-NoLogo', + '-Sta', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + command + ]; + const options = { + silent: true + }; + try { + const powershellPath = yield io.which('powershell', true); + yield exec_1.exec(`"${powershellPath}"`, args, options); + } + finally { + process.chdir(originalCwd); + } + } + return dest; + }); +} +exports.extract7z = extract7z; +/** + * Extract a compressed tar archive + * + * @param file path to the tar + * @param dest destination directory. Optional. + * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional. 
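+ * Illustrative usage, a hedged sketch rather than upstream documentation (the
+ * download URL below is hypothetical):
+ *   const tarPath = await downloadTool('https://example.com/tool.tar.gz')
+ *   const extractedDir = await extractTar(tarPath)  // default 'xz' flags, temp dest dir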
+ * @returns path to the destination directory + */ +function extractTar(file, dest, flags = 'xz') { + return __awaiter(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); + } + // Create dest + dest = yield _createExtractFolder(dest); + // Determine whether GNU tar + core.debug('Checking tar --version'); + let versionOutput = ''; + yield exec_1.exec('tar --version', [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + core.debug(versionOutput.trim()); + const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR'); + // Initialize args + let args; + if (flags instanceof Array) { + args = flags; + } + else { + args = [flags]; + } + if (core.isDebug() && !flags.includes('v')) { + args.push('-v'); + } + let destArg = dest; + let fileArg = file; + if (IS_WINDOWS && isGnuTar) { + args.push('--force-local'); + destArg = dest.replace(/\\/g, '/'); + // Technically only the dest needs to have `/` but for aesthetic consistency + // convert slashes in the file arg too. + fileArg = file.replace(/\\/g, '/'); + } + if (isGnuTar) { + // Suppress warnings when using GNU tar to extract archives created by BSD tar + args.push('--warning=no-unknown-keyword'); + args.push('--overwrite'); + } + args.push('-C', destArg, '-f', fileArg); + yield exec_1.exec(`tar`, args); + return dest; + }); +} +exports.extractTar = extractTar; +/** + * Extract a xar compatible archive + * + * @param file path to the archive + * @param dest destination directory. Optional. + * @param flags flags for the xar. Optional. + * @returns path to the destination directory + */ +function extractXar(file, dest, flags = []) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(IS_MAC, 'extractXar() not supported on current OS'); + assert_1.ok(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + let args; + if (flags instanceof Array) { + args = flags; + } + else { + args = [flags]; + } + args.push('-x', '-C', dest, '-f', file); + if (core.isDebug()) { + args.push('-v'); + } + const xarPath = yield io.which('xar', true); + yield exec_1.exec(`"${xarPath}"`, _unique(args)); + return dest; + }); +} +exports.extractXar = extractXar; +/** + * Extract a zip + * + * @param file path to the zip + * @param dest destination directory. Optional. 
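+ * Illustrative usage, again only a sketch with a hypothetical URL:
+ *   const zipPath = await downloadTool('https://example.com/archive.zip')
+ *   const extractedDir = await extractZip(zipPath)  // a temp dir is created when dest is omitted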
+ * @returns path to the destination directory + */ +function extractZip(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); + } + dest = yield _createExtractFolder(dest); + if (IS_WINDOWS) { + yield extractZipWin(file, dest); + } + else { + yield extractZipNix(file, dest); + } + return dest; + }); +} +exports.extractZip = extractZip; +function extractZipWin(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + // build the powershell command + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines + const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const pwshPath = yield io.which('pwsh', false); + //To match the file overwrite behavior on nix systems, we use the overwrite = true flag for ExtractToDirectory + //and the -Force flag for Expand-Archive as a fallback + if (pwshPath) { + //attempt to use pwsh with ExtractToDirectory, if this fails attempt Expand-Archive + const pwshCommand = [ + `$ErrorActionPreference = 'Stop' ;`, + `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`, + `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`, + `catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;` + ].join(' '); + const args = [ + '-NoLogo', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + pwshCommand + ]; + core.debug(`Using pwsh at path: ${pwshPath}`); + yield exec_1.exec(`"${pwshPath}"`, args); + } + else { + const powershellCommand = [ + `$ErrorActionPreference = 'Stop' ;`, + `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`, + `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`, + `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }` + ].join(' '); + const args = [ + '-NoLogo', + '-Sta', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + powershellCommand + ]; + const powershellPath = yield io.which('powershell', true); + core.debug(`Using powershell at path: ${powershellPath}`); + yield exec_1.exec(`"${powershellPath}"`, args); + } + }); +} +function extractZipNix(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + const unzipPath = yield io.which('unzip', true); + const args = [file]; + if (!core.isDebug()) { + args.unshift('-q'); + } + args.unshift('-o'); //overwrite with -o, otherwise a prompt is shown which freezes the run + yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest }); + }); +} +/** + * Caches a directory and installs it into the tool cacheDir + * + * @param sourceDir the directory to cache into tools + * @param tool tool name + * @param version version of the tool. semver format + * @param arch architecture of the tool. Optional. 
Defaults to machine architecture + */ +function cacheDir(sourceDir, tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { + version = semver.clean(version) || version; + arch = arch || os.arch(); + core.debug(`Caching tool ${tool} ${version} ${arch}`); + core.debug(`source dir: ${sourceDir}`); + if (!fs.statSync(sourceDir).isDirectory()) { + throw new Error('sourceDir is not a directory'); + } + // Create the tool dir + const destPath = yield _createToolPath(tool, version, arch); + // copy each child item. do not move. move can fail on Windows + // due to anti-virus software having an open handle on a file. + for (const itemName of fs.readdirSync(sourceDir)) { + const s = path.join(sourceDir, itemName); + yield io.cp(s, destPath, { recursive: true }); + } + // write .complete + _completeToolPath(tool, version, arch); + return destPath; + }); +} +exports.cacheDir = cacheDir; +/** + * Caches a downloaded file (GUID) and installs it + * into the tool cache with a given targetName + * + * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid. + * @param targetFile the name of the file name in the tools directory + * @param tool tool name + * @param version version of the tool. semver format + * @param arch architecture of the tool. Optional. Defaults to machine architecture + */ +function cacheFile(sourceFile, targetFile, tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { + version = semver.clean(version) || version; + arch = arch || os.arch(); + core.debug(`Caching tool ${tool} ${version} ${arch}`); + core.debug(`source file: ${sourceFile}`); + if (!fs.statSync(sourceFile).isFile()) { + throw new Error('sourceFile is not a file'); + } + // create the tool dir + const destFolder = yield _createToolPath(tool, version, arch); + // copy instead of move. move can fail on Windows due to + // anti-virus software having an open handle on a file. + const destPath = path.join(destFolder, targetFile); + core.debug(`destination file ${destPath}`); + yield io.cp(sourceFile, destPath); + // write .complete + _completeToolPath(tool, version, arch); + return destFolder; + }); +} +exports.cacheFile = cacheFile; +/** + * Finds the path to a tool version in the local installed tool cache + * + * @param toolName name of the tool + * @param versionSpec version of the tool + * @param arch optional arch. 
defaults to arch of computer + */ +function find(toolName, versionSpec, arch) { + if (!toolName) { + throw new Error('toolName parameter is required'); + } + if (!versionSpec) { + throw new Error('versionSpec parameter is required'); + } + arch = arch || os.arch(); + // attempt to resolve an explicit version + if (!isExplicitVersion(versionSpec)) { + const localVersions = findAllVersions(toolName, arch); + const match = evaluateVersions(localVersions, versionSpec); + versionSpec = match; + } + // check for the explicit version in the cache + let toolPath = ''; + if (versionSpec) { + versionSpec = semver.clean(versionSpec) || ''; + const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch); + core.debug(`checking cache: ${cachePath}`); + if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) { + core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + toolPath = cachePath; + } + else { + core.debug('not found'); + } + } + return toolPath; +} +exports.find = find; +/** + * Finds the paths to all versions of a tool that are installed in the local tool cache + * + * @param toolName name of the tool + * @param arch optional arch. defaults to arch of computer + */ +function findAllVersions(toolName, arch) { + const versions = []; + arch = arch || os.arch(); + const toolPath = path.join(_getCacheDirectory(), toolName); + if (fs.existsSync(toolPath)) { + const children = fs.readdirSync(toolPath); + for (const child of children) { + if (isExplicitVersion(child)) { + const fullPath = path.join(toolPath, child, arch || ''); + if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) { + versions.push(child); + } + } + } + } + return versions; +} +exports.findAllVersions = findAllVersions; +function getManifestFromRepo(owner, repo, auth, branch = 'master') { + return __awaiter(this, void 0, void 0, function* () { + let releases = []; + const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`; + const http = new httpm.HttpClient('tool-cache'); + const headers = {}; + if (auth) { + core.debug('set auth'); + headers.authorization = auth; + } + const response = yield http.getJson(treeUrl, headers); + if (!response.result) { + return releases; + } + let manifestUrl = ''; + for (const item of response.result.tree) { + if (item.path === 'versions-manifest.json') { + manifestUrl = item.url; + break; + } + } + headers['accept'] = 'application/vnd.github.VERSION.raw'; + let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody(); + if (versionsRaw) { + // shouldn't be needed but protects against invalid json saved with BOM + versionsRaw = versionsRaw.replace(/^\uFEFF/, ''); + try { + releases = JSON.parse(versionsRaw); + } + catch (_a) { + core.debug('Invalid json'); + } + } + return releases; + }); +} +exports.getManifestFromRepo = getManifestFromRepo; +function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) { + return __awaiter(this, void 0, void 0, function* () { + // wrap the internal impl + const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); + return match; + }); +} +exports.findFromManifest = findFromManifest; +function _createExtractFolder(dest) { + return __awaiter(this, void 0, void 0, function* () { + if (!dest) { + // create a temp dir + dest = path.join(_getTempDirectory(), v4_1.default()); + } + yield io.mkdirP(dest); + return dest; + }); +} +function _createToolPath(tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { 
+ const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); + core.debug(`destination ${folderPath}`); + const markerPath = `${folderPath}.complete`; + yield io.rmRF(folderPath); + yield io.rmRF(markerPath); + yield io.mkdirP(folderPath); + return folderPath; + }); +} +function _completeToolPath(tool, version, arch) { + const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); + const markerPath = `${folderPath}.complete`; + fs.writeFileSync(markerPath, ''); + core.debug('finished caching tool'); +} +/** + * Check if version string is explicit + * + * @param versionSpec version string to check + */ +function isExplicitVersion(versionSpec) { + const c = semver.clean(versionSpec) || ''; + core.debug(`isExplicit: ${c}`); + const valid = semver.valid(c) != null; + core.debug(`explicit? ${valid}`); + return valid; +} +exports.isExplicitVersion = isExplicitVersion; +/** + * Get the highest satisfiying semantic version in `versions` which satisfies `versionSpec` + * + * @param versions array of versions to evaluate + * @param versionSpec semantic version spec to satisfy + */ +function evaluateVersions(versions, versionSpec) { + let version = ''; + core.debug(`evaluating ${versions.length} versions`); + versions = versions.sort((a, b) => { + if (semver.gt(a, b)) { + return 1; + } + return -1; + }); + for (let i = versions.length - 1; i >= 0; i--) { + const potential = versions[i]; + const satisfied = semver.satisfies(potential, versionSpec); + if (satisfied) { + version = potential; + break; + } + } + if (version) { + core.debug(`matched: ${version}`); + } + else { + core.debug('match not found'); + } + return version; +} +exports.evaluateVersions = evaluateVersions; +/** + * Gets RUNNER_TOOL_CACHE + */ +function _getCacheDirectory() { + const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || ''; + assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined'); + return cacheDirectory; +} +/** + * Gets RUNNER_TEMP + */ +function _getTempDirectory() { + const tempDirectory = process.env['RUNNER_TEMP'] || ''; + assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); + return tempDirectory; +} +/** + * Gets a global variable + */ +function _getGlobal(key, defaultValue) { + /* eslint-disable @typescript-eslint/no-explicit-any */ + const value = global[key]; + /* eslint-enable @typescript-eslint/no-explicit-any */ + return value !== undefined ? value : defaultValue; +} +/** + * Returns an array of unique values. + * @param values Values to make unique. 
+ */ +function _unique(values) { + return Array.from(new Set(values)); +} +//# sourceMappingURL=tool-cache.js.map + +/***/ }), + +/***/ 7701: +/***/ ((module) => { + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} + +module.exports = bytesToUuid; + + +/***/ }), + +/***/ 7269: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = __nccwpck_require__(6113); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; + + +/***/ }), + +/***/ 7468: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var rng = __nccwpck_require__(7269); +var bytesToUuid = __nccwpck_require__(7701); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || bytesToUuid(rnds); +} + +module.exports = v4; + + +/***/ }), + +/***/ 5911: +/***/ ((module, exports) => { + +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 + +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var t = exports.tokens = {} +var R = 0 + +function tok (n) { + t[n] = R++ +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. 
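+// For example, '0', '7' and '42' match; '01' does not (leading zeros are only
+// accepted by the LOOSE variant below).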
+ +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' + +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' + +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' + +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' + +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' + +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' + +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' 
+ +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' + +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +var tildeTrimReplace = '$1~' + +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' + +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +var caretTrimReplace = '$1^' + +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAIN] + ')' + + '\\s*$' + +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. 
+for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? re[t.LOOSE] : re[t.FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() +} + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} + +SemVer.prototype.toString = function () { + return this.version +} + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +SemVer.prototype.compareBuild = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + var i = 0 + do { + var a = this.build[i] + var b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. 
+ // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} + +exports.compareBuild = compareBuild +function compareBuild (a, b, loose) { + var versionA = new SemVer(a, loose) + var versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(b, a, loose) + }) +} + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} + +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} + +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} + +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) +} + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} + +Comparator.prototype.toString = function () { + return this.value +} + +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + var rangeTmp + + if (this.operator === '') { + if (this.value === '') { + return true + } + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + return new Range(range.value, options) + } + + if (!(this instanceof Range)) { + return new Range(range, options) + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) + } + + this.format() +} + +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} + +Range.prototype.toString = function () { + return this.range +} + 
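+// For example, the range '1.2.3 - 1.4 || ^2.0.0' desugars to the comparator
+// sets [['>=1.2.3', '<1.5.0'], ['>=2.0.0', '<3.0.0']].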
+Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[t.COMPARATORTRIM]) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) + + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some(function (thisComparators) { + return ( + isSatisfiable(thisComparators, options) && + range.set.some(function (rangeComparators) { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every(function (thisComparator) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) +} + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +function isSatisfiable (comparators, options) { + var result = true + var remainingComparators = comparators.slice() + var testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every(function (otherComparator) { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } + + debug('caret return', ret) + return ret + }) +} + +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} + +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + pr + } else if (xm) { + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr + } else if (xp) { + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[t.STAR], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + + return (from + ' ' + to).trim() +} + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} + +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. 
+exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version, options) { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + var match = null + if (!options.rtl) { + match = version.match(re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + var next + while ((next = re[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + re[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { + return null + } + + return parse(match[2] + + '.' + (match[3] || '0') + + '.' 
+ (match[4] || '0'), options) +} + + +/***/ }), + +/***/ 4920: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var parser = __nccwpck_require__(5642); +var compiler = __nccwpck_require__(8215); + +module.exports = { + parse: function(input) { + var nodes = parser.parse(input.toString()); + return compiler.compile(nodes); + } +}; + + +/***/ }), + +/***/ 8215: +/***/ ((module) => { + +"use strict"; + +function compile(nodes) { + var assignedPaths = []; + var valueAssignments = []; + var currentPath = ""; + var data = Object.create(null); + var context = data; + var arrayMode = false; + + return reduce(nodes); + + function reduce(nodes) { + var node; + for (var i = 0; i < nodes.length; i++) { + node = nodes[i]; + switch (node.type) { + case "Assign": + assign(node); + break; + case "ObjectPath": + setPath(node); + break; + case "ArrayPath": + addTableArray(node); + break; + } + } + + return data; + } + + function genError(err, line, col) { + var ex = new Error(err); + ex.line = line; + ex.column = col; + throw ex; + } + + function assign(node) { + var key = node.key; + var value = node.value; + var line = node.line; + var column = node.column; + + var fullPath; + if (currentPath) { + fullPath = currentPath + "." + key; + } else { + fullPath = key; + } + if (typeof context[key] !== "undefined") { + genError("Cannot redefine existing key '" + fullPath + "'.", line, column); + } + + context[key] = reduceValueNode(value); + + if (!pathAssigned(fullPath)) { + assignedPaths.push(fullPath); + valueAssignments.push(fullPath); + } + } + + + function pathAssigned(path) { + return assignedPaths.indexOf(path) !== -1; + } + + function reduceValueNode(node) { + if (node.type === "Array") { + return reduceArrayWithTypeChecking(node.value); + } else if (node.type === "InlineTable") { + return reduceInlineTableNode(node.value); + } else { + return node.value; + } + } + + function reduceInlineTableNode(values) { + var obj = Object.create(null); + for (var i = 0; i < values.length; i++) { + var val = values[i]; + if (val.value.type === "InlineTable") { + obj[val.key] = reduceInlineTableNode(val.value.value); + } else if (val.type === "InlineTableValue") { + obj[val.key] = reduceValueNode(val.value); + } + } + + return obj; + } + + function setPath(node) { + var path = node.value; + var quotedPath = path.map(quoteDottedString).join("."); + var line = node.line; + var column = node.column; + + if (pathAssigned(quotedPath)) { + genError("Cannot redefine existing key '" + path + "'.", line, column); + } + assignedPaths.push(quotedPath); + context = deepRef(data, path, Object.create(null), line, column); + currentPath = path; + } + + function addTableArray(node) { + var path = node.value; + var quotedPath = path.map(quoteDottedString).join("."); + var line = node.line; + var column = node.column; + + if (!pathAssigned(quotedPath)) { + assignedPaths.push(quotedPath); + } + assignedPaths = assignedPaths.filter(function(p) { + return p.indexOf(quotedPath) !== 0; + }); + assignedPaths.push(quotedPath); + context = deepRef(data, path, [], line, column); + currentPath = quotedPath; + + if (context instanceof Array) { + var newObj = Object.create(null); + context.push(newObj); + context = newObj; + } else { + genError("Cannot redefine existing key '" + path + "'.", line, column); + } + } + + // Given a path 'a.b.c', create (as necessary) `start.a`, + // `start.a.b`, and `start.a.b.c`, assigning `value` to `start.a.b.c`. 
+ // If `a` or `b` are arrays and have items in them, the last item in the + // array is used as the context for the next sub-path. + function deepRef(start, keys, value, line, column) { + var traversed = []; + var traversedPath = ""; + var path = keys.join("."); + var ctx = start; + + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + traversed.push(key); + traversedPath = traversed.join("."); + if (typeof ctx[key] === "undefined") { + if (i === keys.length - 1) { + ctx[key] = value; + } else { + ctx[key] = Object.create(null); + } + } else if (i !== keys.length - 1 && valueAssignments.indexOf(traversedPath) > -1) { + // already a non-object value at key, can't be used as part of a new path + genError("Cannot redefine existing key '" + traversedPath + "'.", line, column); + } + + ctx = ctx[key]; + if (ctx instanceof Array && ctx.length && i < keys.length - 1) { + ctx = ctx[ctx.length - 1]; + } + } + + return ctx; + } + + function reduceArrayWithTypeChecking(array) { + // Ensure that all items in the array are of the same type + var firstType = null; + for (var i = 0; i < array.length; i++) { + var node = array[i]; + if (firstType === null) { + firstType = node.type; + } else { + if (node.type !== firstType) { + genError("Cannot add value of type " + node.type + " to array of type " + + firstType + ".", node.line, node.column); + } + } + } + + // Recursively reduce array of nodes into array of the nodes' values + return array.map(reduceValueNode); + } + + function quoteDottedString(str) { + if (str.indexOf(".") > -1) { + return "\"" + str + "\""; + } else { + return str; + } + } +} + +module.exports = { + compile: compile +}; + + +/***/ }), + +/***/ 5642: +/***/ ((module) => { + +module.exports = (function() { + /* + * Generated by PEG.js 0.8.0. + * + * http://pegjs.majda.cz/ + */ + + function peg$subclass(child, parent) { + function ctor() { this.constructor = child; } + ctor.prototype = parent.prototype; + child.prototype = new ctor(); + } + + function SyntaxError(message, expected, found, offset, line, column) { + this.message = message; + this.expected = expected; + this.found = found; + this.offset = offset; + this.line = line; + this.column = column; + + this.name = "SyntaxError"; + } + + peg$subclass(SyntaxError, Error); + + function parse(input) { + var options = arguments.length > 1 ? 
arguments[1] : {}, + + peg$FAILED = {}, + + peg$startRuleFunctions = { start: peg$parsestart }, + peg$startRuleFunction = peg$parsestart, + + peg$c0 = [], + peg$c1 = function() { return nodes }, + peg$c2 = peg$FAILED, + peg$c3 = "#", + peg$c4 = { type: "literal", value: "#", description: "\"#\"" }, + peg$c5 = void 0, + peg$c6 = { type: "any", description: "any character" }, + peg$c7 = "[", + peg$c8 = { type: "literal", value: "[", description: "\"[\"" }, + peg$c9 = "]", + peg$c10 = { type: "literal", value: "]", description: "\"]\"" }, + peg$c11 = function(name) { addNode(node('ObjectPath', name, line, column)) }, + peg$c12 = function(name) { addNode(node('ArrayPath', name, line, column)) }, + peg$c13 = function(parts, name) { return parts.concat(name) }, + peg$c14 = function(name) { return [name] }, + peg$c15 = function(name) { return name }, + peg$c16 = ".", + peg$c17 = { type: "literal", value: ".", description: "\".\"" }, + peg$c18 = "=", + peg$c19 = { type: "literal", value: "=", description: "\"=\"" }, + peg$c20 = function(key, value) { addNode(node('Assign', value, line, column, key)) }, + peg$c21 = function(chars) { return chars.join('') }, + peg$c22 = function(node) { return node.value }, + peg$c23 = "\"\"\"", + peg$c24 = { type: "literal", value: "\"\"\"", description: "\"\\\"\\\"\\\"\"" }, + peg$c25 = null, + peg$c26 = function(chars) { return node('String', chars.join(''), line, column) }, + peg$c27 = "\"", + peg$c28 = { type: "literal", value: "\"", description: "\"\\\"\"" }, + peg$c29 = "'''", + peg$c30 = { type: "literal", value: "'''", description: "\"'''\"" }, + peg$c31 = "'", + peg$c32 = { type: "literal", value: "'", description: "\"'\"" }, + peg$c33 = function(char) { return char }, + peg$c34 = function(char) { return char}, + peg$c35 = "\\", + peg$c36 = { type: "literal", value: "\\", description: "\"\\\\\"" }, + peg$c37 = function() { return '' }, + peg$c38 = "e", + peg$c39 = { type: "literal", value: "e", description: "\"e\"" }, + peg$c40 = "E", + peg$c41 = { type: "literal", value: "E", description: "\"E\"" }, + peg$c42 = function(left, right) { return node('Float', parseFloat(left + 'e' + right), line, column) }, + peg$c43 = function(text) { return node('Float', parseFloat(text), line, column) }, + peg$c44 = "+", + peg$c45 = { type: "literal", value: "+", description: "\"+\"" }, + peg$c46 = function(digits) { return digits.join('') }, + peg$c47 = "-", + peg$c48 = { type: "literal", value: "-", description: "\"-\"" }, + peg$c49 = function(digits) { return '-' + digits.join('') }, + peg$c50 = function(text) { return node('Integer', parseInt(text, 10), line, column) }, + peg$c51 = "true", + peg$c52 = { type: "literal", value: "true", description: "\"true\"" }, + peg$c53 = function() { return node('Boolean', true, line, column) }, + peg$c54 = "false", + peg$c55 = { type: "literal", value: "false", description: "\"false\"" }, + peg$c56 = function() { return node('Boolean', false, line, column) }, + peg$c57 = function() { return node('Array', [], line, column) }, + peg$c58 = function(value) { return node('Array', value ? 
[value] : [], line, column) }, + peg$c59 = function(values) { return node('Array', values, line, column) }, + peg$c60 = function(values, value) { return node('Array', values.concat(value), line, column) }, + peg$c61 = function(value) { return value }, + peg$c62 = ",", + peg$c63 = { type: "literal", value: ",", description: "\",\"" }, + peg$c64 = "{", + peg$c65 = { type: "literal", value: "{", description: "\"{\"" }, + peg$c66 = "}", + peg$c67 = { type: "literal", value: "}", description: "\"}\"" }, + peg$c68 = function(values) { return node('InlineTable', values, line, column) }, + peg$c69 = function(key, value) { return node('InlineTableValue', value, line, column, key) }, + peg$c70 = function(digits) { return "." + digits }, + peg$c71 = function(date) { return date.join('') }, + peg$c72 = ":", + peg$c73 = { type: "literal", value: ":", description: "\":\"" }, + peg$c74 = function(time) { return time.join('') }, + peg$c75 = "T", + peg$c76 = { type: "literal", value: "T", description: "\"T\"" }, + peg$c77 = "Z", + peg$c78 = { type: "literal", value: "Z", description: "\"Z\"" }, + peg$c79 = function(date, time) { return node('Date', new Date(date + "T" + time + "Z"), line, column) }, + peg$c80 = function(date, time) { return node('Date', new Date(date + "T" + time), line, column) }, + peg$c81 = /^[ \t]/, + peg$c82 = { type: "class", value: "[ \\t]", description: "[ \\t]" }, + peg$c83 = "\n", + peg$c84 = { type: "literal", value: "\n", description: "\"\\n\"" }, + peg$c85 = "\r", + peg$c86 = { type: "literal", value: "\r", description: "\"\\r\"" }, + peg$c87 = /^[0-9a-f]/i, + peg$c88 = { type: "class", value: "[0-9a-f]i", description: "[0-9a-f]i" }, + peg$c89 = /^[0-9]/, + peg$c90 = { type: "class", value: "[0-9]", description: "[0-9]" }, + peg$c91 = "_", + peg$c92 = { type: "literal", value: "_", description: "\"_\"" }, + peg$c93 = function() { return "" }, + peg$c94 = /^[A-Za-z0-9_\-]/, + peg$c95 = { type: "class", value: "[A-Za-z0-9_\\-]", description: "[A-Za-z0-9_\\-]" }, + peg$c96 = function(d) { return d.join('') }, + peg$c97 = "\\\"", + peg$c98 = { type: "literal", value: "\\\"", description: "\"\\\\\\\"\"" }, + peg$c99 = function() { return '"' }, + peg$c100 = "\\\\", + peg$c101 = { type: "literal", value: "\\\\", description: "\"\\\\\\\\\"" }, + peg$c102 = function() { return '\\' }, + peg$c103 = "\\b", + peg$c104 = { type: "literal", value: "\\b", description: "\"\\\\b\"" }, + peg$c105 = function() { return '\b' }, + peg$c106 = "\\t", + peg$c107 = { type: "literal", value: "\\t", description: "\"\\\\t\"" }, + peg$c108 = function() { return '\t' }, + peg$c109 = "\\n", + peg$c110 = { type: "literal", value: "\\n", description: "\"\\\\n\"" }, + peg$c111 = function() { return '\n' }, + peg$c112 = "\\f", + peg$c113 = { type: "literal", value: "\\f", description: "\"\\\\f\"" }, + peg$c114 = function() { return '\f' }, + peg$c115 = "\\r", + peg$c116 = { type: "literal", value: "\\r", description: "\"\\\\r\"" }, + peg$c117 = function() { return '\r' }, + peg$c118 = "\\U", + peg$c119 = { type: "literal", value: "\\U", description: "\"\\\\U\"" }, + peg$c120 = function(digits) { return convertCodePoint(digits.join('')) }, + peg$c121 = "\\u", + peg$c122 = { type: "literal", value: "\\u", description: "\"\\\\u\"" }, + + peg$currPos = 0, + peg$reportedPos = 0, + peg$cachedPos = 0, + peg$cachedPosDetails = { line: 1, column: 1, seenCR: false }, + peg$maxFailPos = 0, + peg$maxFailExpected = [], + peg$silentFails = 0, + + peg$cache = {}, + peg$result; + + if ("startRule" in options) { + if 
(!(options.startRule in peg$startRuleFunctions)) { + throw new Error("Can't start parsing from rule \"" + options.startRule + "\"."); + } + + peg$startRuleFunction = peg$startRuleFunctions[options.startRule]; + } + + function text() { + return input.substring(peg$reportedPos, peg$currPos); + } + + function offset() { + return peg$reportedPos; + } + + function line() { + return peg$computePosDetails(peg$reportedPos).line; + } + + function column() { + return peg$computePosDetails(peg$reportedPos).column; + } + + function expected(description) { + throw peg$buildException( + null, + [{ type: "other", description: description }], + peg$reportedPos + ); + } + + function error(message) { + throw peg$buildException(message, null, peg$reportedPos); + } + + function peg$computePosDetails(pos) { + function advance(details, startPos, endPos) { + var p, ch; + + for (p = startPos; p < endPos; p++) { + ch = input.charAt(p); + if (ch === "\n") { + if (!details.seenCR) { details.line++; } + details.column = 1; + details.seenCR = false; + } else if (ch === "\r" || ch === "\u2028" || ch === "\u2029") { + details.line++; + details.column = 1; + details.seenCR = true; + } else { + details.column++; + details.seenCR = false; + } + } + } + + if (peg$cachedPos !== pos) { + if (peg$cachedPos > pos) { + peg$cachedPos = 0; + peg$cachedPosDetails = { line: 1, column: 1, seenCR: false }; + } + advance(peg$cachedPosDetails, peg$cachedPos, pos); + peg$cachedPos = pos; + } + + return peg$cachedPosDetails; + } + + function peg$fail(expected) { + if (peg$currPos < peg$maxFailPos) { return; } + + if (peg$currPos > peg$maxFailPos) { + peg$maxFailPos = peg$currPos; + peg$maxFailExpected = []; + } + + peg$maxFailExpected.push(expected); + } + + function peg$buildException(message, expected, pos) { + function cleanupExpected(expected) { + var i = 1; + + expected.sort(function(a, b) { + if (a.description < b.description) { + return -1; + } else if (a.description > b.description) { + return 1; + } else { + return 0; + } + }); + + while (i < expected.length) { + if (expected[i - 1] === expected[i]) { + expected.splice(i, 1); + } else { + i++; + } + } + } + + function buildMessage(expected, found) { + function stringEscape(s) { + function hex(ch) { return ch.charCodeAt(0).toString(16).toUpperCase(); } + + return s + .replace(/\\/g, '\\\\') + .replace(/"/g, '\\"') + .replace(/\x08/g, '\\b') + .replace(/\t/g, '\\t') + .replace(/\n/g, '\\n') + .replace(/\f/g, '\\f') + .replace(/\r/g, '\\r') + .replace(/[\x00-\x07\x0B\x0E\x0F]/g, function(ch) { return '\\x0' + hex(ch); }) + .replace(/[\x10-\x1F\x80-\xFF]/g, function(ch) { return '\\x' + hex(ch); }) + .replace(/[\u0180-\u0FFF]/g, function(ch) { return '\\u0' + hex(ch); }) + .replace(/[\u1080-\uFFFF]/g, function(ch) { return '\\u' + hex(ch); }); + } + + var expectedDescs = new Array(expected.length), + expectedDesc, foundDesc, i; + + for (i = 0; i < expected.length; i++) { + expectedDescs[i] = expected[i].description; + } + + expectedDesc = expected.length > 1 + ? expectedDescs.slice(0, -1).join(", ") + + " or " + + expectedDescs[expected.length - 1] + : expectedDescs[0]; + + foundDesc = found ? "\"" + stringEscape(found) + "\"" : "end of input"; + + return "Expected " + expectedDesc + " but " + foundDesc + " found."; + } + + var posDetails = peg$computePosDetails(pos), + found = pos < input.length ? input.charAt(pos) : null; + + if (expected !== null) { + cleanupExpected(expected); + } + + return new SyntaxError( + message !== null ? 
message : buildMessage(expected, found), + expected, + found, + pos, + posDetails.line, + posDetails.column + ); + } + + function peg$parsestart() { + var s0, s1, s2; + + var key = peg$currPos * 49 + 0, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = []; + s2 = peg$parseline(); + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parseline(); + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c1(); + } + s0 = s1; + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseline() { + var s0, s1, s2, s3, s4, s5, s6; + + var key = peg$currPos * 49 + 1, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = []; + s2 = peg$parseS(); + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parseS(); + } + if (s1 !== peg$FAILED) { + s2 = peg$parseexpression(); + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parseS(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parseS(); + } + if (s3 !== peg$FAILED) { + s4 = []; + s5 = peg$parsecomment(); + while (s5 !== peg$FAILED) { + s4.push(s5); + s5 = peg$parsecomment(); + } + if (s4 !== peg$FAILED) { + s5 = []; + s6 = peg$parseNL(); + if (s6 !== peg$FAILED) { + while (s6 !== peg$FAILED) { + s5.push(s6); + s6 = peg$parseNL(); + } + } else { + s5 = peg$c2; + } + if (s5 === peg$FAILED) { + s5 = peg$parseEOF(); + } + if (s5 !== peg$FAILED) { + s1 = [s1, s2, s3, s4, s5]; + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = []; + s2 = peg$parseS(); + if (s2 !== peg$FAILED) { + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parseS(); + } + } else { + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parseNL(); + if (s3 !== peg$FAILED) { + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parseNL(); + } + } else { + s2 = peg$c2; + } + if (s2 === peg$FAILED) { + s2 = peg$parseEOF(); + } + if (s2 !== peg$FAILED) { + s1 = [s1, s2]; + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$parseNL(); + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseexpression() { + var s0; + + var key = peg$currPos * 49 + 2, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$parsecomment(); + if (s0 === peg$FAILED) { + s0 = peg$parsepath(); + if (s0 === peg$FAILED) { + s0 = peg$parsetablearray(); + if (s0 === peg$FAILED) { + s0 = peg$parseassignment(); + } + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsecomment() { + var s0, s1, s2, s3, s4, s5; + + var key = peg$currPos * 49 + 3, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 35) { + s1 = peg$c3; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c4); } + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$currPos; + s4 = peg$currPos; + peg$silentFails++; + s5 = peg$parseNL(); + if (s5 === 
peg$FAILED) { + s5 = peg$parseEOF(); + } + peg$silentFails--; + if (s5 === peg$FAILED) { + s4 = peg$c5; + } else { + peg$currPos = s4; + s4 = peg$c2; + } + if (s4 !== peg$FAILED) { + if (input.length > peg$currPos) { + s5 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s5 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c6); } + } + if (s5 !== peg$FAILED) { + s4 = [s4, s5]; + s3 = s4; + } else { + peg$currPos = s3; + s3 = peg$c2; + } + } else { + peg$currPos = s3; + s3 = peg$c2; + } + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$currPos; + s4 = peg$currPos; + peg$silentFails++; + s5 = peg$parseNL(); + if (s5 === peg$FAILED) { + s5 = peg$parseEOF(); + } + peg$silentFails--; + if (s5 === peg$FAILED) { + s4 = peg$c5; + } else { + peg$currPos = s4; + s4 = peg$c2; + } + if (s4 !== peg$FAILED) { + if (input.length > peg$currPos) { + s5 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s5 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c6); } + } + if (s5 !== peg$FAILED) { + s4 = [s4, s5]; + s3 = s4; + } else { + peg$currPos = s3; + s3 = peg$c2; + } + } else { + peg$currPos = s3; + s3 = peg$c2; + } + } + if (s2 !== peg$FAILED) { + s1 = [s1, s2]; + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsepath() { + var s0, s1, s2, s3, s4, s5; + + var key = peg$currPos * 49 + 4, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 91) { + s1 = peg$c7; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c8); } + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parseS(); + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parseS(); + } + if (s2 !== peg$FAILED) { + s3 = peg$parsetable_key(); + if (s3 !== peg$FAILED) { + s4 = []; + s5 = peg$parseS(); + while (s5 !== peg$FAILED) { + s4.push(s5); + s5 = peg$parseS(); + } + if (s4 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 93) { + s5 = peg$c9; + peg$currPos++; + } else { + s5 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c10); } + } + if (s5 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c11(s3); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsetablearray() { + var s0, s1, s2, s3, s4, s5, s6, s7; + + var key = peg$currPos * 49 + 5, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 91) { + s1 = peg$c7; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c8); } + } + if (s1 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 91) { + s2 = peg$c7; + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c8); } + } + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parseS(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parseS(); + } + if (s3 !== peg$FAILED) { + s4 = peg$parsetable_key(); + if (s4 !== peg$FAILED) { + s5 = []; + s6 = peg$parseS(); + while (s6 !== 
peg$FAILED) { + s5.push(s6); + s6 = peg$parseS(); + } + if (s5 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 93) { + s6 = peg$c9; + peg$currPos++; + } else { + s6 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c10); } + } + if (s6 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 93) { + s7 = peg$c9; + peg$currPos++; + } else { + s7 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c10); } + } + if (s7 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c12(s4); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsetable_key() { + var s0, s1, s2; + + var key = peg$currPos * 49 + 6, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = []; + s2 = peg$parsedot_ended_table_key_part(); + if (s2 !== peg$FAILED) { + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parsedot_ended_table_key_part(); + } + } else { + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + s2 = peg$parsetable_key_part(); + if (s2 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c13(s1, s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = peg$parsetable_key_part(); + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c14(s1); + } + s0 = s1; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsetable_key_part() { + var s0, s1, s2, s3, s4; + + var key = peg$currPos * 49 + 7, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = []; + s2 = peg$parseS(); + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parseS(); + } + if (s1 !== peg$FAILED) { + s2 = peg$parsekey(); + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parseS(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parseS(); + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c15(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = []; + s2 = peg$parseS(); + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parseS(); + } + if (s1 !== peg$FAILED) { + s2 = peg$parsequoted_key(); + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parseS(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parseS(); + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c15(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsedot_ended_table_key_part() { + var s0, s1, s2, s3, s4, s5, s6; + + var key = peg$currPos * 49 + 8, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = []; + s2 = peg$parseS(); + while (s2 !== 
peg$FAILED) { + s1.push(s2); + s2 = peg$parseS(); + } + if (s1 !== peg$FAILED) { + s2 = peg$parsekey(); + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parseS(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parseS(); + } + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 46) { + s4 = peg$c16; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c17); } + } + if (s4 !== peg$FAILED) { + s5 = []; + s6 = peg$parseS(); + while (s6 !== peg$FAILED) { + s5.push(s6); + s6 = peg$parseS(); + } + if (s5 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c15(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = []; + s2 = peg$parseS(); + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parseS(); + } + if (s1 !== peg$FAILED) { + s2 = peg$parsequoted_key(); + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parseS(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parseS(); + } + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 46) { + s4 = peg$c16; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c17); } + } + if (s4 !== peg$FAILED) { + s5 = []; + s6 = peg$parseS(); + while (s6 !== peg$FAILED) { + s5.push(s6); + s6 = peg$parseS(); + } + if (s5 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c15(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseassignment() { + var s0, s1, s2, s3, s4, s5; + + var key = peg$currPos * 49 + 9, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = peg$parsekey(); + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parseS(); + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parseS(); + } + if (s2 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 61) { + s3 = peg$c18; + peg$currPos++; + } else { + s3 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c19); } + } + if (s3 !== peg$FAILED) { + s4 = []; + s5 = peg$parseS(); + while (s5 !== peg$FAILED) { + s4.push(s5); + s5 = peg$parseS(); + } + if (s4 !== peg$FAILED) { + s5 = peg$parsevalue(); + if (s5 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c20(s1, s5); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = peg$parsequoted_key(); + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parseS(); + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parseS(); + } + if (s2 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 61) { + s3 = peg$c18; + peg$currPos++; + } else { + s3 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c19); } + } + if (s3 !== peg$FAILED) { + s4 = []; + s5 = peg$parseS(); + while (s5 !== peg$FAILED) { + s4.push(s5); + s5 
= peg$parseS(); + } + if (s4 !== peg$FAILED) { + s5 = peg$parsevalue(); + if (s5 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c20(s1, s5); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsekey() { + var s0, s1, s2; + + var key = peg$currPos * 49 + 10, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = []; + s2 = peg$parseASCII_BASIC(); + if (s2 !== peg$FAILED) { + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parseASCII_BASIC(); + } + } else { + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c21(s1); + } + s0 = s1; + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsequoted_key() { + var s0, s1; + + var key = peg$currPos * 49 + 11, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = peg$parsedouble_quoted_single_line_string(); + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c22(s1); + } + s0 = s1; + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = peg$parsesingle_quoted_single_line_string(); + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c22(s1); + } + s0 = s1; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsevalue() { + var s0; + + var key = peg$currPos * 49 + 12, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$parsestring(); + if (s0 === peg$FAILED) { + s0 = peg$parsedatetime(); + if (s0 === peg$FAILED) { + s0 = peg$parsefloat(); + if (s0 === peg$FAILED) { + s0 = peg$parseinteger(); + if (s0 === peg$FAILED) { + s0 = peg$parseboolean(); + if (s0 === peg$FAILED) { + s0 = peg$parsearray(); + if (s0 === peg$FAILED) { + s0 = peg$parseinline_table(); + } + } + } + } + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsestring() { + var s0; + + var key = peg$currPos * 49 + 13, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$parsedouble_quoted_multiline_string(); + if (s0 === peg$FAILED) { + s0 = peg$parsedouble_quoted_single_line_string(); + if (s0 === peg$FAILED) { + s0 = peg$parsesingle_quoted_multiline_string(); + if (s0 === peg$FAILED) { + s0 = peg$parsesingle_quoted_single_line_string(); + } + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsedouble_quoted_multiline_string() { + var s0, s1, s2, s3, s4; + + var key = peg$currPos * 49 + 14, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.substr(peg$currPos, 3) === peg$c23) { + s1 = peg$c23; + peg$currPos += 3; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c24); } + } + if (s1 !== peg$FAILED) { + s2 = peg$parseNL(); + if (s2 === peg$FAILED) { + s2 = peg$c25; + } + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parsemultiline_string_char(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parsemultiline_string_char(); + } + 
if (s3 !== peg$FAILED) { + if (input.substr(peg$currPos, 3) === peg$c23) { + s4 = peg$c23; + peg$currPos += 3; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c24); } + } + if (s4 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c26(s3); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsedouble_quoted_single_line_string() { + var s0, s1, s2, s3; + + var key = peg$currPos * 49 + 15, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 34) { + s1 = peg$c27; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c28); } + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parsestring_char(); + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parsestring_char(); + } + if (s2 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 34) { + s3 = peg$c27; + peg$currPos++; + } else { + s3 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c28); } + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c26(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsesingle_quoted_multiline_string() { + var s0, s1, s2, s3, s4; + + var key = peg$currPos * 49 + 16, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.substr(peg$currPos, 3) === peg$c29) { + s1 = peg$c29; + peg$currPos += 3; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c30); } + } + if (s1 !== peg$FAILED) { + s2 = peg$parseNL(); + if (s2 === peg$FAILED) { + s2 = peg$c25; + } + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parsemultiline_literal_char(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parsemultiline_literal_char(); + } + if (s3 !== peg$FAILED) { + if (input.substr(peg$currPos, 3) === peg$c29) { + s4 = peg$c29; + peg$currPos += 3; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c30); } + } + if (s4 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c26(s3); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsesingle_quoted_single_line_string() { + var s0, s1, s2, s3; + + var key = peg$currPos * 49 + 17, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 39) { + s1 = peg$c31; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c32); } + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parseliteral_char(); + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parseliteral_char(); + } + if (s2 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 39) { + s3 = peg$c31; + peg$currPos++; + } else { + s3 = 
peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c32); } + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c26(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsestring_char() { + var s0, s1, s2; + + var key = peg$currPos * 49 + 18, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$parseESCAPED(); + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = peg$currPos; + peg$silentFails++; + if (input.charCodeAt(peg$currPos) === 34) { + s2 = peg$c27; + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c28); } + } + peg$silentFails--; + if (s2 === peg$FAILED) { + s1 = peg$c5; + } else { + peg$currPos = s1; + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + if (input.length > peg$currPos) { + s2 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c6); } + } + if (s2 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c33(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseliteral_char() { + var s0, s1, s2; + + var key = peg$currPos * 49 + 19, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = peg$currPos; + peg$silentFails++; + if (input.charCodeAt(peg$currPos) === 39) { + s2 = peg$c31; + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c32); } + } + peg$silentFails--; + if (s2 === peg$FAILED) { + s1 = peg$c5; + } else { + peg$currPos = s1; + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + if (input.length > peg$currPos) { + s2 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c6); } + } + if (s2 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c33(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsemultiline_string_char() { + var s0, s1, s2; + + var key = peg$currPos * 49 + 20, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$parseESCAPED(); + if (s0 === peg$FAILED) { + s0 = peg$parsemultiline_string_delim(); + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = peg$currPos; + peg$silentFails++; + if (input.substr(peg$currPos, 3) === peg$c23) { + s2 = peg$c23; + peg$currPos += 3; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c24); } + } + peg$silentFails--; + if (s2 === peg$FAILED) { + s1 = peg$c5; + } else { + peg$currPos = s1; + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + if (input.length > peg$currPos) { + s2 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c6); } + } + if (s2 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c34(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: 
s0 }; + + return s0; + } + + function peg$parsemultiline_string_delim() { + var s0, s1, s2, s3, s4; + + var key = peg$currPos * 49 + 21, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 92) { + s1 = peg$c35; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c36); } + } + if (s1 !== peg$FAILED) { + s2 = peg$parseNL(); + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parseNLS(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parseNLS(); + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c37(); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsemultiline_literal_char() { + var s0, s1, s2; + + var key = peg$currPos * 49 + 22, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = peg$currPos; + peg$silentFails++; + if (input.substr(peg$currPos, 3) === peg$c29) { + s2 = peg$c29; + peg$currPos += 3; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c30); } + } + peg$silentFails--; + if (s2 === peg$FAILED) { + s1 = peg$c5; + } else { + peg$currPos = s1; + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + if (input.length > peg$currPos) { + s2 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c6); } + } + if (s2 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c33(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsefloat() { + var s0, s1, s2, s3; + + var key = peg$currPos * 49 + 23, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = peg$parsefloat_text(); + if (s1 === peg$FAILED) { + s1 = peg$parseinteger_text(); + } + if (s1 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 101) { + s2 = peg$c38; + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c39); } + } + if (s2 === peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 69) { + s2 = peg$c40; + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c41); } + } + } + if (s2 !== peg$FAILED) { + s3 = peg$parseinteger_text(); + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c42(s1, s3); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = peg$parsefloat_text(); + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c43(s1); + } + s0 = s1; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsefloat_text() { + var s0, s1, s2, s3, s4, s5; + + var key = peg$currPos * 49 + 24, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 43) { + s1 = peg$c44; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { 
peg$fail(peg$c45); } + } + if (s1 === peg$FAILED) { + s1 = peg$c25; + } + if (s1 !== peg$FAILED) { + s2 = peg$currPos; + s3 = peg$parseDIGITS(); + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 46) { + s4 = peg$c16; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c17); } + } + if (s4 !== peg$FAILED) { + s5 = peg$parseDIGITS(); + if (s5 !== peg$FAILED) { + s3 = [s3, s4, s5]; + s2 = s3; + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + if (s2 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c46(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 45) { + s1 = peg$c47; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c48); } + } + if (s1 !== peg$FAILED) { + s2 = peg$currPos; + s3 = peg$parseDIGITS(); + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 46) { + s4 = peg$c16; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c17); } + } + if (s4 !== peg$FAILED) { + s5 = peg$parseDIGITS(); + if (s5 !== peg$FAILED) { + s3 = [s3, s4, s5]; + s2 = s3; + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + if (s2 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c49(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseinteger() { + var s0, s1; + + var key = peg$currPos * 49 + 25, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = peg$parseinteger_text(); + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c50(s1); + } + s0 = s1; + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseinteger_text() { + var s0, s1, s2, s3, s4; + + var key = peg$currPos * 49 + 26, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 43) { + s1 = peg$c44; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c45); } + } + if (s1 === peg$FAILED) { + s1 = peg$c25; + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parseDIGIT_OR_UNDER(); + if (s3 !== peg$FAILED) { + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parseDIGIT_OR_UNDER(); + } + } else { + s2 = peg$c2; + } + if (s2 !== peg$FAILED) { + s3 = peg$currPos; + peg$silentFails++; + if (input.charCodeAt(peg$currPos) === 46) { + s4 = peg$c16; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c17); } + } + peg$silentFails--; + if (s4 === peg$FAILED) { + s3 = peg$c5; + } else { + peg$currPos = s3; + s3 = peg$c2; + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c46(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 45) { + s1 = peg$c47; + peg$currPos++; + } 
else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c48); } + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parseDIGIT_OR_UNDER(); + if (s3 !== peg$FAILED) { + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parseDIGIT_OR_UNDER(); + } + } else { + s2 = peg$c2; + } + if (s2 !== peg$FAILED) { + s3 = peg$currPos; + peg$silentFails++; + if (input.charCodeAt(peg$currPos) === 46) { + s4 = peg$c16; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c17); } + } + peg$silentFails--; + if (s4 === peg$FAILED) { + s3 = peg$c5; + } else { + peg$currPos = s3; + s3 = peg$c2; + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c49(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseboolean() { + var s0, s1; + + var key = peg$currPos * 49 + 27, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.substr(peg$currPos, 4) === peg$c51) { + s1 = peg$c51; + peg$currPos += 4; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c52); } + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c53(); + } + s0 = s1; + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.substr(peg$currPos, 5) === peg$c54) { + s1 = peg$c54; + peg$currPos += 5; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c55); } + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c56(); + } + s0 = s1; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsearray() { + var s0, s1, s2, s3, s4; + + var key = peg$currPos * 49 + 28, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 91) { + s1 = peg$c7; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c8); } + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parsearray_sep(); + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parsearray_sep(); + } + if (s2 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 93) { + s3 = peg$c9; + peg$currPos++; + } else { + s3 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c10); } + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c57(); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 91) { + s1 = peg$c7; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c8); } + } + if (s1 !== peg$FAILED) { + s2 = peg$parsearray_value(); + if (s2 === peg$FAILED) { + s2 = peg$c25; + } + if (s2 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 93) { + s3 = peg$c9; + peg$currPos++; + } else { + s3 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c10); } + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c58(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if 
(input.charCodeAt(peg$currPos) === 91) { + s1 = peg$c7; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c8); } + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parsearray_value_list(); + if (s3 !== peg$FAILED) { + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parsearray_value_list(); + } + } else { + s2 = peg$c2; + } + if (s2 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 93) { + s3 = peg$c9; + peg$currPos++; + } else { + s3 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c10); } + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c59(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 91) { + s1 = peg$c7; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c8); } + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parsearray_value_list(); + if (s3 !== peg$FAILED) { + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parsearray_value_list(); + } + } else { + s2 = peg$c2; + } + if (s2 !== peg$FAILED) { + s3 = peg$parsearray_value(); + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 93) { + s4 = peg$c9; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c10); } + } + if (s4 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c60(s2, s3); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsearray_value() { + var s0, s1, s2, s3, s4; + + var key = peg$currPos * 49 + 29, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = []; + s2 = peg$parsearray_sep(); + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parsearray_sep(); + } + if (s1 !== peg$FAILED) { + s2 = peg$parsevalue(); + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parsearray_sep(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parsearray_sep(); + } + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c61(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsearray_value_list() { + var s0, s1, s2, s3, s4, s5, s6; + + var key = peg$currPos * 49 + 30, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = []; + s2 = peg$parsearray_sep(); + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parsearray_sep(); + } + if (s1 !== peg$FAILED) { + s2 = peg$parsevalue(); + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parsearray_sep(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parsearray_sep(); + } + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 44) { + s4 = peg$c62; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c63); } + } + if (s4 !== peg$FAILED) { + s5 = []; + s6 = peg$parsearray_sep(); + while (s6 !== peg$FAILED) { + 
s5.push(s6); + s6 = peg$parsearray_sep(); + } + if (s5 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c61(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsearray_sep() { + var s0; + + var key = peg$currPos * 49 + 31, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$parseS(); + if (s0 === peg$FAILED) { + s0 = peg$parseNL(); + if (s0 === peg$FAILED) { + s0 = peg$parsecomment(); + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseinline_table() { + var s0, s1, s2, s3, s4, s5; + + var key = peg$currPos * 49 + 32, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 123) { + s1 = peg$c64; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c65); } + } + if (s1 !== peg$FAILED) { + s2 = []; + s3 = peg$parseS(); + while (s3 !== peg$FAILED) { + s2.push(s3); + s3 = peg$parseS(); + } + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parseinline_table_assignment(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parseinline_table_assignment(); + } + if (s3 !== peg$FAILED) { + s4 = []; + s5 = peg$parseS(); + while (s5 !== peg$FAILED) { + s4.push(s5); + s5 = peg$parseS(); + } + if (s4 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 125) { + s5 = peg$c66; + peg$currPos++; + } else { + s5 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c67); } + } + if (s5 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c68(s3); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseinline_table_assignment() { + var s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10; + + var key = peg$currPos * 49 + 33, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = []; + s2 = peg$parseS(); + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parseS(); + } + if (s1 !== peg$FAILED) { + s2 = peg$parsekey(); + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parseS(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parseS(); + } + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 61) { + s4 = peg$c18; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c19); } + } + if (s4 !== peg$FAILED) { + s5 = []; + s6 = peg$parseS(); + while (s6 !== peg$FAILED) { + s5.push(s6); + s6 = peg$parseS(); + } + if (s5 !== peg$FAILED) { + s6 = peg$parsevalue(); + if (s6 !== peg$FAILED) { + s7 = []; + s8 = peg$parseS(); + while (s8 !== peg$FAILED) { + s7.push(s8); + s8 = peg$parseS(); + } + if (s7 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 44) { + s8 = peg$c62; + peg$currPos++; + } else { + s8 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c63); } + } + if (s8 !== peg$FAILED) 
{ + s9 = []; + s10 = peg$parseS(); + while (s10 !== peg$FAILED) { + s9.push(s10); + s10 = peg$parseS(); + } + if (s9 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c69(s2, s6); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = []; + s2 = peg$parseS(); + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parseS(); + } + if (s1 !== peg$FAILED) { + s2 = peg$parsekey(); + if (s2 !== peg$FAILED) { + s3 = []; + s4 = peg$parseS(); + while (s4 !== peg$FAILED) { + s3.push(s4); + s4 = peg$parseS(); + } + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 61) { + s4 = peg$c18; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c19); } + } + if (s4 !== peg$FAILED) { + s5 = []; + s6 = peg$parseS(); + while (s6 !== peg$FAILED) { + s5.push(s6); + s6 = peg$parseS(); + } + if (s5 !== peg$FAILED) { + s6 = peg$parsevalue(); + if (s6 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c69(s2, s6); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsesecfragment() { + var s0, s1, s2; + + var key = peg$currPos * 49 + 34, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 46) { + s1 = peg$c16; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c17); } + } + if (s1 !== peg$FAILED) { + s2 = peg$parseDIGITS(); + if (s2 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c70(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsedate() { + var s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11; + + var key = peg$currPos * 49 + 35, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = peg$currPos; + s2 = peg$parseDIGIT_OR_UNDER(); + if (s2 !== peg$FAILED) { + s3 = peg$parseDIGIT_OR_UNDER(); + if (s3 !== peg$FAILED) { + s4 = peg$parseDIGIT_OR_UNDER(); + if (s4 !== peg$FAILED) { + s5 = peg$parseDIGIT_OR_UNDER(); + if (s5 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 45) { + s6 = peg$c47; + peg$currPos++; + } else { + s6 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c48); } + } + if (s6 !== peg$FAILED) { + s7 = peg$parseDIGIT_OR_UNDER(); + if (s7 !== peg$FAILED) { + s8 = peg$parseDIGIT_OR_UNDER(); + if (s8 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 45) { + s9 = peg$c47; + peg$currPos++; + } else { + s9 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c48); } + } + if (s9 !== peg$FAILED) { + s10 = 
peg$parseDIGIT_OR_UNDER(); + if (s10 !== peg$FAILED) { + s11 = peg$parseDIGIT_OR_UNDER(); + if (s11 !== peg$FAILED) { + s2 = [s2, s3, s4, s5, s6, s7, s8, s9, s10, s11]; + s1 = s2; + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c71(s1); + } + s0 = s1; + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsetime() { + var s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10; + + var key = peg$currPos * 49 + 36, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = peg$currPos; + s2 = peg$parseDIGIT_OR_UNDER(); + if (s2 !== peg$FAILED) { + s3 = peg$parseDIGIT_OR_UNDER(); + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 58) { + s4 = peg$c72; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c73); } + } + if (s4 !== peg$FAILED) { + s5 = peg$parseDIGIT_OR_UNDER(); + if (s5 !== peg$FAILED) { + s6 = peg$parseDIGIT_OR_UNDER(); + if (s6 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 58) { + s7 = peg$c72; + peg$currPos++; + } else { + s7 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c73); } + } + if (s7 !== peg$FAILED) { + s8 = peg$parseDIGIT_OR_UNDER(); + if (s8 !== peg$FAILED) { + s9 = peg$parseDIGIT_OR_UNDER(); + if (s9 !== peg$FAILED) { + s10 = peg$parsesecfragment(); + if (s10 === peg$FAILED) { + s10 = peg$c25; + } + if (s10 !== peg$FAILED) { + s2 = [s2, s3, s4, s5, s6, s7, s8, s9, s10]; + s1 = s2; + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c74(s1); + } + s0 = s1; + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsetime_with_offset() { + var s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16; + + var key = peg$currPos * 49 + 37, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = peg$currPos; + s2 = peg$parseDIGIT_OR_UNDER(); + if (s2 !== peg$FAILED) { + s3 = peg$parseDIGIT_OR_UNDER(); + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 58) { + s4 = peg$c72; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c73); } + } + if (s4 !== peg$FAILED) { + s5 = peg$parseDIGIT_OR_UNDER(); + if (s5 !== peg$FAILED) { + s6 = peg$parseDIGIT_OR_UNDER(); + if (s6 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 58) { + s7 = peg$c72; + peg$currPos++; + } else { + s7 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c73); } + } + if 
(s7 !== peg$FAILED) { + s8 = peg$parseDIGIT_OR_UNDER(); + if (s8 !== peg$FAILED) { + s9 = peg$parseDIGIT_OR_UNDER(); + if (s9 !== peg$FAILED) { + s10 = peg$parsesecfragment(); + if (s10 === peg$FAILED) { + s10 = peg$c25; + } + if (s10 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 45) { + s11 = peg$c47; + peg$currPos++; + } else { + s11 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c48); } + } + if (s11 === peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 43) { + s11 = peg$c44; + peg$currPos++; + } else { + s11 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c45); } + } + } + if (s11 !== peg$FAILED) { + s12 = peg$parseDIGIT_OR_UNDER(); + if (s12 !== peg$FAILED) { + s13 = peg$parseDIGIT_OR_UNDER(); + if (s13 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 58) { + s14 = peg$c72; + peg$currPos++; + } else { + s14 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c73); } + } + if (s14 !== peg$FAILED) { + s15 = peg$parseDIGIT_OR_UNDER(); + if (s15 !== peg$FAILED) { + s16 = peg$parseDIGIT_OR_UNDER(); + if (s16 !== peg$FAILED) { + s2 = [s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16]; + s1 = s2; + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + } else { + peg$currPos = s1; + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c74(s1); + } + s0 = s1; + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parsedatetime() { + var s0, s1, s2, s3, s4; + + var key = peg$currPos * 49 + 38, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = peg$parsedate(); + if (s1 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 84) { + s2 = peg$c75; + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c76); } + } + if (s2 !== peg$FAILED) { + s3 = peg$parsetime(); + if (s3 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 90) { + s4 = peg$c77; + peg$currPos++; + } else { + s4 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c78); } + } + if (s4 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c79(s1, s3); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + s1 = peg$parsedate(); + if (s1 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 84) { + s2 = peg$c75; + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c76); } + } + if (s2 !== peg$FAILED) { + s3 = peg$parsetime_with_offset(); + if (s3 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c80(s1, s3); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = 
s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseS() { + var s0; + + var key = peg$currPos * 49 + 39, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + if (peg$c81.test(input.charAt(peg$currPos))) { + s0 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s0 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c82); } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseNL() { + var s0, s1, s2; + + var key = peg$currPos * 49 + 40, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + if (input.charCodeAt(peg$currPos) === 10) { + s0 = peg$c83; + peg$currPos++; + } else { + s0 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c84); } + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 13) { + s1 = peg$c85; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c86); } + } + if (s1 !== peg$FAILED) { + if (input.charCodeAt(peg$currPos) === 10) { + s2 = peg$c83; + peg$currPos++; + } else { + s2 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c84); } + } + if (s2 !== peg$FAILED) { + s1 = [s1, s2]; + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseNLS() { + var s0; + + var key = peg$currPos * 49 + 41, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$parseNL(); + if (s0 === peg$FAILED) { + s0 = peg$parseS(); + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseEOF() { + var s0, s1; + + var key = peg$currPos * 49 + 42, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + peg$silentFails++; + if (input.length > peg$currPos) { + s1 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c6); } + } + peg$silentFails--; + if (s1 === peg$FAILED) { + s0 = peg$c5; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseHEX() { + var s0; + + var key = peg$currPos * 49 + 43, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + if (peg$c87.test(input.charAt(peg$currPos))) { + s0 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s0 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c88); } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseDIGIT_OR_UNDER() { + var s0, s1; + + var key = peg$currPos * 49 + 44, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + if (peg$c89.test(input.charAt(peg$currPos))) { + s0 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s0 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c90); } + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.charCodeAt(peg$currPos) === 95) { + s1 = peg$c91; + peg$currPos++; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { 
peg$fail(peg$c92); } + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c93(); + } + s0 = s1; + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseASCII_BASIC() { + var s0; + + var key = peg$currPos * 49 + 45, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + if (peg$c94.test(input.charAt(peg$currPos))) { + s0 = input.charAt(peg$currPos); + peg$currPos++; + } else { + s0 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c95); } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseDIGITS() { + var s0, s1, s2; + + var key = peg$currPos * 49 + 46, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + s1 = []; + s2 = peg$parseDIGIT_OR_UNDER(); + if (s2 !== peg$FAILED) { + while (s2 !== peg$FAILED) { + s1.push(s2); + s2 = peg$parseDIGIT_OR_UNDER(); + } + } else { + s1 = peg$c2; + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c96(s1); + } + s0 = s1; + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseESCAPED() { + var s0, s1; + + var key = peg$currPos * 49 + 47, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.substr(peg$currPos, 2) === peg$c97) { + s1 = peg$c97; + peg$currPos += 2; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c98); } + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c99(); + } + s0 = s1; + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.substr(peg$currPos, 2) === peg$c100) { + s1 = peg$c100; + peg$currPos += 2; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c101); } + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c102(); + } + s0 = s1; + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.substr(peg$currPos, 2) === peg$c103) { + s1 = peg$c103; + peg$currPos += 2; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c104); } + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c105(); + } + s0 = s1; + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.substr(peg$currPos, 2) === peg$c106) { + s1 = peg$c106; + peg$currPos += 2; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c107); } + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c108(); + } + s0 = s1; + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.substr(peg$currPos, 2) === peg$c109) { + s1 = peg$c109; + peg$currPos += 2; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c110); } + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c111(); + } + s0 = s1; + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.substr(peg$currPos, 2) === peg$c112) { + s1 = peg$c112; + peg$currPos += 2; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c113); } + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c114(); + } + s0 = s1; + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.substr(peg$currPos, 2) === peg$c115) { + s1 = peg$c115; + peg$currPos += 2; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c116); } + } + if (s1 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c117(); + } + s0 = s1; + if (s0 === peg$FAILED) { + s0 = 
peg$parseESCAPED_UNICODE(); + } + } + } + } + } + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + function peg$parseESCAPED_UNICODE() { + var s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10; + + var key = peg$currPos * 49 + 48, + cached = peg$cache[key]; + + if (cached) { + peg$currPos = cached.nextPos; + return cached.result; + } + + s0 = peg$currPos; + if (input.substr(peg$currPos, 2) === peg$c118) { + s1 = peg$c118; + peg$currPos += 2; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c119); } + } + if (s1 !== peg$FAILED) { + s2 = peg$currPos; + s3 = peg$parseHEX(); + if (s3 !== peg$FAILED) { + s4 = peg$parseHEX(); + if (s4 !== peg$FAILED) { + s5 = peg$parseHEX(); + if (s5 !== peg$FAILED) { + s6 = peg$parseHEX(); + if (s6 !== peg$FAILED) { + s7 = peg$parseHEX(); + if (s7 !== peg$FAILED) { + s8 = peg$parseHEX(); + if (s8 !== peg$FAILED) { + s9 = peg$parseHEX(); + if (s9 !== peg$FAILED) { + s10 = peg$parseHEX(); + if (s10 !== peg$FAILED) { + s3 = [s3, s4, s5, s6, s7, s8, s9, s10]; + s2 = s3; + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + if (s2 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c120(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + if (s0 === peg$FAILED) { + s0 = peg$currPos; + if (input.substr(peg$currPos, 2) === peg$c121) { + s1 = peg$c121; + peg$currPos += 2; + } else { + s1 = peg$FAILED; + if (peg$silentFails === 0) { peg$fail(peg$c122); } + } + if (s1 !== peg$FAILED) { + s2 = peg$currPos; + s3 = peg$parseHEX(); + if (s3 !== peg$FAILED) { + s4 = peg$parseHEX(); + if (s4 !== peg$FAILED) { + s5 = peg$parseHEX(); + if (s5 !== peg$FAILED) { + s6 = peg$parseHEX(); + if (s6 !== peg$FAILED) { + s3 = [s3, s4, s5, s6]; + s2 = s3; + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + } else { + peg$currPos = s2; + s2 = peg$c2; + } + if (s2 !== peg$FAILED) { + peg$reportedPos = s0; + s1 = peg$c120(s2); + s0 = s1; + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } else { + peg$currPos = s0; + s0 = peg$c2; + } + } + + peg$cache[key] = { nextPos: peg$currPos, result: s0 }; + + return s0; + } + + + var nodes = []; + + function genError(err, line, col) { + var ex = new Error(err); + ex.line = line; + ex.column = col; + throw ex; + } + + function addNode(node) { + nodes.push(node); + } + + function node(type, value, line, column, key) { + var obj = { type: type, value: value, line: line(), column: column() }; + if (key) obj.key = key; + return obj; + } + + function convertCodePoint(str, line, col) { + var num = parseInt("0x" + str); + + if ( + !isFinite(num) || + Math.floor(num) != num || + num < 0 || + num > 0x10FFFF || + (num > 0xD7FF && num < 0xE000) + ) { + genError("Invalid Unicode escape code: " + str, line, col); + } else { + return fromCodePoint(num); + } + } + + function fromCodePoint() { + var MAX_SIZE = 0x4000; + var codeUnits = []; + var highSurrogate; + var lowSurrogate; + var index = -1; + var length = arguments.length; + if (!length) { + return ''; + } + var result = ''; + while (++index < 
length) { + var codePoint = Number(arguments[index]); + if (codePoint <= 0xFFFF) { // BMP code point + codeUnits.push(codePoint); + } else { // Astral code point; split in surrogate halves + // http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae + codePoint -= 0x10000; + highSurrogate = (codePoint >> 10) + 0xD800; + lowSurrogate = (codePoint % 0x400) + 0xDC00; + codeUnits.push(highSurrogate, lowSurrogate); + } + if (index + 1 == length || codeUnits.length > MAX_SIZE) { + result += String.fromCharCode.apply(null, codeUnits); + codeUnits.length = 0; + } + } + return result; + } + + + peg$result = peg$startRuleFunction(); + + if (peg$result !== peg$FAILED && peg$currPos === input.length) { + return peg$result; + } else { + if (peg$result !== peg$FAILED && peg$currPos < input.length) { + peg$fail({ type: "end", description: "end of input" }); + } + + throw peg$buildException(null, peg$maxFailExpected, peg$maxFailPos); + } + } + + return { + SyntaxError: SyntaxError, + parse: parse + }; +})(); + + +/***/ }), + +/***/ 4294: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(4219); + + +/***/ }), + +/***/ 4219: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var net = __nccwpck_require__(1808); +var tls = __nccwpck_require__(4404); +var http = __nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var events = __nccwpck_require__(2361); +var assert = __nccwpck_require__(9491); +var util = __nccwpck_require__(3837); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. 
+ self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. + self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } + + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. 
+ this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; +} + + +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test + + +/***/ }), + +/***/ 5840: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(8628)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(6409)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(5122)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(9120)); + +var _nil = _interopRequireDefault(__nccwpck_require__(5332)); + +var _version = _interopRequireDefault(__nccwpck_require__(1595)); + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +var _parse = _interopRequireDefault(__nccwpck_require__(2746)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/***/ }), + +/***/ 4569: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 5332: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 2746: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 814: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 807: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 5274: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 8950: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 8628: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(807)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5998)); + +var _md = _interopRequireDefault(__nccwpck_require__(4569)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 5998: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +var _parse = _interopRequireDefault(__nccwpck_require__(2746)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 5122: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(807)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 9120: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5998)); + +var _sha = _interopRequireDefault(__nccwpck_require__(5274)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 6900: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 1595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 9491: +/***/ ((module) => { + +"use strict"; +module.exports = require("assert"); + +/***/ }), + +/***/ 2081: +/***/ ((module) => { + +"use strict"; +module.exports = require("child_process"); + +/***/ }), + +/***/ 6113: +/***/ ((module) => { + +"use strict"; +module.exports = require("crypto"); + +/***/ }), + +/***/ 2361: +/***/ ((module) => { + +"use strict"; +module.exports = require("events"); + +/***/ }), + +/***/ 7147: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs"); + +/***/ }), + +/***/ 3685: +/***/ ((module) => { + +"use strict"; +module.exports = require("http"); + +/***/ }), + +/***/ 5687: +/***/ ((module) => { + +"use strict"; +module.exports = require("https"); + +/***/ }), + +/***/ 1808: +/***/ ((module) => { + +"use strict"; +module.exports = require("net"); + +/***/ }), + +/***/ 2037: +/***/ ((module) => { + +"use strict"; +module.exports = require("os"); + +/***/ }), + +/***/ 1017: +/***/ ((module) => { + +"use strict"; +module.exports = require("path"); + +/***/ }), + +/***/ 2781: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream"); + +/***/ }), + +/***/ 1576: +/***/ ((module) => { + +"use strict"; +module.exports = require("string_decoder"); + +/***/ }), + +/***/ 9512: +/***/ ((module) => { + +"use strict"; +module.exports = require("timers"); + +/***/ }), + +/***/ 4404: +/***/ ((module) => { + +"use strict"; +module.exports = require("tls"); + +/***/ }), + +/***/ 3837: +/***/ ((module) => { + +"use strict"; +module.exports = require("util"); + +/***/ }) + +/******/ }); +/************************************************************************/ +/******/ // The module cache +/******/ var __webpack_module_cache__ = {}; +/******/ +/******/ // The require function +/******/ function __nccwpck_require__(moduleId) { +/******/ // Check if module is in cache +/******/ var cachedModule = __webpack_module_cache__[moduleId]; +/******/ if (cachedModule !== undefined) { +/******/ return cachedModule.exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = __webpack_module_cache__[moduleId] = { +/******/ // no module.id needed +/******/ // no module.loaded needed +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ var threw = true; +/******/ try { +/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__); +/******/ threw = false; +/******/ } finally { +/******/ if(threw) delete __webpack_module_cache__[moduleId]; +/******/ } +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/************************************************************************/ +/******/ /* webpack/runtime/define property getters */ +/******/ (() => { +/******/ // define getter functions for harmony exports +/******/ __nccwpck_require__.d = (exports, definition) => { +/******/ for(var key in definition) { +/******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) { +/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] }); +/******/ } +/******/ } +/******/ }; +/******/ })(); +/******/ +/******/ /* webpack/runtime/hasOwnProperty shorthand */ +/******/ 
(() => { +/******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop)) +/******/ })(); +/******/ +/******/ /* webpack/runtime/make namespace object */ +/******/ (() => { +/******/ // define __esModule on exports +/******/ __nccwpck_require__.r = (exports) => { +/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) { +/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' }); +/******/ } +/******/ Object.defineProperty(exports, '__esModule', { value: true }); +/******/ }; +/******/ })(); +/******/ +/******/ /* webpack/runtime/compat */ +/******/ +/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; +/******/ +/************************************************************************/ +var __webpack_exports__ = {}; +// This entry need to be wrapped in an IIFE because it need to be in strict mode. +(() => { +"use strict"; +// ESM COMPAT FLAG +__nccwpck_require__.r(__webpack_exports__); + +// EXPORTS +__nccwpck_require__.d(__webpack_exports__, { + "invokeWith": () => (/* binding */ invokeWith) +}); + +;// CONCATENATED MODULE: external "node:fs" +const external_node_fs_namespaceObject = require("node:fs"); +;// CONCATENATED MODULE: external "node:path" +const external_node_path_namespaceObject = require("node:path"); +;// CONCATENATED MODULE: external "node:url" +const external_node_url_namespaceObject = require("node:url"); +// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js +var core = __nccwpck_require__(2186); +// EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js +var exec = __nccwpck_require__(1514); +// EXTERNAL MODULE: ./node_modules/@actions/tool-cache/lib/tool-cache.js +var tool_cache = __nccwpck_require__(7784); +// EXTERNAL MODULE: ./node_modules/toml/index.js +var node_modules_toml = __nccwpck_require__(4920); +;// CONCATENATED MODULE: ./.github/bootstrap/bootstrap.ts +/** + * IMPORTANT! If you change this file be sure to regenerate the compiled version with `npm run build` + */ + +var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; + + + + + + + +function invokeWith(getArgs) { + executeRustBinaryAction(getArgs).catch(e => { + if (e instanceof Error) { + (0,core.setFailed)(e.message); + } + }); +} +function executeRustBinaryAction(getArgs) { + return __awaiter(this, void 0, void 0, function* () { + (0,core.startGroup)('Bootstrapping'); + const { platform, env } = process; + const tempDirectory = env.RUNNER_TEMP; + if (platform !== 'win32' && platform !== 'darwin' && platform !== 'linux') { + throw new Error(`Unsupported platform: ${platform}`); + } + const toml = (0,node_modules_toml.parse)((0,external_node_fs_namespaceObject.readFileSync)((0,external_node_path_namespaceObject.join)(__dirname, "../../Cargo.toml"), 'utf-8')); + const { name } = toml.bin[0]; + (0,core.info)(`name: ${name}`); + const { repository, version } = toml.package; + (0,core.info)(`version: ${version}\nrepository: ${repository}`); + const binaryName = platform === 'win32' ? `${name}.exe` : name; + (0,core.info)(`binaryName: ${binaryName}`); + const githubOrgAndName = (0,external_node_url_namespaceObject.parse)(repository).pathname + .replace(/^\//, '') + .replace(/\.git$/, ''); + // now we should be able to build up our download url which looks something like this: + // https://github.com/heroku/languages-github-actions/releases/download/v0.0.0/actions-v0.0.0-darwin-x64.tar.gz + const releaseUrl = `https://github.com/${githubOrgAndName}/releases/download/v${version}/${name}-v${version}-${platform}-x64.tar.gz`; + (0,core.info)(`releaseUrl: ${releaseUrl}`); + let cachedPath = (0,tool_cache.find)(githubOrgAndName, version); + (0,core.info)(`is cached: ${cachedPath ? 
true : false}`); + if (!cachedPath) { + const downloadPath = yield (0,tool_cache.downloadTool)(releaseUrl); + (0,core.info)(`downloadPath: ${downloadPath}`); + const extractPath = yield (0,tool_cache.extractTar)(downloadPath, tempDirectory); + (0,core.info)(`extractPath: ${extractPath}`); + const extractedFile = (0,external_node_path_namespaceObject.join)(extractPath, binaryName); + (0,core.info)(`extractedFile: ${extractedFile}`); + cachedPath = yield (0,tool_cache.cacheFile)(extractedFile, binaryName, githubOrgAndName, version); + } + (0,core.info)(`using cache path: ${cachedPath}`); + const rustBinary = (0,external_node_path_namespaceObject.join)(cachedPath, name); + (0,core.info)(`using binary: ${rustBinary}`); + const args = getArgs({ getInput: core.getInput, getBooleanInput: core.getBooleanInput, getMultilineInput: core.getMultilineInput }); + (0,core.info)(`using args: ${args.join(" ")}`); + (0,core.endGroup)(); + yield (0,exec.exec)(rustBinary, args); + }); +} + +})(); + +module.exports = __webpack_exports__; +/******/ })() +; \ No newline at end of file diff --git a/.github/workflows/build-test-release.yml b/.github/workflows/build-test-release.yml new file mode 100644 index 00000000..e72c60c0 --- /dev/null +++ b/.github/workflows/build-test-release.yml @@ -0,0 +1,126 @@ +name: Build, Test, and Release Shared GitHub Actions + +on: + pull_request: + push: + branches: ["main"] + +permissions: + contents: write + +env: + CARGO_TERM_COLOR: always + +jobs: + lint: + name: Lint + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Update Rust toolchain + run: rustup update + - name: Rust Cache + uses: Swatinem/rust-cache@v2 + - name: Clippy + run: cargo clippy --all-targets --all-features -- --deny warnings + - name: rustfmt + run: cargo fmt -- --check + - name: Check docs + run: RUSTDOCFLAGS="-D warnings" cargo doc --all-features --document-private-items --no-deps + + test: + name: Test + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Update Rust toolchain + run: rustup update + - name: Rust Cache + uses: Swatinem/rust-cache@v2 + - name: Test + run: cargo test --all-features + + check-bootstrap: + name: Check Bootstrap Script + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Setup Node + uses: actions/setup-node@v3 + with: + node-version: lts/* + cache: npm + - name: Install Node dependencies + run: npm ci + - name: Rebuild bootstrap script + run: npm run build + - name: Check bootstrap script for uncommitted changes + run: if [ -n "$(git status --porcelain)" ]; then exit 1; fi + + release: + name: Release / ${{ matrix.os }} + if: success() && github.ref == 'refs/heads/main' + needs: + - lint + - test + - check-bootstrap + strategy: + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Update Rust toolchain + run: rustup update + - name: Rust Cache + uses: Swatinem/rust-cache@v2 + - name: Build + run: cargo build --release + - name: Get Cargo Metadata + id: metadata + shell: bash + run: | + echo "version=$(cargo metadata --format-version=1 --no-deps | jq -r '.packages[-1].version')" >> "$GITHUB_OUTPUT" + echo "name=$(cargo metadata --format-version=1 --no-deps | jq -r '.packages[-1].targets[-1].name')" >> "$GITHUB_OUTPUT" + - name: Bundle Release Asset + id: asset + shell: bash + env: + NAME: ${{ steps.metadata.outputs.name }} + VERSION: ${{ 
steps.metadata.outputs.version }} + OS: ${{ matrix.os }} + run: | + export ARCH="linux" && + if [ "$OS" = "macos-latest" ]; then export ARCH="darwin"; fi && + if [ "$OS" = "windows-latest" ]; then export ARCH="win32"; fi + + export ASSET_NAME="${NAME}-v${VERSION}-${ARCH}-x64.tar.gz" + + if [ "$OS" = "windows-latest" ]; then export RUNNER_TEMP="$(echo $RUNNER_TEMP | sed 's/\\/\//g')"; fi + + export ASSET_PATH="${RUNNER_TEMP}/${ASSET_NAME}" && + if [ "$OS" = "windows-latest" ]; then export ASSET_PATH="${RUNNER_TEMP}/${ASSET_NAME}"; fi + + if [ "$OS" = "windows-latest" ]; then export NAME="${NAME}.exe"; fi + + export BINARY="./target/release/${NAME}" + + if [ "$OS" != "windows-latest" ]; then strip "$BINARY"; fi + if [ "$OS" != "windows-latest" ]; then tar -czf "$ASSET_PATH" -C "./target/release" "$NAME"; fi + + if [ "$OS" = "windows-latest" ]; then tar --force-local -czf "$ASSET_PATH" -C "./target/release" "$NAME"; fi + + echo "path=${ASSET_PATH}" >> "$GITHUB_OUTPUT" + - name: Create Release + uses: softprops/action-gh-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: v${{ steps.metadata.outputs.version }} + files: ${{ steps.asset.outputs.path }} + fail_on_unmatched_files: true + generate_release_notes: true diff --git a/.github/workflows/buildpacks-prepare-release.yml b/.github/workflows/buildpacks-prepare-release.yml new file mode 100644 index 00000000..4d949787 --- /dev/null +++ b/.github/workflows/buildpacks-prepare-release.yml @@ -0,0 +1,65 @@ +name: Prepare Buildpack Release + +on: + workflow_call: + inputs: + bump: + description: "Bump" + required: true + type: string + app_id: + description: "GitHub application ID" + type: string + default: ${{ vars.LINGUIST_GH_APP_ID }} + secrets: + app_private_key: + description: "Private key of the GitHub application" + type: string + default: ${{ secrets.LINGUIST_GH_PRIVATE_KEY }} + +permissions: + contents: write + pull-requests: write + +jobs: + prepare-release: + name: Prepare Release + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Bump versions and update changelogs + id: prepare + uses: heroku/languages-github-actions/.github/actions/prepare-release@main + with: + bump: ${{ inputs.bump }} + + - name: Generate changelog + id: generate-changelog + uses: heroku/languages-github-actions/.github/actions/generate-changelog@main + with: + version: ${{ steps.prepare.outputs.to_version }} + + - uses: heroku/use-app-token-action@main + id: generate-token + with: + app_id: ${{ inputs.app_id }} + private_key: ${{ secrets.app_private_key }} + + - name: Create pull request + id: pr + uses: peter-evans/create-pull-request@v5 + with: + token: ${{ steps.generate-token.outputs.token }} + title: Prepare release v${{ steps.prepare.outputs.to_version }} + commit-message: Prepare release v${{ steps.prepare.outputs.to_version }} + branch: prepare/v${{ steps.prepare.outputs.to_version }} + body: ${{ steps.generate-changelog.outputs.changelog }} + labels: "automation" + + - name: Configure pull request + if: steps.pr.outputs.pull-request-operation == 'created' + run: gh pr merge --auto --squash "${{ steps.pr.outputs.pull-request-number }}" + env: + GH_TOKEN: ${{ steps.generate-token.outputs.token }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..5a010f1e --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +/target +/.idea +/node_modules diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 00000000..43f61c8e --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,886 @@ +# 
This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "aho-corasick" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +dependencies = [ + "memchr", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bumpalo" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" + +[[package]] +name = "camino" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c530edf18f37068ac2d977409ed5cd50d53d73bc653c7647b48eb78976ac9ae2" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-platform" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a" +dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-integer", + "num-traits", + "time", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "clap" +version = "4.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "046ae530c528f252094e4a77886ee1374437744b2bff1497aa898bbddbbb29b3" +dependencies = [ + "clap_builder", + "clap_derive", + "once_cell", +] + +[[package]] +name = "clap_builder" +version = "4.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"223163f58c9a40c3b0a43e1c4b50a9ce09f007ea2cb1ec258a687945b4b7929f" +dependencies = [ + "bitflags", + "clap_lex", +] + +[[package]] +name = "clap_derive" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9644cd56d6b87dbe899ef8b053e331c0637664e9e21a33dfcdc36093f5c5c4" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.11", +] + +[[package]] +name = "clap_lex" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1" + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + +[[package]] +name = "cxx" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f61f1b6389c3fe1c316bf8a4dccc90a38208354b330925bce1f74a6c4756eb93" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12cee708e8962df2aeb38f594aae5d827c022b6460ac71a7a3e2c3c2aae5a07b" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2", + "quote", + "scratch", + "syn 2.0.11", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7944172ae7e4068c533afbb984114a56c46e9ccddda550499caa222902c7f7bb" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.11", +] + +[[package]] +name = "either" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" + +[[package]] +name = "fancy-regex" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2" +dependencies = [ + "bit-set", + "regex", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "getrandom" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "heck" +version = "0.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "iana-time-zone" +version = "0.1.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c17cc76786e99f8d2f055c11159e7f0091c42474dcc3189fbab96072e873e6d" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "itoa" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" + +[[package]] +name = "js-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "languages-github-actions" +version = "0.0.1" +dependencies = [ + "chrono", + "clap", + "indexmap", + "lazy_static", + "libcnb-data", + "libcnb-package", + "markdown", + "rand", + "regex", + "serde", + "serde_json", + "toml_edit", + "uriparse", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.140" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c" + +[[package]] +name = "libcnb-data" +version = "0.12.0" +source = "git+https://github.com/heroku/libcnb.rs?branch=feature/W-12731729_libcnb_package#eaf3393a5e96f930e4c3bbd3da417d05ab2b7f90" +dependencies = [ + "fancy-regex", + "libcnb-proc-macros", + "serde", + "thiserror", + "toml", + "uriparse", +] + +[[package]] +name = "libcnb-package" +version = "0.12.0" +source = "git+https://github.com/heroku/libcnb.rs?branch=feature/W-12731729_libcnb_package#eaf3393a5e96f930e4c3bbd3da417d05ab2b7f90" +dependencies = [ + "cargo_metadata", + "libcnb-data", + "petgraph", + "toml", + "which", +] + +[[package]] +name = "libcnb-proc-macros" +version = "0.12.0" +source = "git+https://github.com/heroku/libcnb.rs?branch=feature/W-12731729_libcnb_package#eaf3393a5e96f930e4c3bbd3da417d05ab2b7f90" +dependencies = [ + "cargo_metadata", + "fancy-regex", + "quote", + "syn 2.0.11", +] + +[[package]] +name = "link-cplusplus" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" +dependencies = [ + "cc", +] + +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "markdown" +version = "1.0.0-alpha.9" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "1c45dae11dd8af468c41201ace44dbbb3b1148215ac593cb8f0967b8d8d4b66c" +dependencies = [ + "unicode-id", +] + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" + +[[package]] +name = "petgraph" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "proc-macro2" +version = "1.0.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e472a104799c74b514a57226160104aa483546de37e839ec50e3c2e41dd87534" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "regex" +version = "1.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" + +[[package]] +name = "ryu" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" + +[[package]] +name = "scratch" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1" + +[[package]] +name = 
"semver" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" +dependencies = [ + "serde", +] + +[[package]] +name = "serde" +version = "1.0.160" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.160" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.11", +] + +[[package]] +name = "serde_json" +version = "1.0.96" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_spanned" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93107647184f6027e3b7dcb2e11034cf95ffa1e3a682c67951963ac69c1c007d" +dependencies = [ + "serde", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21e3787bb71465627110e7d87ed4faaa36c1f61042ee67badb9e2ef173accc40" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "termcolor" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "thiserror" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.11", +] + +[[package]] +name = "time" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + +[[package]] +name = "toml" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b403acf6f2bb0859c93c7f0d967cb4a75a7ac552100f9322faf64dc047669b21" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a76a9312f5ba4c2dec6b9161fdf25d87ad8a09256ccea5a556fef03c706a10f" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.19.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + 
"winnow", +] + +[[package]] +name = "unicode-id" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d70b6494226b36008c8366c288d77190b3fad2eb4c10533139c1c1f461127f1a" + +[[package]] +name = "unicode-ident" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "uriparse" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0200d0fc04d809396c2ad43f3c95da3582a2556eba8d453c1087f4120ee352ff" +dependencies = [ + "fnv", + "lazy_static", +] + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 1.0.109", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdacb41e6a96a052c6cb63a144f24900236121c6f63f4f8219fef5977ecb0c25" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "winnow" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61de7bac303dc551fe038e2b3cef0f571087a47571ea6e79a87692ac99b99699" +dependencies = [ + "memchr", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 00000000..6d50dd56 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "languages-github-actions" +description = "GitHub Actions for the Languages Team" +version = "0.0.1" +repository = "https://github.com/heroku/languages-github-actions.git" +rust-version = "1.66" +edition = "2021" +publish = false + +[[bin]] +name = "actions" +path = "src/main.rs" + +[dependencies] +chrono = "0.4.24" +clap = { version = "4.2.1", default-features = false, features = [ + "derive", + "error-context", + "help", + "std", + "usage", +] } +indexmap = "1.9.3" +lazy_static = "1.4.0" +libcnb-data = { git = "https://github.com/heroku/libcnb.rs", branch = "feature/W-12731729_libcnb_package" } +libcnb-package = { git = "https://github.com/heroku/libcnb.rs", 
branch = "feature/W-12731729_libcnb_package" } +markdown = "1.0.0-alpha.9" +rand = "0.8.5" +regex = "1.8.3" +serde = { version = "1.0.145", features = ["derive"] } +serde_json = "1.0.96" +toml_edit = "0.19.10" +uriparse = "0.6.4" + +[dev-dependencies] diff --git a/README.md b/README.md index 8b137891..1dfb8def 100644 --- a/README.md +++ b/README.md @@ -1 +1,176 @@ +# Languages GitHub Actions +A set of custom GitHub Actions and reusable workflow used by the Languages Team. + +## Actions + +### Generate Buildpack Matrix + +This action generates a list of buildpack `id` and `path` values. E.g.; + +```json +[ + { + "id": "some/buildpack-id", + "path": "/path/to/some/buildpack" + }, + ... +] +``` + +This list can be used in subsequent jobs with `jobs..strategy.matrix.include` +which accepts a list of key/value objects and will create a single job per buildpack. + +See https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#expanding-or-adding-matrix-configurations + +#### Usage + +```yaml +- name: Generate Buildpack Matrix + uses: heroku/languages-github-actions/.github/actions/generate-buildpack-matrix@main +``` + +You can also pin to a [specific release](/releases) version in the format `@v{major}.{minor}.{patch}` + +#### Outputs + +| Name | Description | +|--------------|-----------------------------------------------------------------| +| `buildpacks` | The list of buildpack (id, path) keys formatted as a JSON array | + +### Generate Changelog + +Generates an aggregated changelist from all buildpacks within a project. + +#### Usage + +```yaml +- name: Generate Changelog + uses: heroku/languages-github-actions/.github/actions/generate-changelog@main +``` + +You can also pin to a [specific release](/releases) version in the format `@v{major}.{minor}.{patch}` + +#### Inputs + +| Name | Description | Required | Default | +|--------------|------------------------------------------------------------------|----------|---------| +| `unreleased` | If the changelog should be generated from the unreleased section | false | | +| `version` | If the changelog should be generated from a version section | false | | + +#### Outputs + +| Name | Description | +|-------------|--------------------------------------| +| `changelog` | Markdown content listing the changes | + +### Prepare Release + +Bumps the version of each detected buildpack and adds an entry for any unreleased changes from the changelog. + +#### Usage + +```yaml +- name: Prepare Buildpack Release + uses: heroku/languages-github-actions/.github/actions/prepare-release@main +``` + +You can also pin to a [specific release](/releases) version in the format `@v{major}.{minor}.{patch}` + +#### Inputs + +| Name | Description | Required | Default | +|------------------|--------------------------------------------------------------------------|----------|-----------------------------------------------| +| `bump` | Which coordinate should be incremented? (major, minor, patch) | true | | +| `repository_url` | The URL of the repository (e.g.; https://github.com/octocat/Hello-World) | false | `https://github.com/${{ github.repository }}` | + +#### Outputs + +| Name | Description | +|----------------|----------------------| +| `from_version` | The previous version | +| `to_version` | The next version | + +### Update Builder + +Updates all references to a buildpack in heroku/builder for the given list of builders. 
+ +#### Usage + +```yaml +- name: Update Builder + uses: heroku/languages-github-actions/.github/actions/update-builder@main +``` + +You can also pin to a [specific release](/releases) version in the format `@v{major}.{minor}.{patch}` + +#### Inputs + +| Name | Description | Required | Default | +|---------------------|------------------------------------------------------|----------|--------------------| +| `buildpack_id` | The id of the buildpack | true | | +| `buildpack_version` | The version of the buildpack | true | | +| `buildpack_uri` | The URI of the published buildpack | true | | +| `builders` | A comma-separated list of builders to update | true | | +| `path` | Relative path under `GITHUB_WORKSPACE` to execute in | false | `GITHUB_WORKSPACE` | + +## Development + +Custom actions are written in [Rust](https://www.rust-lang.org/) and compiled into a command-line application that +exposes each action as a sub-command. + +```shell +Usage: actions <COMMAND> + +Commands: + generate-buildpack-matrix Generates a JSON list of {id, path} entries for each buildpack detected + generate-changelog Generates an aggregated changelist from all buildpacks within a project. + prepare-release Bumps the version of each detected buildpack and adds an entry for any unreleased changes from the changelog + update-builder Updates all references to a buildpack in heroku/builder for the given list of builders + help Print this message or the help of the given subcommand(s) +``` + +This `actions` command is bootstrapped into the GitHub Actions environment using the script found at +[`.github/bootstrap/bootstrap.ts`](.github/bootstrap/bootstrap.ts), which attempts to download this command from this +repository's [releases](/releases) page. + +> **Note** +> +> Any changes made to this bootstrap script will need to be recompiled by running `npm run build` and committing the bundled +> script into GitHub. You'll need Node and NPM installed to do this. + +Each of the custom actions must import this bootstrap script to obtain access to the `actions` command line application and +then it must provide a list of arguments to invoke the target action. 
+ +For example, this would invoke `actions generate-buildpack-matrix`: + +```javascript +require('../../bootstrap').invokeWith(() => { + return ['generate-buildpack-matrix'] +}) +``` + +And actions that declare inputs can forward those along to the command: + +```javascript +require('../../bootstrap').invokeWith(({ getInput }) => { + return [ + 'update-builder', + + '--path', + getInput('path', { required: true }), + + '--buildpack-id', + getInput('buildpack_id', { required: true }), + + '--buildpack-version', + getInput('buildpack_version', { required: true }), + + '--buildpack-uri', + getInput('buildpack_uri', { required: true }), + + '--builders', + getInput('builders', { required: true }), + ] +}) +``` diff --git a/dependabot.yml b/dependabot.yml new file mode 100644 index 00000000..83c70d54 --- /dev/null +++ b/dependabot.yml @@ -0,0 +1,14 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + - package-ecosystem: "cargo" + directory: "/" + schedule: + interval: "monthly" + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "monthly" diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..a258dc87 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,129 @@ +{ + "name": "languages-github-actions", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "@actions/core": "^1.10.0", + "@actions/exec": "^1.1.1", + "@actions/tool-cache": "^2.0.1", + "toml": "^3.0.0" + }, + "devDependencies": { + "@types/node": "^18.16.0", + "@vercel/ncc": "^0.36.1", + "typescript": "^5.0.4" + } + }, + "node_modules/@actions/core": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", + "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", + "dependencies": { + "@actions/http-client": "^2.0.1", + "uuid": "^8.3.2" + } + }, + "node_modules/@actions/exec": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", + "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", + "dependencies": { + "@actions/io": "^1.0.1" + } + }, + "node_modules/@actions/http-client": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz", + "integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==", + "dependencies": { + "tunnel": "^0.0.6" + } + }, + "node_modules/@actions/io": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", + "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==" + }, + "node_modules/@actions/tool-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-2.0.1.tgz", + "integrity": "sha512-iPU+mNwrbA8jodY8eyo/0S/QqCKDajiR8OxWTnSk/SnYg0sj8Hp4QcUEVC1YFpHWXtrfbQrE13Jz4k4HXJQKcA==", + "dependencies": { + "@actions/core": "^1.2.6", + "@actions/exec": "^1.0.0", + "@actions/http-client": "^2.0.1", + "@actions/io": "^1.1.1", + "semver": "^6.1.0", + "uuid": "^3.3.2" + } + }, + "node_modules/@actions/tool-cache/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + 
"deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/@types/node": { + "version": "18.16.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.16.0.tgz", + "integrity": "sha512-BsAaKhB+7X+H4GnSjGhJG9Qi8Tw+inU9nJDwmD5CgOmBLEI6ArdhikpLX7DjbjDRDTbqZzU2LSQNZg8WGPiSZQ==", + "dev": true + }, + "node_modules/@vercel/ncc": { + "version": "0.36.1", + "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.1.tgz", + "integrity": "sha512-S4cL7Taa9yb5qbv+6wLgiKVZ03Qfkc4jGRuiUQMQ8HGBD5pcNRnHeYM33zBvJE4/zJGjJJ8GScB+WmTsn9mORw==", + "dev": true, + "bin": { + "ncc": "dist/ncc/cli.js" + } + }, + "node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/toml": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz", + "integrity": "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==" + }, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, + "node_modules/typescript": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.4.tgz", + "integrity": "sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=12.20" + } + }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 00000000..54c9442e --- /dev/null +++ b/package.json @@ -0,0 +1,17 @@ +{ + "private": true, + "scripts": { + "build": "ncc build .github/bootstrap/bootstrap.ts -o .github/bootstrap" + }, + "dependencies": { + "@actions/core": "^1.10.0", + "@actions/exec": "^1.1.1", + "@actions/tool-cache": "^2.0.1", + "toml": "^3.0.0" + }, + "devDependencies": { + "@types/node": "^18.16.0", + "@vercel/ncc": "^0.36.1", + "typescript": "^5.0.4" + } +} diff --git a/src/changelog.rs b/src/changelog.rs new file mode 100644 index 00000000..bbb2fe6e --- /dev/null +++ b/src/changelog.rs @@ -0,0 +1,628 @@ +use chrono::{DateTime, LocalResult, TimeZone, Utc}; +use indexmap::IndexMap; +use lazy_static::lazy_static; +use markdown::mdast::Node; +use markdown::{to_mdast, ParseOptions}; +use regex::Regex; +use std::cmp::Ordering; +use std::collections::HashMap; +use std::fmt::{Display, Formatter}; +use std::num::ParseIntError; + +#[derive(Debug, Eq, PartialEq)] +pub(crate) struct Changelog { + pub(crate) unreleased: Option, + pub(crate) releases: IndexMap, +} + +impl TryFrom<&str> for Changelog { + type Error = ChangelogError; + + fn try_from(value: &str) -> Result { + lazy_static! 
{ + static ref UNRELEASED_HEADER: Regex = + Regex::new(r"(?i)^\[?unreleased]?$").expect("Should be a valid regex"); + static ref VERSION_HEADER: Regex = + Regex::new(r"^\[?(\d+\.\d+\.\d+)]?.*(\d{4})[-/](\d{2})[-/](\d{2})") + .expect("Should be a valid regex"); + } + + let changelog_ast = + to_mdast(value, &ParseOptions::default()).map_err(ChangelogError::Parse)?; + + let mut current_header: Option<String> = None; + let mut headers: Vec<String> = vec![]; + let mut body_nodes_by_header: HashMap<String, Vec<&Node>> = HashMap::new(); + + if let Node::Root(root) = changelog_ast { + for child in &root.children { + if let Node::Heading(heading) = child { + match heading.depth.cmp(&2) { + Ordering::Equal => { + headers.push(child.to_string()); + current_header = Some(child.to_string()); + } + Ordering::Less => { + current_header = None; + } + _ => { + if let Some(header) = &current_header { + let body_nodes = body_nodes_by_header + .entry(header.clone()) + .or_insert_with(Vec::new); + body_nodes.push(child); + } + } + } + } else if let Node::Definition(_) = child { + // ignore any defined links, these will be regenerated at display time + } else if let Some(header) = &current_header { + let body_nodes = body_nodes_by_header + .entry(header.clone()) + .or_insert_with(Vec::new); + body_nodes.push(child); + } + } + + let mut unreleased = None; + let mut releases = IndexMap::new(); + + for header in headers { + let empty_nodes = vec![]; + let body_nodes = body_nodes_by_header.get(&header).unwrap_or(&empty_nodes); + + let start = body_nodes + .iter() + .next() + .map(|node| node.position().map(|position| position.start.offset)) + .unwrap_or_default(); + let end = body_nodes + .iter() + .last() + .map(|node| node.position().map(|position| position.end.offset)) + .unwrap_or_default(); + + let body = if let (Some(start), Some(end)) = (start, end) { + &value[start..end] + } else { + "" + }; + + let body = body.trim().to_string(); + + if UNRELEASED_HEADER.is_match(&header) && !body.is_empty() { + unreleased = Some(body); + } else if let Some(captures) = VERSION_HEADER.captures(&header) { + let version = captures[1].to_string(); + let year = captures[2] + .parse::<i32>() + .map_err(ChangelogError::ParseReleaseEntryYear)?; + let month = captures[3] + .parse::<u32>() + .map_err(ChangelogError::ParseReleaseEntryMonth)?; + let day = captures[4] + .parse::<u32>() + .map_err(ChangelogError::ParseReleaseEntryDay)?; + let date = match Utc.with_ymd_and_hms(year, month, day, 0, 0, 0) { + LocalResult::None => Err(ChangelogError::InvalidReleaseDate), + LocalResult::Single(value) => Ok(value), + LocalResult::Ambiguous(_, _) => Err(ChangelogError::AmbiguousReleaseDate), + }?; + let release_entry = ReleaseEntry { + version: version.clone(), + body, + date, + }; + releases.insert(version, release_entry); + } + } + + Ok(Changelog { + unreleased, + releases, + }) + } else { + Err(ChangelogError::NoRootNode) + } + } +} + +impl Display for Changelog { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + r#" +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ "# + .trim() + )?; + + if let Some(unreleased) = &self.unreleased { + write!(f, "\n\n## [Unreleased]\n\n{}", unreleased.trim())?; + } else { + write!(f, "\n\n## [Unreleased]")?; + } + + for entry in self.releases.values() { + write!( + f, + "\n\n## [{}] - {}\n\n{}", + entry.version, + entry.date.format("%Y-%m-%d"), + entry.body.trim() + )?; + } + + writeln!(f) + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub(crate) struct ReleaseEntry { + pub(crate) version: String, + pub(crate) date: DateTime, + pub(crate) body: String, +} + +#[derive(Debug)] +pub(crate) enum ChangelogError { + NoRootNode, + Parse(String), + ParseReleaseEntryYear(ParseIntError), + ParseReleaseEntryMonth(ParseIntError), + ParseReleaseEntryDay(ParseIntError), + InvalidReleaseDate, + AmbiguousReleaseDate, +} + +impl Display for ChangelogError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + ChangelogError::NoRootNode => { + write!(f, "No root node in changelog markdown") + } + ChangelogError::Parse(error) => { + write!(f, "Could not parse changelog - {error}") + } + ChangelogError::ParseReleaseEntryYear(error) => { + write!(f, "Invalid year in release entry - {error}") + } + ChangelogError::ParseReleaseEntryMonth(error) => { + write!(f, "Invalid month in release entry - {error}") + } + ChangelogError::ParseReleaseEntryDay(error) => { + write!(f, "Invalid day in release entry - {error}") + } + ChangelogError::InvalidReleaseDate => { + write!(f, "Invalid date in release entry") + } + ChangelogError::AmbiguousReleaseDate => { + write!(f, "Ambiguous date in release entry") + } + } + } +} + +pub(crate) fn generate_release_declarations>( + changelog: &Changelog, + repository: S, +) -> String { + let repository = repository.into(); + let mut versions = changelog.releases.keys(); + let mut declarations = vec![]; + let mut previous_version = versions.next(); + + declarations.push(if let Some(version) = previous_version { + format!("[unreleased]: {repository}/compare/v{version}...HEAD") + } else { + format!("[unreleased]: {repository}") + }); + + for next_version in versions { + if let Some(version) = previous_version { + declarations.push(format!( + "[{version}]: {repository}/compare/v{next_version}...v{version}" + )); + } + previous_version = Some(next_version) + } + + if let Some(version) = previous_version { + declarations.push(format!("[{version}]: {repository}/releases/tag/v{version}")); + } + + declarations.join("\n") +} + +#[cfg(test)] +mod test { + use crate::changelog::{generate_release_declarations, Changelog}; + use chrono::{TimeZone, Utc}; + + #[test] + fn test_keep_a_changelog_unreleased_entry_with_changes_parsing() { + let changelog = Changelog::try_from("## [Unreleased]\n\n- Some changes").unwrap(); + assert_eq!(changelog.unreleased, Some("- Some changes".to_string())); + } + + #[test] + fn test_keep_a_changelog_unreleased_entry_with_no_changes_parsing() { + let changelog = Changelog::try_from(KEEP_A_CHANGELOG_1_0_0).unwrap(); + assert_eq!(changelog.unreleased, None); + } + + #[test] + fn test_keep_a_changelog_release_entry_parsing() { + let changelog = Changelog::try_from(KEEP_A_CHANGELOG_1_0_0).unwrap(); + let release_entry = changelog.releases.get("1.1.1").unwrap(); + assert_eq!(release_entry.version, "1.1.1"); + assert_eq!( + release_entry.date, + Utc.with_ymd_and_hms(2023, 3, 5, 0, 0, 0).unwrap() + ); + assert_eq!( + release_entry.body, + r#"### Added + +- Arabic translation (#444). +- v1.1 French translation. +- v1.1 Dutch translation (#371). 
+- v1.1 Russian translation (#410). +- v1.1 Japanese translation (#363). +- v1.1 Norwegian Bokmål translation (#383). +- v1.1 "Inconsistent Changes" Turkish translation (#347). +- Default to most recent versions available for each languages +- Display count of available translations (26 to date!) +- Centralize all links into `/data/links.json` so they can be updated easily + +### Fixed + +- Improve French translation (#377). +- Improve id-ID translation (#416). +- Improve Persian translation (#457). +- Improve Russian translation (#408). +- Improve Swedish title (#419). +- Improve zh-CN translation (#359). +- Improve French translation (#357). +- Improve zh-TW translation (#360, #355). +- Improve Spanish (es-ES) transltion (#362). +- Foldout menu in Dutch translation (#371). +- Missing periods at the end of each change (#451). +- Fix missing logo in 1.1 pages +- Display notice when translation isn't for most recent version +- Various broken links, page versions, and indentations. + +### Changed + +- Upgrade dependencies: Ruby 3.2.1, Middleman, etc. + +### Removed + +- Unused normalize.css file +- Identical links assigned in each translation file +- Duplicate index file for the english version"# + ); + } + + #[test] + fn test_release_entry_parsing_with_alternate_date_format() { + let changelog = Changelog::try_from( + "## [Unreleased]\n\n## [1.0.10] 2023/05/10\n- Upgrade libcnb to 0.12.0", + ) + .unwrap(); + let release_entry = changelog.releases.get("1.0.10").unwrap(); + assert_eq!(release_entry.version, "1.0.10"); + assert_eq!( + release_entry.date, + Utc.with_ymd_and_hms(2023, 5, 10, 0, 0, 0).unwrap() + ); + assert_eq!(release_entry.body, "- Upgrade libcnb to 0.12.0"); + } + + #[test] + fn test_keep_a_changelog_parses_all_release_entries() { + let changelog = Changelog::try_from(KEEP_A_CHANGELOG_1_0_0).unwrap(); + let releases = changelog.releases.keys().collect::>(); + assert_eq!( + releases, + vec![ + "1.1.1", "1.1.0", "1.0.0", "0.3.0", "0.2.0", "0.1.0", "0.0.8", "0.0.7", "0.0.6", + "0.0.5", "0.0.4", "0.0.3", "0.0.2", "0.0.1", + ] + ); + } + + #[test] + fn test_keep_a_changelog_to_string() { + let changelog = Changelog::try_from(KEEP_A_CHANGELOG_1_0_0).unwrap(); + assert_eq!(changelog.to_string(), KEEP_A_CHANGELOG_1_0_0); + } + + #[test] + fn test_generate_release_declarations() { + let changelog = Changelog::try_from(KEEP_A_CHANGELOG_1_0_0).unwrap(); + let declarations = generate_release_declarations( + &changelog, + "https://github.com/olivierlacan/keep-a-changelog", + ); + assert_eq!( + declarations, + r#"[unreleased]: https://github.com/olivierlacan/keep-a-changelog/compare/v1.1.1...HEAD +[1.1.1]: https://github.com/olivierlacan/keep-a-changelog/compare/v1.1.0...v1.1.1 +[1.1.0]: https://github.com/olivierlacan/keep-a-changelog/compare/v1.0.0...v1.1.0 +[1.0.0]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.3.0...v1.0.0 +[0.3.0]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.2.0...v0.3.0 +[0.2.0]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.1.0...v0.2.0 +[0.1.0]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.0.8...v0.1.0 +[0.0.8]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.0.7...v0.0.8 +[0.0.7]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.0.6...v0.0.7 +[0.0.6]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.0.5...v0.0.6 +[0.0.5]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.0.4...v0.0.5 +[0.0.4]: 
https://github.com/olivierlacan/keep-a-changelog/compare/v0.0.3...v0.0.4 +[0.0.3]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.0.2...v0.0.3 +[0.0.2]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.0.1...v0.0.2 +[0.0.1]: https://github.com/olivierlacan/keep-a-changelog/releases/tag/v0.0.1"# + ); + } + + #[test] + fn test_generate_release_declarations_with_no_releases() { + let changelog = Changelog::try_from("[Unreleased]").unwrap(); + let declarations = generate_release_declarations( + &changelog, + "https://github.com/olivierlacan/keep-a-changelog", + ); + assert_eq!( + declarations, + "[unreleased]: https://github.com/olivierlacan/keep-a-changelog" + ); + } + + #[test] + fn test_generate_release_declarations_with_only_one_release() { + let changelog = + Changelog::try_from("[Unreleased]\n## [0.0.1] - 2023-03-05\n\n- Some change\n") + .unwrap(); + let declarations = generate_release_declarations( + &changelog, + "https://github.com/olivierlacan/keep-a-changelog", + ); + assert_eq!( + declarations, + "[unreleased]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.0.1...HEAD\n[0.0.1]: https://github.com/olivierlacan/keep-a-changelog/releases/tag/v0.0.1" + ); + } + + const KEEP_A_CHANGELOG_1_0_0: &str = r#"# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +## [1.1.1] - 2023-03-05 + +### Added + +- Arabic translation (#444). +- v1.1 French translation. +- v1.1 Dutch translation (#371). +- v1.1 Russian translation (#410). +- v1.1 Japanese translation (#363). +- v1.1 Norwegian Bokmål translation (#383). +- v1.1 "Inconsistent Changes" Turkish translation (#347). +- Default to most recent versions available for each languages +- Display count of available translations (26 to date!) +- Centralize all links into `/data/links.json` so they can be updated easily + +### Fixed + +- Improve French translation (#377). +- Improve id-ID translation (#416). +- Improve Persian translation (#457). +- Improve Russian translation (#408). +- Improve Swedish title (#419). +- Improve zh-CN translation (#359). +- Improve French translation (#357). +- Improve zh-TW translation (#360, #355). +- Improve Spanish (es-ES) transltion (#362). +- Foldout menu in Dutch translation (#371). +- Missing periods at the end of each change (#451). +- Fix missing logo in 1.1 pages +- Display notice when translation isn't for most recent version +- Various broken links, page versions, and indentations. + +### Changed + +- Upgrade dependencies: Ruby 3.2.1, Middleman, etc. + +### Removed + +- Unused normalize.css file +- Identical links assigned in each translation file +- Duplicate index file for the english version + +## [1.1.0] - 2019-02-15 + +### Added + +- Danish translation (#297). +- Georgian translation from (#337). +- Changelog inconsistency section in Bad Practices. + +### Fixed + +- Italian translation (#332). +- Indonesian translation (#336). + +## [1.0.0] - 2017-06-20 + +### Added + +- New visual identity by [@tylerfortune8](https://github.com/tylerfortune8). +- Version navigation. +- Links to latest released version in previous versions. +- "Why keep a changelog?" section. +- "Who needs a changelog?" section. +- "How do I make a changelog?" section. +- "Frequently Asked Questions" section. 
+- New "Guiding Principles" sub-section to "How do I make a changelog?". +- Simplified and Traditional Chinese translations from [@tianshuo](https://github.com/tianshuo). +- German translation from [@mpbzh](https://github.com/mpbzh) & [@Art4](https://github.com/Art4). +- Italian translation from [@azkidenz](https://github.com/azkidenz). +- Swedish translation from [@magol](https://github.com/magol). +- Turkish translation from [@emreerkan](https://github.com/emreerkan). +- French translation from [@zapashcanon](https://github.com/zapashcanon). +- Brazilian Portuguese translation from [@Webysther](https://github.com/Webysther). +- Polish translation from [@amielucha](https://github.com/amielucha) & [@m-aciek](https://github.com/m-aciek). +- Russian translation from [@aishek](https://github.com/aishek). +- Czech translation from [@h4vry](https://github.com/h4vry). +- Slovak translation from [@jkostolansky](https://github.com/jkostolansky). +- Korean translation from [@pierceh89](https://github.com/pierceh89). +- Croatian translation from [@porx](https://github.com/porx). +- Persian translation from [@Hameds](https://github.com/Hameds). +- Ukrainian translation from [@osadchyi-s](https://github.com/osadchyi-s). + +### Changed + +- Start using "changelog" over "change log" since it's the common usage. +- Start versioning based on the current English version at 0.3.0 to help + translation authors keep things up-to-date. +- Rewrite "What makes unicorns cry?" section. +- Rewrite "Ignoring Deprecations" sub-section to clarify the ideal + scenario. +- Improve "Commit log diffs" sub-section to further argument against + them. +- Merge "Why can’t people just use a git log diff?" with "Commit log + diffs". +- Fix typos in Simplified Chinese and Traditional Chinese translations. +- Fix typos in Brazilian Portuguese translation. +- Fix typos in Turkish translation. +- Fix typos in Czech translation. +- Fix typos in Swedish translation. +- Improve phrasing in French translation. +- Fix phrasing and spelling in German translation. + +### Removed + +- Section about "changelog" vs "CHANGELOG". + +## [0.3.0] - 2015-12-03 + +### Added + +- RU translation from [@aishek](https://github.com/aishek). +- pt-BR translation from [@tallesl](https://github.com/tallesl). +- es-ES translation from [@ZeliosAriex](https://github.com/ZeliosAriex). + +## [0.2.0] - 2015-10-06 + +### Changed + +- Remove exclusionary mentions of "open source" since this project can + benefit both "open" and "closed" source projects equally. + +## [0.1.0] - 2015-10-06 + +### Added + +- Answer "Should you ever rewrite a change log?". + +### Changed + +- Improve argument against commit logs. +- Start following [SemVer](https://semver.org) properly. + +## [0.0.8] - 2015-02-17 + +### Changed + +- Update year to match in every README example. +- Reluctantly stop making fun of Brits only, since most of the world + writes dates in a strange way. + +### Fixed + +- Fix typos in recent README changes. +- Update outdated unreleased diff link. + +## [0.0.7] - 2015-02-16 + +### Added + +- Link, and make it obvious that date format is ISO 8601. + +### Changed + +- Clarified the section on "Is there a standard change log format?". + +### Fixed + +- Fix Markdown links to tag comparison URL with footnote-style links. + +## [0.0.6] - 2014-12-12 + +### Added + +- README section on "yanked" releases. + +## [0.0.5] - 2014-08-09 + +### Added + +- Markdown links to version tags on release headings. 
+- Unreleased section to gather unreleased changes and encourage note + keeping prior to releases. + +## [0.0.4] - 2014-08-09 + +### Added + +- Better explanation of the difference between the file ("CHANGELOG") + and its function "the change log". + +### Changed + +- Refer to a "change log" instead of a "CHANGELOG" throughout the site + to differentiate between the file and the purpose of the file — the + logging of changes. + +### Removed + +- Remove empty sections from CHANGELOG, they occupy too much space and + create too much noise in the file. People will have to assume that the + missing sections were intentionally left out because they contained no + notable changes. + +## [0.0.3] - 2014-08-09 + +### Added + +- "Why should I care?" section mentioning The Changelog podcast. + +## [0.0.2] - 2014-07-10 + +### Added + +- Explanation of the recommended reverse chronological release ordering. + +## [0.0.1] - 2014-05-31 + +### Added + +- This CHANGELOG file to hopefully serve as an evolving example of a + standardized open source project CHANGELOG. +- CNAME file to enable GitHub Pages custom domain. +- README now contains answers to common questions about CHANGELOGs. +- Good examples and basic guidelines, including proper date formatting. +- Counter-examples: "What makes unicorns cry?". +"#; +} diff --git a/src/commands/generate_buildpack_matrix/command.rs b/src/commands/generate_buildpack_matrix/command.rs new file mode 100644 index 00000000..83dfe390 --- /dev/null +++ b/src/commands/generate_buildpack_matrix/command.rs @@ -0,0 +1,36 @@ +use crate::commands::generate_buildpack_matrix::errors::Error; +use crate::github::actions; +use clap::Parser; +use libcnb_package::{find_buildpack_dirs, read_buildpack_data}; +use std::collections::HashMap; + +type Result<T, E = Error> = std::result::Result<T, E>; + +#[derive(Parser, Debug)] +#[command(author, version, about = "Generates a JSON list of {id, path} entries for each buildpack detected", long_about = None)] +pub(crate) struct GenerateBuildpackMatrixArgs; + +pub(crate) fn execute(_: GenerateBuildpackMatrixArgs) -> Result<()> { + let current_dir = std::env::current_dir().map_err(Error::GetCurrentDir)?; + + let buildpacks = find_buildpack_dirs(&current_dir, &[current_dir.join("target")]) + .map_err(|e| Error::FindingBuildpacks(current_dir.clone(), e))?
+ .into_iter() + .map(|dir| { + read_buildpack_data(&dir) + .map_err(Error::ReadingBuildpackData) + .map(|data| { + HashMap::from([ + ("id", data.buildpack_descriptor.buildpack().id.to_string()), + ("path", dir.to_string_lossy().to_string()), + ]) + }) + }) + .collect::<Result<Vec<_>>>()?; + + let json = serde_json::to_string(&buildpacks).map_err(Error::SerializingJson)?; + + actions::set_output("buildpacks", json).map_err(Error::SetActionOutput)?; + + Ok(()) +} diff --git a/src/commands/generate_buildpack_matrix/errors.rs b/src/commands/generate_buildpack_matrix/errors.rs new file mode 100644 index 00000000..1c290ce9 --- /dev/null +++ b/src/commands/generate_buildpack_matrix/errors.rs @@ -0,0 +1,61 @@ +use crate::github::actions::SetOutputError; +use libcnb_package::ReadBuildpackDataError; +use std::fmt::{Display, Formatter}; +use std::path::PathBuf; + +#[derive(Debug)] +pub(crate) enum Error { + GetCurrentDir(std::io::Error), + FindingBuildpacks(PathBuf, std::io::Error), + ReadingBuildpackData(ReadBuildpackDataError), + SerializingJson(serde_json::Error), + SetActionOutput(SetOutputError), +} + +impl Display for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Error::GetCurrentDir(error) => { + write!(f, "Failed to get current directory\nError: {error}") + } + + Error::FindingBuildpacks(path, error) => { + write!( + f, + "I/O error while finding buildpacks\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::SetActionOutput(set_output_error) => match set_output_error { + SetOutputError::Opening(error) | SetOutputError::Writing(error) => { + write!(f, "Could not write action output\nError: {error}") + } + }, + + Error::SerializingJson(error) => { + write!( + f, + "Could not serialize buildpacks into json\nError: {error}" + ) + } + + Error::ReadingBuildpackData(error) => match error { + ReadBuildpackDataError::ReadingBuildpack { path, source } => { + write!( + f, + "Failed to read buildpack\nPath: {}\nError: {source}", + path.display() + ) + } + ReadBuildpackDataError::ParsingBuildpack { path, source } => { + write!( + f, + "Failed to parse buildpack\nPath: {}\nError: {source}", + path.display() + ) + } + }, + } + } +} diff --git a/src/commands/generate_buildpack_matrix/mod.rs b/src/commands/generate_buildpack_matrix/mod.rs new file mode 100644 index 00000000..7d8e5d60 --- /dev/null +++ b/src/commands/generate_buildpack_matrix/mod.rs @@ -0,0 +1,4 @@ +pub(crate) mod command; +pub(crate) mod errors; + +pub(crate) use command::execute; diff --git a/src/commands/generate_changelog/command.rs b/src/commands/generate_changelog/command.rs new file mode 100644 index 00000000..7118a339 --- /dev/null +++ b/src/commands/generate_changelog/command.rs @@ -0,0 +1,129 @@ +use crate::changelog::Changelog; +use crate::commands::generate_changelog::errors::Error; +use crate::github::actions; +use clap::Parser; +use libcnb_data::buildpack::BuildpackId; +use libcnb_package::{find_buildpack_dirs, read_buildpack_data}; +use std::collections::{BTreeMap, HashMap}; +use std::path::PathBuf; + +type Result<T, E = Error> = std::result::Result<T, E>; + +#[derive(Parser, Debug)] +#[command(author, version, about = "Generates an aggregated changelist from all buildpacks within a project.", long_about = None, disable_version_flag = true)] +pub(crate) struct GenerateChangelogArgs { + #[arg(long, group = "section")] + unreleased: bool, + #[arg(long, group = "section")] + version: Option<String>, +} + +enum ChangelogEntryType { + Unreleased, + Version(String), +} + +pub(crate) fn execute(args: GenerateChangelogArgs)
-> Result<()> { + let current_dir = std::env::current_dir().map_err(Error::GetCurrentDir)?; + + let buildpack_dirs = find_buildpack_dirs(&current_dir, &[current_dir.join("target")]) + .map_err(|e| Error::FindingBuildpacks(current_dir.clone(), e))?; + + let changelog_entry_type = match args.version { + Some(version) => ChangelogEntryType::Version(version), + None => ChangelogEntryType::Unreleased, + }; + + let changes_by_buildpack = buildpack_dirs + .iter() + .map(|dir| { + read_buildpack_data(dir) + .map_err(Error::GetBuildpackId) + .map(|data| data.buildpack_descriptor.buildpack().id.clone()) + .and_then(|buildpack_id| { + read_changelog_entry(dir.join("CHANGELOG.md"), &changelog_entry_type) + .map(|contents| (buildpack_id, contents)) + }) + }) + .collect::<Result<HashMap<_, _>>>()?; + + let changelog = generate_changelog(&changes_by_buildpack); + + actions::set_output("changelog", changelog).map_err(Error::SetActionOutput)?; + + Ok(()) +} + +fn read_changelog_entry( + path: PathBuf, + changelog_entry_type: &ChangelogEntryType, +) -> Result<Option<Option<String>>> { + let contents = + std::fs::read_to_string(&path).map_err(|e| Error::ReadingChangelog(path.clone(), e))?; + let changelog = Changelog::try_from(contents.as_str()) + .map_err(|e| Error::ParsingChangelog(path.clone(), e))?; + Ok(match changelog_entry_type { + ChangelogEntryType::Unreleased => Some(changelog.unreleased), + ChangelogEntryType::Version(version) => changelog + .releases + .get(version) + .map(|entry| Some(entry.body.clone())), + }) +} + +fn generate_changelog( + changes_by_buildpack: &HashMap<BuildpackId, Option<Option<String>>>, +) -> String { + let changelog = changes_by_buildpack + .iter() + .map(|(buildpack_id, changes)| (buildpack_id.to_string(), changes)) + .collect::<BTreeMap<_, _>>() + .into_iter() + .filter_map(|(buildpack_id, changes)| { + changes.as_ref().map(|contents| match contents { + Some(value) => format!("# {buildpack_id}\n\n{value}"), + None => format!("# {buildpack_id}\n\n- No changes"), + }) + }) + .collect::<Vec<_>>() + .join("\n\n"); + format!("{}\n\n", changelog.trim()) +} + +#[cfg(test)] +mod test { + use crate::commands::generate_changelog::command::generate_changelog; + use libcnb_data::buildpack_id; + use std::collections::HashMap; + + #[test] + fn test_generating_changelog() { + let values = HashMap::from([ + (buildpack_id!("c"), Some(Some("- change c.1".to_string()))), + ( + buildpack_id!("a"), + Some(Some("- change a.1\n- change a.2".to_string())), + ), + (buildpack_id!("b"), None), + (buildpack_id!("d"), Some(None)), + ]); + + assert_eq!( + generate_changelog(&values), + r#"# a + +- change a.1 +- change a.2 + +# c + +- change c.1 + +# d + +- No changes + +"# + ) + } +} diff --git a/src/commands/generate_changelog/errors.rs b/src/commands/generate_changelog/errors.rs new file mode 100644 index 00000000..1b3af466 --- /dev/null +++ b/src/commands/generate_changelog/errors.rs @@ -0,0 +1,73 @@ +use crate::changelog::ChangelogError; +use crate::github::actions::SetOutputError; +use libcnb_package::ReadBuildpackDataError; +use std::fmt::{Display, Formatter}; +use std::path::PathBuf; + +#[derive(Debug)] +pub(crate) enum Error { + GetCurrentDir(std::io::Error), + FindingBuildpacks(PathBuf, std::io::Error), + GetBuildpackId(ReadBuildpackDataError), + ReadingChangelog(PathBuf, std::io::Error), + ParsingChangelog(PathBuf, ChangelogError), + SetActionOutput(SetOutputError), +} + +impl Display for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Error::GetCurrentDir(error) => { + write!(f, "Failed to get current directory\nError: {error}") + } + +
Error::FindingBuildpacks(path, error) => { + write!( + f, + "I/O error while finding buildpacks\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::GetBuildpackId(read_buildpack_data_error) => match read_buildpack_data_error { + ReadBuildpackDataError::ReadingBuildpack { path, source } => { + write!( + f, + "Error reading buildpack\nPath: {}\nError: {source}", + path.display() + ) + } + + ReadBuildpackDataError::ParsingBuildpack { path, source } => { + write!( + f, + "Error parsing buildpack\nPath: {}\nError: {source}", + path.display() + ) + } + }, + + Error::SetActionOutput(set_output_error) => match set_output_error { + SetOutputError::Opening(error) | SetOutputError::Writing(error) => { + write!(f, "Could not write action output\nError: {error}") + } + }, + + Error::ReadingChangelog(path, error) => { + write!( + f, + "Could not read changelog\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::ParsingChangelog(path, error) => { + write!( + f, + "Could not parse changelog\nPath: {}\nError: {error}", + path.display() + ) + } + } + } +} diff --git a/src/commands/generate_changelog/mod.rs b/src/commands/generate_changelog/mod.rs new file mode 100644 index 00000000..7d8e5d60 --- /dev/null +++ b/src/commands/generate_changelog/mod.rs @@ -0,0 +1,4 @@ +pub(crate) mod command; +pub(crate) mod errors; + +pub(crate) use command::execute; diff --git a/src/commands/mod.rs b/src/commands/mod.rs new file mode 100644 index 00000000..5b3ee318 --- /dev/null +++ b/src/commands/mod.rs @@ -0,0 +1,4 @@ +pub(crate) mod generate_buildpack_matrix; +pub(crate) mod generate_changelog; +pub(crate) mod prepare_release; +pub(crate) mod update_builder; diff --git a/src/commands/prepare_release/command.rs b/src/commands/prepare_release/command.rs new file mode 100644 index 00000000..4c7eb229 --- /dev/null +++ b/src/commands/prepare_release/command.rs @@ -0,0 +1,739 @@ +use crate::changelog::{generate_release_declarations, Changelog, ReleaseEntry}; +use crate::commands::prepare_release::errors::Error; +use crate::github::actions; +use chrono::{DateTime, Utc}; +use clap::{Parser, ValueEnum}; +use indexmap::IndexMap; +use libcnb_data::buildpack::{BuildpackId, BuildpackVersion}; +use libcnb_package::find_buildpack_dirs; +use std::collections::{HashMap, HashSet}; +use std::fs::write; +use std::path::{Path, PathBuf}; +use std::str::FromStr; +use toml_edit::{value, ArrayOfTables, Document, Table}; +use uriparse::URI; + +type Result<T, E = Error> = std::result::Result<T, E>; + +#[derive(Parser, Debug)] +#[command(author, version, about = "Bumps the version of each detected buildpack and adds an entry for any unreleased changes from the changelog", long_about = None)] +pub(crate) struct PrepareReleaseArgs { + #[arg(long, value_enum)] + pub(crate) bump: BumpCoordinate, + #[arg(long)] + pub(crate) repository_url: Option<String>, +} + +#[derive(ValueEnum, Debug, Clone)] +pub(crate) enum BumpCoordinate { + Major, + Minor, + Patch, +} + +struct BuildpackFile { + path: PathBuf, + document: Document, +} + +struct ChangelogFile { + path: PathBuf, + changelog: Changelog, +} + +pub(crate) fn execute(args: PrepareReleaseArgs) -> Result<()> { + let current_dir = std::env::current_dir().map_err(Error::GetCurrentDir)?; + + let repository_url = args + .repository_url + .map(|url| { + URI::try_from(url.as_str()) + .map(|uri| uri.into_owned()) + .map_err(|e| Error::InvalidRepositoryUrl(url.clone(), e)) + }) + .transpose()?; + + let buildpack_dirs = find_buildpack_dirs(&current_dir, &[current_dir.join("target")]) + .map_err(|e|
Error::FindingBuildpacks(current_dir.clone(), e))?; + + if buildpack_dirs.is_empty() { + Err(Error::NoBuildpacksFound(current_dir))?; + } + + let buildpack_files = buildpack_dirs + .iter() + .map(|dir| read_buildpack_file(dir.join("buildpack.toml"))) + .collect::<Result<Vec<_>>>()?; + + let changelog_files = buildpack_dirs + .iter() + .map(|dir| read_changelog_file(dir.join("CHANGELOG.md"))) + .collect::<Result<Vec<_>>>()?; + + let updated_buildpack_ids = buildpack_files + .iter() + .map(get_buildpack_id) + .collect::<Result<HashSet<_>>>()?; + + let current_version = get_fixed_version(&buildpack_files)?; + + let next_version = get_next_version(&current_version, args.bump); + + for (mut buildpack_file, changelog_file) in buildpack_files.into_iter().zip(changelog_files) { + let updated_dependencies = get_buildpack_dependency_ids(&buildpack_file)? + .into_iter() + .filter(|buildpack_id| updated_buildpack_ids.contains(buildpack_id)) + .collect::<Vec<_>>(); + + let new_buildpack_contents = update_buildpack_contents_with_new_version( + &mut buildpack_file, + &next_version, + &updated_dependencies, + )?; + + write(&buildpack_file.path, new_buildpack_contents) + .map_err(|e| Error::WritingBuildpack(buildpack_file.path.clone(), e))?; + + eprintln!( + "✅️ Updated version {current_version} → {next_version}: {}", + buildpack_file.path.display(), + ); + + let new_changelog = promote_changelog_unreleased_to_version( + &changelog_file.changelog, + &next_version, + &Utc::now(), + &updated_dependencies, + ); + + let changelog_contents = match &repository_url { + Some(repository) => { + let release_declarations = + generate_release_declarations(&new_changelog, repository.to_string()); + format!("{new_changelog}\n{release_declarations}") + } + None => new_changelog.to_string(), + }; + + write(&changelog_file.path, changelog_contents) + .map_err(|e| Error::WritingChangelog(changelog_file.path.clone(), e))?; + + eprintln!( + "✅️ Added release entry {next_version}: {}", + changelog_file.path.display() + ); + } + + actions::set_output("from_version", current_version.to_string()) + .map_err(Error::SetActionOutput)?; + actions::set_output("to_version", next_version.to_string()).map_err(Error::SetActionOutput)?; + + Ok(()) +} + +fn read_buildpack_file(path: PathBuf) -> Result<BuildpackFile> { + let contents = + std::fs::read_to_string(&path).map_err(|e| Error::ReadingBuildpack(path.clone(), e))?; + let document = + Document::from_str(&contents).map_err(|e| Error::ParsingBuildpack(path.clone(), e))?; + Ok(BuildpackFile { path, document }) +} + +fn read_changelog_file(path: PathBuf) -> Result<ChangelogFile> { + let contents = + std::fs::read_to_string(&path).map_err(|e| Error::ReadingChangelog(path.clone(), e))?; + let changelog = Changelog::try_from(contents.as_str()) + .map_err(|e| Error::ParsingChangelog(path.clone(), e))?; + Ok(ChangelogFile { path, changelog }) +} + +fn get_buildpack_id(buildpack_file: &BuildpackFile) -> Result<BuildpackId> { + let buildpack_id = buildpack_file + .document + .get("buildpack") + .and_then(|value| value.as_table_like()) + .and_then(|buildpack| buildpack.get("id")) + .and_then(|id| id.as_str().map(|v| v.to_string())) + .ok_or(Error::MissingRequiredField( + buildpack_file.path.clone(), + "buildpack.id".to_string(), + ))?; + buildpack_id + .parse() + .map_err(|_| Error::InvalidBuildpackId(buildpack_file.path.clone(), buildpack_id.clone())) +} + +fn get_buildpack_version(buildpack_file: &BuildpackFile) -> Result<BuildpackVersion> { + let version = buildpack_file + .document + .get("buildpack") + .and_then(|value| value.as_table_like()) + .and_then(|buildpack| buildpack.get("version"))
.and_then(|version| version.as_str().map(|v| v.to_string())) + .ok_or(Error::MissingRequiredField( + buildpack_file.path.clone(), + "buildpack.version".to_string(), + ))?; + BuildpackVersion::try_from(version.clone()) + .map_err(|_| Error::InvalidBuildpackVersion(buildpack_file.path.clone(), version)) +} + +fn get_buildpack_dependency_ids(buildpack_file: &BuildpackFile) -> Result<Vec<BuildpackId>> { + buildpack_file + .document + .get("order") + .and_then(|value| value.as_array_of_tables()) + .unwrap_or(&ArrayOfTables::default()) + .iter() + .flat_map(|order| { + order + .get("group") + .and_then(|value| value.as_array_of_tables()) + .unwrap_or(&ArrayOfTables::default()) + .iter() + .map(|group| get_group_buildpack_id(group, &buildpack_file.path)) + .collect::<Vec<_>>() + }) + .collect::<Result<Vec<_>>>() +} + +fn get_group_buildpack_id(group: &Table, path: &Path) -> Result<BuildpackId> { + group + .get("id") + .and_then(|id| id.as_str()) + .ok_or(Error::MissingRequiredField( + path.to_path_buf(), + "order[].group[].id".to_string(), + )) + .and_then(|id| { + id.parse::<BuildpackId>() + .map_err(|_| Error::InvalidBuildpackId(path.to_path_buf(), id.to_string())) + }) +} + +fn get_fixed_version(buildpack_files: &[BuildpackFile]) -> Result<BuildpackVersion> { + let version_map = buildpack_files + .iter() + .map(|buildpack_file| { + get_buildpack_version(buildpack_file) + .map(|version| (buildpack_file.path.clone(), version)) + }) + .collect::<Result<HashMap<_, _>>>()?; + + let versions = version_map + .values() + .map(|version| version.to_string()) + .collect::<HashSet<_>>(); + + if versions.len() != 1 { + return Err(Error::NotAllVersionsMatch(version_map)); + } + + version_map + .into_iter() + .next() + .map(|(_, version)| version) + .ok_or(Error::NoFixedVersion) +} + +fn get_next_version(current_version: &BuildpackVersion, bump: BumpCoordinate) -> BuildpackVersion { + let BuildpackVersion { + major, + minor, + patch, + } = current_version; + + match bump { + BumpCoordinate::Major => BuildpackVersion { + major: major + 1, + minor: 0, + patch: 0, + }, + BumpCoordinate::Minor => BuildpackVersion { + major: *major, + minor: minor + 1, + patch: 0, + }, + BumpCoordinate::Patch => BuildpackVersion { + major: *major, + minor: *minor, + patch: patch + 1, + }, + } +} + +fn update_buildpack_contents_with_new_version( + buildpack_file: &mut BuildpackFile, + next_version: &BuildpackVersion, + updated_dependencies: &[BuildpackId], +) -> Result<String> { + let buildpack = buildpack_file + .document + .get_mut("buildpack") + .and_then(|value| value.as_table_like_mut()) + .ok_or(Error::MissingRequiredField( + buildpack_file.path.clone(), + "buildpack".to_string(), + ))?; + + buildpack.insert("version", value(next_version.to_string())); + + let mut empty_orders = ArrayOfTables::default(); + let mut empty_groups = ArrayOfTables::default(); + + let orders = buildpack_file + .document + .get_mut("order") + .and_then(|value| value.as_array_of_tables_mut()) + .unwrap_or(&mut empty_orders); + for order in orders.iter_mut() { + let groups = order + .get_mut("group") + .and_then(|value| value.as_array_of_tables_mut()) + .unwrap_or(&mut empty_groups); + for group in groups.iter_mut() { + let buildpack_id = get_group_buildpack_id(group, &buildpack_file.path)?; + if updated_dependencies.contains(&buildpack_id) { + group.insert("version", value(next_version.to_string())); + } + } + } + + Ok(buildpack_file.document.to_string()) +} + +fn promote_changelog_unreleased_to_version( + changelog: &Changelog, + version: &BuildpackVersion, + date: &DateTime<Utc>, + updated_dependencies: &[BuildpackId], +) -> Changelog { + let updated_dependencies_text =
if updated_dependencies.is_empty() { + None + } else { + Some( + updated_dependencies + .iter() + .map(|id| format!("- Updated `{id}` to `{version}`")) + .collect::<Vec<_>>() + .join("\n"), + ) + }; + + let changes_with_dependencies = (&changelog.unreleased, &updated_dependencies_text); + + let body = if let (Some(changes), Some(dependencies)) = changes_with_dependencies { + format!("{}\n{}", changes.trim_end(), dependencies) + } else if let (Some(changes), None) = changes_with_dependencies { + changes.clone() + } else if let (None, Some(dependencies)) = changes_with_dependencies { + dependencies.clone() + } else { + "- No changes".to_string() + }; + + let new_release_entry = ReleaseEntry { + version: version.to_string(), + date: *date, + body, + }; + + let mut releases = IndexMap::from([(version.to_string(), new_release_entry)]); + for (id, entry) in &changelog.releases { + releases.insert(id.clone(), entry.clone()); + } + Changelog { + unreleased: None, + releases, + } +} + +#[cfg(test)] +mod test { + use crate::changelog::{Changelog, ReleaseEntry}; + use crate::commands::prepare_release::command::{ + get_fixed_version, promote_changelog_unreleased_to_version, + update_buildpack_contents_with_new_version, BuildpackFile, + }; + use crate::commands::prepare_release::errors::Error; + use chrono::{TimeZone, Utc}; + use indexmap::IndexMap; + use libcnb_data::buildpack::BuildpackVersion; + use libcnb_data::buildpack_id; + use std::collections::HashMap; + use std::path::PathBuf; + use std::str::FromStr; + use toml_edit::Document; + + #[test] + fn test_get_fixed_version() { + let buildpack_a = create_buildpack_file_with_name( + "/a/buildpack.toml", + r#"[buildpack] +id = "a" +version = "0.0.0" +"#, + ); + let buildpack_b = create_buildpack_file_with_name( + "/b/buildpack.toml", + r#"[buildpack] +id = "b" +version = "0.0.0" +"#, + ); + assert_eq!( + get_fixed_version(&vec![buildpack_a, buildpack_b]).unwrap(), + BuildpackVersion { + major: 0, + minor: 0, + patch: 0 + } + ) + } + + #[test] + fn test_get_fixed_version_errors_if_there_is_a_version_mismatch() { + let buildpack_a = create_buildpack_file_with_name( + "/a/buildpack.toml", + r#"[buildpack] +id = "a" +version = "0.0.0" +"#, + ); + let buildpack_b = create_buildpack_file_with_name( + "/b/buildpack.toml", + r#"[buildpack] +id = "b" +version = "0.0.1" +"#, + ); + match get_fixed_version(&vec![buildpack_a, buildpack_b]).unwrap_err() { + Error::NotAllVersionsMatch(version_map) => { + assert_eq!( + HashMap::from([ + ( + PathBuf::from("/a/buildpack.toml"), + BuildpackVersion { + major: 0, + minor: 0, + patch: 0 + } + ), + ( + PathBuf::from("/b/buildpack.toml"), + BuildpackVersion { + major: 0, + minor: 0, + patch: 1 + } + ) + ]), + version_map + ); + } + _ => panic!("Expected error NoFixedVersion"), + }; + } + + #[test] + fn test_update_buildpack_contents_with_new_version() { + let toml = r#"[buildpack] +id = "test" +version = "0.0.0" + "#; + + let mut buildpack_file = create_buildpack_file(toml); + let next_version = BuildpackVersion { + major: 1, + minor: 0, + patch: 0, + }; + assert_eq!( + update_buildpack_contents_with_new_version(&mut buildpack_file, &next_version, &[]) + .unwrap(), + r#"[buildpack] +id = "test" +version = "1.0.0" + "# + ); + } + + #[test] + fn test_update_buildpack_contents_with_new_version_and_order_groups_are_present() { + let toml = r#"[buildpack] +id = "test" +version = "0.0.9" + +[[order]] +[[order.group]] +id = "dep-a" +version = "0.0.9" + +[[order.group]] +id = "dep-b" +version = "0.0.9" + +[[order.group]] +id =
"heroku/procfile" +version = "2.0.0" +optional = true + "#; + + let mut buildpack_file = create_buildpack_file(toml); + let next_version = BuildpackVersion { + major: 0, + minor: 0, + patch: 10, + }; + assert_eq!( + update_buildpack_contents_with_new_version( + &mut buildpack_file, + &next_version, + &[buildpack_id!("dep-a"), buildpack_id!("dep-b")] + ) + .unwrap(), + r#"[buildpack] +id = "test" +version = "0.0.10" + +[[order]] +[[order.group]] +id = "dep-a" +version = "0.0.10" + +[[order.group]] +id = "dep-b" +version = "0.0.10" + +[[order.group]] +id = "heroku/procfile" +version = "2.0.0" +optional = true + "# + ); + } + + #[test] + fn test_promote_changelog_unreleased_to_version_with_existing_entries() { + let release_entry_0_8_16 = ReleaseEntry { + version: "0.8.16".to_string(), + date: Utc.with_ymd_and_hms(2023, 2, 27, 0, 0, 0).unwrap(), + body: "- Added node version 19.7.0, 19.6.1, 14.21.3, 16.19.1, 18.14.1, 18.14.2.\n- Added node version 18.14.0, 19.6.0.".to_string() + }; + + let release_entry_0_8_15 = ReleaseEntry { + version: "0.8.15".to_string(), + date: Utc.with_ymd_and_hms(2023, 2, 27, 0, 0, 0).unwrap(), + body: "- `name` is no longer a required field in package.json. ([#447](https://github.com/heroku/buildpacks-nodejs/pull/447))\n- Added node version 19.5.0.".to_string() + }; + + let changelog = Changelog { + unreleased: Some( + "- Added node version 18.15.0.\n- Added yarn version 4.0.0-rc.2".to_string(), + ), + releases: IndexMap::from([ + ("0.8.16".to_string(), release_entry_0_8_16.clone()), + ("0.8.15".to_string(), release_entry_0_8_15.clone()), + ]), + }; + + assert_eq!( + changelog.unreleased, + Some("- Added node version 18.15.0.\n- Added yarn version 4.0.0-rc.2".to_string()) + ); + assert_eq!(changelog.releases.get("0.8.17"), None); + assert_eq!( + changelog.releases.get("0.8.16"), + Some(&release_entry_0_8_16) + ); + assert_eq!( + changelog.releases.get("0.8.15"), + Some(&release_entry_0_8_15) + ); + + let next_version = BuildpackVersion { + major: 0, + minor: 8, + patch: 17, + }; + let date = Utc.with_ymd_and_hms(2023, 6, 16, 0, 0, 0).unwrap(); + let updated_dependencies = vec![]; + let changelog = promote_changelog_unreleased_to_version( + &changelog, + &next_version, + &date, + &updated_dependencies, + ); + + assert_eq!(changelog.unreleased, None); + assert_eq!( + changelog.releases.get("0.8.17"), + Some(&ReleaseEntry { + version: "0.8.17".to_string(), + date, + body: "- Added node version 18.15.0.\n- Added yarn version 4.0.0-rc.2".to_string() + }) + ); + assert_eq!( + changelog.releases.get("0.8.16"), + Some(&release_entry_0_8_16) + ); + assert_eq!( + changelog.releases.get("0.8.15"), + Some(&release_entry_0_8_15) + ); + } + + #[test] + fn test_promote_changelog_unreleased_to_version_with_no_entries() { + let changelog = Changelog { + unreleased: None, + releases: IndexMap::new(), + }; + + assert_eq!(changelog.unreleased, None); + assert_eq!(changelog.releases.get("0.8.17"), None); + + let next_version = BuildpackVersion { + major: 0, + minor: 8, + patch: 17, + }; + let date = Utc.with_ymd_and_hms(2023, 6, 16, 0, 0, 0).unwrap(); + let updated_dependencies = vec![]; + let changelog = promote_changelog_unreleased_to_version( + &changelog, + &next_version, + &date, + &updated_dependencies, + ); + + assert_eq!(changelog.unreleased, None); + assert_eq!( + changelog.releases.get("0.8.17"), + Some(&ReleaseEntry { + version: "0.8.17".to_string(), + date, + body: "- No changes".to_string() + }) + ); + } + + #[test] + fn 
test_promote_changelog_unreleased_to_version_with_existing_entries_and_updated_dependencies() + { + let release_entry_0_8_16 = ReleaseEntry { + version: "0.8.16".to_string(), + date: Utc.with_ymd_and_hms(2023, 2, 27, 0, 0, 0).unwrap(), + body: "- Added node version 19.7.0, 19.6.1, 14.21.3, 16.19.1, 18.14.1, 18.14.2.\n- Added node version 18.14.0, 19.6.0.".to_string() + }; + + let release_entry_0_8_15 = ReleaseEntry { + version: "0.8.15".to_string(), + date: Utc.with_ymd_and_hms(2023, 2, 27, 0, 0, 0).unwrap(), + body: "- `name` is no longer a required field in package.json. ([#447](https://github.com/heroku/buildpacks-nodejs/pull/447))\n- Added node version 19.5.0.".to_string() + }; + + let changelog = Changelog { + unreleased: Some( + "- Added node version 18.15.0.\n- Added yarn version 4.0.0-rc.2".to_string(), + ), + releases: IndexMap::from([ + ("0.8.16".to_string(), release_entry_0_8_16.clone()), + ("0.8.15".to_string(), release_entry_0_8_15.clone()), + ]), + }; + + assert_eq!( + changelog.unreleased, + Some("- Added node version 18.15.0.\n- Added yarn version 4.0.0-rc.2".to_string()) + ); + assert_eq!(changelog.releases.get("0.8.17"), None); + assert_eq!( + changelog.releases.get("0.8.16"), + Some(&release_entry_0_8_16) + ); + assert_eq!( + changelog.releases.get("0.8.15"), + Some(&release_entry_0_8_15) + ); + + let next_version = BuildpackVersion { + major: 0, + minor: 8, + patch: 17, + }; + let date = Utc.with_ymd_and_hms(2023, 6, 16, 0, 0, 0).unwrap(); + let updated_dependencies = vec![buildpack_id!("a"), buildpack_id!("b")]; + let changelog = promote_changelog_unreleased_to_version( + &changelog, + &next_version, + &date, + &updated_dependencies, + ); + + assert_eq!(changelog.unreleased, None); + assert_eq!( + changelog.releases.get("0.8.17"), + Some(&ReleaseEntry { + version: "0.8.17".to_string(), + date, + body: "- Added node version 18.15.0.\n- Added yarn version 4.0.0-rc.2\n- Updated `a` to `0.8.17`\n- Updated `b` to `0.8.17`".to_string() + }) + ); + assert_eq!( + changelog.releases.get("0.8.16"), + Some(&release_entry_0_8_16) + ); + assert_eq!( + changelog.releases.get("0.8.15"), + Some(&release_entry_0_8_15) + ); + } + + #[test] + fn test_promote_changelog_unreleased_to_version_with_no_entries_and_updated_dependencies() { + let changelog = Changelog { + unreleased: None, + releases: IndexMap::new(), + }; + + assert_eq!(changelog.unreleased, None); + assert_eq!(changelog.releases.get("0.8.17"), None); + + let next_version = BuildpackVersion { + major: 0, + minor: 8, + patch: 17, + }; + let date = Utc.with_ymd_and_hms(2023, 6, 16, 0, 0, 0).unwrap(); + let updated_dependencies = vec![buildpack_id!("a"), buildpack_id!("b")]; + let changelog = promote_changelog_unreleased_to_version( + &changelog, + &next_version, + &date, + &updated_dependencies, + ); + + assert_eq!(changelog.unreleased, None); + assert_eq!( + changelog.releases.get("0.8.17"), + Some(&ReleaseEntry { + version: "0.8.17".to_string(), + date, + body: "- Updated `a` to `0.8.17`\n- Updated `b` to `0.8.17`".to_string() + }) + ); + } + + fn create_buildpack_file(contents: &str) -> BuildpackFile { + create_buildpack_file_with_name("/path/to/test/buildpack.toml", contents) + } + + fn create_buildpack_file_with_name(name: &str, contents: &str) -> BuildpackFile { + BuildpackFile { + path: PathBuf::from(name), + document: Document::from_str(contents).unwrap(), + } + } +} diff --git a/src/commands/prepare_release/errors.rs b/src/commands/prepare_release/errors.rs new file mode 100644 index 00000000..4e900ab7 --- /dev/null +++ 
b/src/commands/prepare_release/errors.rs @@ -0,0 +1,146 @@ +use crate::changelog::ChangelogError; +use crate::github::actions::SetOutputError; +use libcnb_data::buildpack::BuildpackVersion; +use std::collections::HashMap; +use std::fmt::{Display, Formatter}; +use std::io; +use std::path::PathBuf; +use uriparse::URIError; + +#[derive(Debug)] +pub(crate) enum Error { + GetCurrentDir(io::Error), + InvalidRepositoryUrl(String, URIError), + NoBuildpacksFound(PathBuf), + NotAllVersionsMatch(HashMap<PathBuf, BuildpackVersion>), + NoFixedVersion, + FindingBuildpacks(PathBuf, io::Error), + ReadingChangelog(PathBuf, io::Error), + ParsingChangelog(PathBuf, ChangelogError), + ReadingBuildpack(PathBuf, io::Error), + ParsingBuildpack(PathBuf, toml_edit::TomlError), + MissingRequiredField(PathBuf, String), + InvalidBuildpackId(PathBuf, String), + InvalidBuildpackVersion(PathBuf, String), + WritingBuildpack(PathBuf, io::Error), + WritingChangelog(PathBuf, io::Error), + SetActionOutput(SetOutputError), +} + +impl Display for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Error::GetCurrentDir(error) => { + write!(f, "Failed to get current directory\nError: {error}") + } + + Error::InvalidRepositoryUrl(value, error) => { + write!(f, "Invalid URL `{value}`\nError: {error}") + } + + Error::NoBuildpacksFound(path) => { + write!(f, "No buildpacks found under {}", path.display()) + } + + Error::NotAllVersionsMatch(version_map) => { + write!( + f, + "Not all versions match:\n{}", + version_map + .iter() + .map(|(path, version)| format!("• {version} ({})", path.display())) + .collect::<Vec<_>>() + .join("\n") + ) + } + + Error::NoFixedVersion => { + write!(f, "No fixed version could be determined") + } + + Error::FindingBuildpacks(path, error) => { + write!( + f, + "I/O error while finding buildpacks\nPath: {}\nError: {error}", + path.display() + ) + } + Error::ReadingBuildpack(path, error) => { + write!( + f, + "Could not read buildpack\nPath: {}\nError: {error}", + path.display() + ) + } + Error::ParsingBuildpack(path, error) => { + write!( + f, + "Could not parse buildpack\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::WritingBuildpack(path, error) => { + write!( + f, + "Could not write buildpack\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::ReadingChangelog(path, error) => { + write!( + f, + "Could not read changelog\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::ParsingChangelog(path, error) => { + write!( + f, + "Could not parse changelog\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::WritingChangelog(path, error) => { + write!( + f, + "Could not write changelog\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::SetActionOutput(set_output_error) => match set_output_error { + SetOutputError::Opening(error) | SetOutputError::Writing(error) => { + write!(f, "Could not write action output\nError: {error}") + } + }, + + Error::MissingRequiredField(path, field) => { + write!( + f, + "Missing required field `{field}` in buildpack.toml\nPath: {}", + path.display() + ) + } + + Error::InvalidBuildpackId(path, id) => { + write!( + f, + "Invalid buildpack id `{id}` in buildpack.toml\nPath: {}", + path.display() + ) + } + + Error::InvalidBuildpackVersion(path, version) => { + write!( + f, + "Invalid buildpack version `{version}` in buildpack.toml\nPath: {}", + path.display() + ) + } + } + } +} diff --git a/src/commands/prepare_release/mod.rs b/src/commands/prepare_release/mod.rs new file mode 100644 index 00000000..7d8e5d60 ---
/dev/null +++ b/src/commands/prepare_release/mod.rs @@ -0,0 +1,4 @@ +pub(crate) mod command; +pub(crate) mod errors; + +pub(crate) use command::execute; diff --git a/src/commands/update_builder/command.rs b/src/commands/update_builder/command.rs new file mode 100644 index 00000000..8f8ebb06 --- /dev/null +++ b/src/commands/update_builder/command.rs @@ -0,0 +1,213 @@ +use crate::commands::update_builder::errors::Error; +use clap::Parser; +use libcnb_data::buildpack::{BuildpackId, BuildpackVersion}; +use std::path::PathBuf; +use std::str::FromStr; +use toml_edit::{value, Document}; +use uriparse::URIReference; + +type Result<T, E = Error> = std::result::Result<T, E>; + +#[derive(Parser, Debug)] +#[command(author, version, about = "Updates all references to a buildpack in heroku/builder for the given list of builders", long_about = None)] +pub(crate) struct UpdateBuilderArgs { + #[arg(long)] + pub(crate) buildpack_id: BuildpackId, + #[arg(long)] + pub(crate) buildpack_version: String, + #[arg(long)] + pub(crate) buildpack_uri: String, + #[arg(long, required = true, value_delimiter = ',', num_args = 1..)] + pub(crate) builders: Vec<String>, + #[arg(long, required = true)] + pub(crate) path: String, +} + +struct BuilderFile { + path: PathBuf, + document: Document, +} + +pub(crate) fn execute(args: UpdateBuilderArgs) -> Result<()> { + let current_dir = std::env::current_dir() + .map_err(Error::GetCurrentDir) + .map(|dir| dir.join(PathBuf::from(args.path)))?; + + let buildpack_id = args.buildpack_id; + + let buildpack_uri = URIReference::try_from(args.buildpack_uri.as_str()) + .map_err(|e| Error::InvalidBuildpackUri(args.buildpack_uri.clone(), e))?; + + let buildpack_version = BuildpackVersion::try_from(args.buildpack_version.to_string()) + .map_err(|e| Error::InvalidBuildpackVersion(args.buildpack_version, e))?; + + let builder_files = args + .builders + .iter() + .map(|builder| read_builder_file(current_dir.join(builder).join("builder.toml"))) + .collect::<Result<Vec<_>>>()?; + + if builder_files.is_empty() { + Err(Error::NoBuilderFiles(args.builders))?; + } + + for mut builder_file in builder_files { + let new_contents = update_builder_contents_with_buildpack( + &mut builder_file, + &buildpack_id, + &buildpack_version, + &buildpack_uri, + )?; + + std::fs::write(&builder_file.path, new_contents) + .map_err(|e| Error::WritingBuilder(builder_file.path.clone(), e))?; + + eprintln!( + "✅️ Updated {buildpack_id} for builder: {}", + builder_file.path.display() + ); + } + + Ok(()) +} + +fn read_builder_file(path: PathBuf) -> Result<BuilderFile> { + let contents = + std::fs::read_to_string(&path).map_err(|e| Error::ReadingBuilder(path.clone(), e))?; + let document = + Document::from_str(&contents).map_err(|e| Error::ParsingBuilder(path.clone(), e))?; + Ok(BuilderFile { path, document }) +} + +fn update_builder_contents_with_buildpack( + builder_file: &mut BuilderFile, + buildpack_id: &BuildpackId, + buildpack_version: &BuildpackVersion, + buildpack_uri: &URIReference, +) -> Result<String> { + builder_file + .document + .get_mut("buildpacks") + .and_then(|value| value.as_array_of_tables_mut()) + .unwrap_or(&mut toml_edit::ArrayOfTables::default()) + .iter_mut() + .for_each(|buildpack| { + let matches_id = buildpack + .get("id") + .and_then(|item| item.as_str()) + .filter(|value| value == &buildpack_id.as_str()) + .is_some(); + if matches_id { + buildpack["uri"] = value(buildpack_uri.to_string()); + } + }); + + let order_list = builder_file + .document + .get_mut("order") + .and_then(|value| value.as_array_of_tables_mut()) + .ok_or(Error::BuilderMissingRequiredKey(
builder_file.path.clone(), + "order".to_string(), + ))?; + + for order in order_list.iter_mut() { + let group_list = order + .get_mut("group") + .and_then(|value| value.as_array_of_tables_mut()) + .ok_or(Error::BuilderMissingRequiredKey( + builder_file.path.clone(), + "group".to_string(), + ))?; + + for group in group_list.iter_mut() { + let matches_id = group + .get("id") + .and_then(|item| item.as_str()) + .filter(|value| value == &buildpack_id.as_str()) + .is_some(); + if matches_id { + group["version"] = value(buildpack_version.to_string()); + } + } + } + + Ok(builder_file.document.to_string()) +} + +#[cfg(test)] +mod test { + use crate::commands::update_builder::command::{ + update_builder_contents_with_buildpack, BuilderFile, + }; + use libcnb_data::buildpack::BuildpackVersion; + use libcnb_data::buildpack_id; + use std::path::PathBuf; + use std::str::FromStr; + use toml_edit::Document; + use uriparse::URIReference; + + #[test] + fn test_update_builder_contents_with_buildpack() { + let toml = r#" +[[buildpacks]] + id = "heroku/java" + uri = "docker://docker.io/heroku/buildpack-java@sha256:21990393c93927b16f76c303ae007ea7e95502d52b0317ca773d4cd51e7a5682" + +[[buildpacks]] + id = "heroku/nodejs" + uri = "docker://docker.io/heroku/buildpack-nodejs@sha256:22ec91eebee2271b99368844f193c4bb3c6084201062f89b3e45179b938c3241" + +[[order]] + [[order.group]] + id = "heroku/nodejs" + version = "0.6.5" + +[[order]] + [[order.group]] + id = "heroku/java" + version = "0.6.9" + + [[order.group]] + id = "heroku/procfile" + version = "2.0.0" + optional = true +"#; + let mut builder_file = BuilderFile { + path: PathBuf::from("/path/to/builder.toml"), + document: Document::from_str(toml).unwrap(), + }; + assert_eq!( + update_builder_contents_with_buildpack( + &mut builder_file, + &buildpack_id!("heroku/java"), + &BuildpackVersion::try_from("0.6.10".to_string()).unwrap(), + &URIReference::try_from("docker://docker.io/heroku/buildpack-java@sha256:c6dd500be06a2a1e764c30359c5dd4f4955a98b572ef3095b2f6115cd8a87c99").unwrap() + ).unwrap(), + r#" +[[buildpacks]] + id = "heroku/java" + uri = "docker://docker.io/heroku/buildpack-java@sha256:c6dd500be06a2a1e764c30359c5dd4f4955a98b572ef3095b2f6115cd8a87c99" + +[[buildpacks]] + id = "heroku/nodejs" + uri = "docker://docker.io/heroku/buildpack-nodejs@sha256:22ec91eebee2271b99368844f193c4bb3c6084201062f89b3e45179b938c3241" + +[[order]] + [[order.group]] + id = "heroku/nodejs" + version = "0.6.5" + +[[order]] + [[order.group]] + id = "heroku/java" + version = "0.6.10" + + [[order.group]] + id = "heroku/procfile" + version = "2.0.0" + optional = true +"# + ) + } +} diff --git a/src/commands/update_builder/errors.rs b/src/commands/update_builder/errors.rs new file mode 100644 index 00000000..88a8a5a0 --- /dev/null +++ b/src/commands/update_builder/errors.rs @@ -0,0 +1,82 @@ +use std::fmt::{Display, Formatter}; +use std::path::PathBuf; + +#[derive(Debug)] +pub(crate) enum Error { + GetCurrentDir(std::io::Error), + InvalidBuildpackUri(String, uriparse::URIReferenceError), + InvalidBuildpackVersion(String, libcnb_data::buildpack::BuildpackVersionError), + ReadingBuilder(PathBuf, std::io::Error), + ParsingBuilder(PathBuf, toml_edit::TomlError), + BuilderMissingRequiredKey(PathBuf, String), + WritingBuilder(PathBuf, std::io::Error), + NoBuilderFiles(Vec<String>), +} + +impl Display for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Error::GetCurrentDir(error) => { + write!(f, "Could not get the current directory\nError: {error}") + } + +
Error::InvalidBuildpackUri(value, error) => { + write!( + f, + "The buildpack URI argument is invalid\nValue: {value}\nError: {error}" + ) + } + + Error::InvalidBuildpackVersion(value, error) => { + write!( + f, + "The buildpack version argument is invalid\nValue: {value}\nError: {error}" + ) + } + + Error::ReadingBuilder(path, error) => { + write!( + f, + "Could not read builder\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::ParsingBuilder(path, error) => { + write!( + f, + "Could not parse builder\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::BuilderMissingRequiredKey(path, key) => { + write!( + f, + "Missing required key `{key}` in builder\nPath: {}", + path.display() + ) + } + + Error::WritingBuilder(path, error) => { + write!( + f, + "Error writing builder\nPath: {}\nError: {error}", + path.display() + ) + } + + Error::NoBuilderFiles(builders) => { + write!( + f, + "No builder.toml files found in the given builder directories\n{}", + builders + .iter() + .map(|builder| format!("• {builder}")) + .collect::<Vec<_>>() + .join("\n") + ) + } + } + } +} diff --git a/src/commands/update_builder/mod.rs b/src/commands/update_builder/mod.rs new file mode 100644 index 00000000..7d8e5d60 --- /dev/null +++ b/src/commands/update_builder/mod.rs @@ -0,0 +1,4 @@ +pub(crate) mod command; +pub(crate) mod errors; + +pub(crate) use command::execute; diff --git a/src/github/actions.rs b/src/github/actions.rs new file mode 100644 index 00000000..7cc46f57 --- /dev/null +++ b/src/github/actions.rs @@ -0,0 +1,40 @@ +use rand::distributions::{Alphanumeric, DistString}; +use std::fs::OpenOptions; +use std::io; +use std::io::{stdout, Write}; + +pub(crate) fn set_output<N: Into<String>, V: Into<String>>( + name: N, + value: V, +) -> Result<(), SetOutputError> { + let name = name.into(); + let value = value.into(); + + let line = if value.contains('\n') { + let delimiter = Alphanumeric.sample_string(&mut rand::thread_rng(), 20); + format!("{name}<<{delimiter}\n{value}\n{delimiter}") + } else { + format!("{name}={value}") + }; + let line = format!("{line}\n"); + + let mut file: Box<dyn Write> = match std::env::var("GITHUB_OUTPUT") { + Ok(github_output) => { + let append_file = OpenOptions::new() + .append(true) + .open(github_output) + .map_err(SetOutputError::Opening)?; + Box::new(append_file) + } + Err(_) => Box::new(stdout()), + }; + + file.write_all(line.as_bytes()) + .map_err(SetOutputError::Writing) +} + +#[derive(Debug)] +pub(crate) enum SetOutputError { + Opening(io::Error), + Writing(io::Error), +} diff --git a/src/github/mod.rs b/src/github/mod.rs new file mode 100644 index 00000000..25c97617 --- /dev/null +++ b/src/github/mod.rs @@ -0,0 +1 @@ +pub(crate) mod actions; diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 00000000..aa1de66e --- /dev/null +++ b/src/main.rs @@ -0,0 +1,55 @@ +use crate::commands::generate_buildpack_matrix::command::GenerateBuildpackMatrixArgs; +use crate::commands::generate_changelog::command::GenerateChangelogArgs; +use crate::commands::prepare_release::command::PrepareReleaseArgs; +use crate::commands::update_builder::command::UpdateBuilderArgs; +use crate::commands::{ + generate_buildpack_matrix, generate_changelog, prepare_release, update_builder, +}; +use clap::Parser; + +mod changelog; +mod commands; +mod github; + +const UNSPECIFIED_ERROR: i32 = 1; + +#[derive(Parser)] +#[command(bin_name = "actions")] +pub(crate) enum Cli { + GenerateBuildpackMatrix(GenerateBuildpackMatrixArgs), + GenerateChangelog(GenerateChangelogArgs), + PrepareRelease(PrepareReleaseArgs), +
UpdateBuilder(UpdateBuilderArgs), +} + +fn main() { + match Cli::parse() { + Cli::GenerateBuildpackMatrix(args) => { + if let Err(error) = generate_buildpack_matrix::execute(args) { + eprintln!("❌ {error}"); + std::process::exit(UNSPECIFIED_ERROR); + } + } + + Cli::GenerateChangelog(args) => { + if let Err(error) = generate_changelog::execute(args) { + eprintln!("❌ {error}"); + std::process::exit(UNSPECIFIED_ERROR); + } + } + + Cli::PrepareRelease(args) => { + if let Err(error) = prepare_release::execute(args) { + eprintln!("❌ {error}"); + std::process::exit(UNSPECIFIED_ERROR); + } + } + + Cli::UpdateBuilder(args) => { + if let Err(error) = update_builder::execute(args) { + eprintln!("❌ {error}"); + std::process::exit(UNSPECIFIED_ERROR); + } + } + } +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..829ec993 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,9 @@ +{ + "compilerOptions": { + "target": "es2015", + "moduleResolution": "node" + }, + "include": [ + ".github/bootstrap/*" + ] +}