From d4d463bd9b10ee86a7deffecaa831cb35929497d Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Fri, 14 Oct 2022 21:54:25 +0200 Subject: [PATCH] bump deps and rebuild --- dist/restore/index.js | 1352 +++++++++++++++++++++------------------- dist/save/index.js | 1382 ++++++++++++++++++++++------------------- package-lock.json | 73 +-- package.json | 4 +- 4 files changed, 1488 insertions(+), 1323 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index fb6f17e..0012890 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -2494,7 +2494,6 @@ const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); const os = __importStar(__nccwpck_require__(2037)); const path = __importStar(__nccwpck_require__(1017)); -const uuid_1 = __nccwpck_require__(8974); const oidc_utils_1 = __nccwpck_require__(8041); /** * The code to exit an action @@ -2524,20 +2523,9 @@ function exportVariable(name, val) { process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { - const delimiter = `ghadelimiter_${uuid_1.v4()}`; - // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter. - if (name.includes(delimiter)) { - throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); - } - if (convertedVal.includes(delimiter)) { - throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); - } - const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; - file_command_1.issueCommand('ENV', commandValue); - } - else { - command_1.issueCommand('set-env', { name }, convertedVal); + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); } + command_1.issueCommand('set-env', { name }, convertedVal); } exports.exportVariable = exportVariable; /** @@ -2555,7 +2543,7 @@ exports.setSecret = setSecret; function addPath(inputPath) { const filePath = process.env['GITHUB_PATH'] || ''; if (filePath) { - file_command_1.issueCommand('PATH', inputPath); + file_command_1.issueFileCommand('PATH', inputPath); } else { command_1.issueCommand('add-path', {}, inputPath); @@ -2595,7 +2583,10 @@ function getMultilineInput(name, options) { const inputs = getInput(name, options) .split('\n') .filter(x => x !== ''); - return inputs; + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); } exports.getMultilineInput = getMultilineInput; /** @@ -2628,8 +2619,12 @@ exports.getBooleanInput = getBooleanInput; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + } process.stdout.write(os.EOL); - command_1.issueCommand('set-output', { name }, value); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); } exports.setOutput = setOutput; /** @@ -2758,7 +2753,11 @@ exports.group = group; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); 
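These @actions/core hunks replace the deprecated stdout workflow commands (`::set-output::`, `::save-state::`, `::set-env::`) with file commands: key/value pairs are appended to the file named by `GITHUB_OUTPUT`/`GITHUB_STATE`/`GITHUB_ENV` in a heredoc-style format, falling back to the stdout form only when that variable is unset. A minimal sketch of the message format that `prepareKeyValueMessage` produces, assuming Node 15+ so `crypto.randomUUID` can stand in for the bundled `uuid` dependency:

```ts
import * as fs from "fs";
import * as os from "os";
import { randomUUID } from "crypto";

// Append one key/value pair in the heredoc format the runner parses from
// GITHUB_STATE / GITHUB_OUTPUT / GITHUB_ENV files.
function writeFileCommand(command: "STATE" | "OUTPUT" | "ENV", key: string, value: string): void {
  const filePath = process.env[`GITHUB_${command}`];
  if (!filePath) {
    throw new Error(`Unable to find environment variable for file command ${command}`);
  }
  // A fresh random delimiter per message keeps a crafted multi-line value
  // from smuggling in extra key/value pairs.
  const delimiter = `ghadelimiter_${randomUUID()}`;
  if (key.includes(delimiter) || value.includes(delimiter)) {
    throw new Error(`Unexpected input: keys and values must not contain "${delimiter}"`);
  }
  fs.appendFileSync(filePath, `${key}<<${delimiter}${os.EOL}${value}${os.EOL}${delimiter}${os.EOL}`, {
    encoding: "utf8",
  });
}
```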
+ } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); } exports.saveState = saveState; /** @@ -2824,13 +2823,14 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issueCommand = void 0; +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(__nccwpck_require__(7147)); const os = __importStar(__nccwpck_require__(2037)); +const uuid_1 = __nccwpck_require__(8974); const utils_1 = __nccwpck_require__(5278); -function issueCommand(command, message) { +function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); @@ -2842,7 +2842,22 @@ function issueCommand(command, message) { encoding: 'utf8' }); } -exports.issueCommand = issueCommand; +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; //# sourceMappingURL=file-command.js.map /***/ }), @@ -17230,8 +17245,7 @@ function setStateError(inputs) { }; } function processOperationStatus(result) { - const { state, stateProxy, status } = result; - logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${terminalStates.includes(status) ? "Stopped" : "Running"}`); + const { state, stateProxy, status, isDone, processResult, response, setErrorAsResult } = result; switch (status) { case "succeeded": { stateProxy.setSucceeded(state); @@ -17247,6 +17261,15 @@ function processOperationStatus(result) { break; } } + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || + (isDone === undefined && + ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult, + })); + } } function buildResult(inputs) { const { processResult, response, state } = inputs; @@ -17256,7 +17279,7 @@ function buildResult(inputs) { * Initiates the long-running operation. */ async function initOperation(inputs) { - const { init, stateProxy, processResult, getOperationStatus, withOperationLocation } = inputs; + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; const { operationLocation, resourceLocation, metadata, response } = await init(); if (operationLocation) withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); @@ -17267,41 +17290,33 @@ async function initOperation(inputs) { }; logger.verbose(`LRO: Operation description:`, config); const state = stateProxy.initState(config); - const status = getOperationStatus(response, state); - if (status === "succeeded" || operationLocation === undefined) { - stateProxy.setSucceeded(state); - stateProxy.setResult(state, buildResult({ - response, - state, - processResult, - })); - } + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); return state; } async function pollOperationHelper(inputs) { - const { poll, state, stateProxy, operationLocation, resourceLocation, getOperationStatus, options, } = inputs; + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, options, } = inputs; const response = await poll(operationLocation, options).catch(setStateError({ state, stateProxy, })); const status = getOperationStatus(response, state); - processOperationStatus({ - status, - state, - stateProxy, - }); - if (status === "succeeded" && resourceLocation !== undefined) { - return { - response: await poll(resourceLocation).catch(setStateError({ state, stateProxy })), - status, - }; + logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== undefined) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy })), + status, + }; + } } return { response, status }; } /** Polls the long-running operation. */ async function pollOperation(inputs) { - const { poll, state, stateProxy, options, getOperationStatus, getOperationLocation, withOperationLocation, getPollingInterval, processResult, updateState, setDelay, isDone, } = inputs; - const { operationLocation, resourceLocation } = state.config; + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, withOperationLocation, getPollingInterval, processResult, updateState, setDelay, isDone, setErrorAsResult, } = inputs; + const { operationLocation } = state.config; if (operationLocation !== undefined) { const { response, status } = await pollOperationHelper({ poll, @@ -17309,18 +17324,19 @@ async function pollOperation(inputs) { state, stateProxy, operationLocation, - resourceLocation, + getResourceLocation, options, }); - if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || - (isDone === undefined && ["succeeded", "canceled"].includes(status))) { - stateProxy.setResult(state, buildResult({ - response, - state, - processResult, - })); - } - else { + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + setErrorAsResult, + }); + if (!terminalStates.includes(status)) { const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); if (intervalInMs) setDelay(intervalInMs); @@ -17411,15 +17427,21 @@ function inferLroMode(inputs) { return undefined; } } -function transformStatus(status) { - switch (status === null || status === void 0 ? 
void 0 : status.toLowerCase()) { +function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== undefined) { + throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); + } + switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { case undefined: + return toOperationStatus(statusCode); case "succeeded": return "succeeded"; case "failed": return "failed"; case "running": case "accepted": + case "started": case "canceling": case "cancelling": return "running"; @@ -17435,13 +17457,13 @@ function transformStatus(status) { function getStatus(rawResponse) { var _a; const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - return transformStatus(status); + return transformStatus({ status, statusCode: rawResponse.statusCode }); } function getProvisioningState(rawResponse) { var _a, _b; const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - const state = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; - return transformStatus(state); + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); } function toOperationStatus(statusCode) { if (statusCode === 202) { @@ -17473,11 +17495,28 @@ function calculatePollingIntervalFromDate(retryAfterDate) { } return undefined; } +function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case undefined: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; + } + } + const status = helper(); + return status === "running" && operationLocation === undefined ? "succeeded" : status; +} /** * Initiates the long-running operation. */ async function initHttpOperation(inputs) { - const { stateProxy, resourceLocationConfig, processResult, lro } = inputs; + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; return initOperation({ init: async () => { const response = await lro.sendInitialRequest(); @@ -17493,14 +17532,8 @@ async function initHttpOperation(inputs) { processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, - getOperationStatus: (response, state) => { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; - return mode === undefined || - (mode === "Body" && getOperationStatus(response, state) === "succeeded") - ? 
"succeeded" - : "running"; - }, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult, }); } function getOperationLocation({ rawResponse }, state) { @@ -17536,12 +17569,21 @@ function getOperationStatus({ rawResponse }, state) { return getProvisioningState(rawResponse); } default: - throw new Error(`Unexpected operation mode: ${mode}`); + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); } } +function getResourceLocation({ flatResponse }, state) { + if (typeof flatResponse === "object") { + const resourceLocation = flatResponse.resourceLocation; + if (resourceLocation !== undefined) { + state.config.resourceLocation = resourceLocation; + } + } + return state.config.resourceLocation; +} /** Polls the long-running operation. */ async function pollHttpOperation(inputs) { - const { lro, stateProxy, options, processResult, updateState, setDelay, state } = inputs; + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult, } = inputs; return pollOperation({ state, stateProxy, @@ -17553,12 +17595,14 @@ async function pollHttpOperation(inputs) { getPollingInterval: parseRetryAfter, getOperationLocation, getOperationStatus, + getResourceLocation, options, /** * The expansion here is intentional because `lro` could be an object that * references an inner this, so we need to preserve a reference to it. */ poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult, }); } @@ -17639,7 +17683,7 @@ const createStateProxy$1 = () => ({ * Returns a poller factory. */ function buildCreatePoller(inputs) { - const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, getPollingInterval, } = inputs; + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, getResourceLocation, getPollingInterval, resolveOnUnsuccessful, } = inputs; return async ({ init, poll }, options) => { const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = POLL_INTERVAL_IN_MS, restoreFrom, } = options || {}; const stateProxy = createStateProxy$1(); @@ -17663,6 +17707,7 @@ function buildCreatePoller(inputs) { processResult, getOperationStatus: getStatusFromInitialResponse, withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful, }); let resultPromise; let cancelJob; @@ -17706,10 +17751,14 @@ function buildCreatePoller(inputs) { return poller.getResult(); } case "canceled": { - throw new Error("Operation was canceled"); + if (!resolveOnUnsuccessful) + throw new Error("Operation was canceled"); + return poller.getResult(); } case "failed": { - throw state.error; + if (!resolveOnUnsuccessful) + throw state.error; + return poller.getResult(); } case "notStarted": case "running": { @@ -17729,18 +17778,20 @@ function buildCreatePoller(inputs) { withOperationLocation, getPollingInterval, getOperationStatus: getStatusFromPollResponse, + getResourceLocation, processResult, updateState, options: pollOptions, setDelay: (pollIntervalInMs) => { currentPollIntervalInMs = pollIntervalInMs; }, + setErrorAsResult: !resolveOnUnsuccessful, }); await handleProgressEvents(); - if (state.status === "canceled") { + if (state.status === "canceled" && !resolveOnUnsuccessful) { throw new Error("Operation was canceled"); } - if (state.status === "failed") { + if (state.status === "failed" && !resolveOnUnsuccessful) { throw state.error; } }, @@ -17757,19 +17808,14 @@ function buildCreatePoller(inputs) { * @returns an initialized 
poller */ async function createHttpPoller(lro, options) { - const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, } = options || {}; + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false, } = options || {}; return buildCreatePoller({ - getStatusFromInitialResponse: (response, state) => { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; - return mode === undefined || - (mode === "Body" && getOperationStatus(response, state) === "succeeded") - ? "succeeded" - : "running"; - }, + getStatusFromInitialResponse, getStatusFromPollResponse: getOperationStatus, getOperationLocation, + getResourceLocation, getPollingInterval: parseRetryAfter, + resolveOnUnsuccessful, })({ init: async () => { const response = await lro.sendInitialRequest(); @@ -17812,9 +17858,10 @@ const createStateProxy = () => ({ isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), }); class GenericPollOperation { - constructor(state, lro, lroResourceLocationConfig, processResult, updateState, isDone) { + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { this.state = state; this.lro = lro; + this.setErrorAsResult = setErrorAsResult; this.lroResourceLocationConfig = lroResourceLocationConfig; this.processResult = processResult; this.updateState = updateState; @@ -17832,11 +17879,12 @@ class GenericPollOperation { stateProxy, resourceLocationConfig: this.lroResourceLocationConfig, processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, }))); } const updateState = this.updateState; const isDone = this.isDone; - if (!this.state.isCompleted) { + if (!this.state.isCompleted && this.state.error === undefined) { await pollHttpOperation({ lro: this.lro, state: this.state, @@ -17852,6 +17900,7 @@ class GenericPollOperation { setDelay: (intervalInMs) => { this.pollerConfig.intervalInMs = intervalInMs; }, + setErrorAsResult: this.setErrorAsResult, }); } (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); @@ -18024,6 +18073,8 @@ class Poller { * @param operation - Must contain the basic properties of `PollOperation`. */ constructor(operation) { + /** controls whether to throw an error if the operation failed or was canceled. 
*/ + this.resolveOnUnsuccessful = false; this.stopped = true; this.pollProgressCallbacks = []; this.operation = operation; @@ -18061,15 +18112,10 @@ class Poller { */ async pollOnce(options = {}) { if (!this.isDone()) { - try { - this.operation = await this.operation.update({ - abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this), - }); - } - catch (e) { - this.operation.state.error = e; - } + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); } this.processUpdatedState(); } @@ -18113,22 +18159,26 @@ class Poller { processUpdatedState() { if (this.operation.state.error) { this.stopped = true; - this.reject(this.operation.state.error); - throw this.operation.state.error; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } } if (this.operation.state.isCancelled) { this.stopped = true; - const error = new PollerCancelledError("Operation was canceled"); - this.reject(error); - throw error; + if (!this.resolveOnUnsuccessful) { + const error = new PollerCancelledError("Operation was canceled"); + this.reject(error); + throw error; + } } - else if (this.isDone() && this.resolve) { + if (this.isDone() && this.resolve) { // If the poller has finished polling, this means we now have a result. // However, it can be the case that TResult is instantiated to void, so // we are not expecting a result anyway. To assert that we might not // have a result eventually after finishing polling, we cast the result // to TResult. - this.resolve(this.operation.state.result); + this.resolve(this.getResult()); } } /** @@ -18273,12 +18323,13 @@ class Poller { */ class LroEngine extends Poller { constructor(lro, options) { - const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom } = options || {}; + const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; const state = resumeFrom ? deserializeState(resumeFrom) : {}; - const operation = new GenericPollOperation(state, lro, options === null || options === void 0 ? void 0 : options.lroResourceLocationConfig, options === null || options === void 0 ? void 0 : options.processResult, options === null || options === void 0 ? void 0 : options.updateState, options === null || options === void 0 ? void 0 : options.isDone); + const operation = new GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; this.config = { intervalInMs: intervalInMs }; operation.setPollerConfig(this.config); } @@ -18945,6 +18996,7 @@ exports.setSpanContext = setSpanContext; Object.defineProperty(exports, "__esModule", ({ value: true })); +var abortController = __nccwpck_require__(2557); var crypto = __nccwpck_require__(6113); // Copyright (c) Microsoft Corporation. @@ -18957,13 +19009,77 @@ const isNode = typeof process !== "undefined" && Boolean(process.version) && Boo // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. 
+ * @param properties - The name of the properties that should appear in the object. + */ +function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + */ +function objectHasProperty(thing, property) { + return (isDefined(thing) && typeof thing === "object" && property in thing); +} + +// Copyright (c) Microsoft Corporation. +const StandardAbortMessage = "The operation was aborted."; /** * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options * @returns Promise that is resolved after timeInMs */ -function delay(timeInMs) { - return new Promise((resolve) => setTimeout(() => resolve(), timeInMs)); +function delay(timeInMs, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + var _a; + return reject(new abortController.AbortError((_a = options === null || options === void 0 ? void 0 : options.abortErrorMsg) !== null && _a !== void 0 ? _a : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (isDefined(timer)) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(); + }, timeInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); } // Copyright (c) Microsoft Corporation. @@ -19061,40 +19177,6 @@ async function computeSha256Hash(content, encoding) { return crypto.createHash("sha256").update(content).digest(encoding); } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Helper TypeGuard that checks if something is defined or not. - * @param thing - Anything - */ -function isDefined(thing) { - return typeof thing !== "undefined" && thing !== null; -} -/** - * Helper TypeGuard that checks if the input is an object with the specified properties. - * @param thing - Anything. - * @param properties - The name of the properties that should appear in the object. - */ -function isObjectWithProperties(thing, properties) { - if (!isDefined(thing) || typeof thing !== "object") { - return false; - } - for (const property of properties) { - if (!objectHasProperty(thing, property)) { - return false; - } - } - return true; -} -/** - * Helper TypeGuard that checks if the input is an object with the specified property. - * @param thing - Any object. - * @param property - The name of the property that should appear in the object. 
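The rewritten `delay` in this hunk is the noteworthy change: it now accepts abort options and rejects when the signal fires, instead of always sleeping to completion. A stripped-down sketch of the same pattern, with the options bag reduced to a bare optional signal and a plain `Error` standing in for the `AbortError` that the bundle imports from the abort-controller package:

```ts
// Cancelable sleep: resolve after timeInMs, or reject early when the signal
// aborts. Mirrors the bundled delay(), minus the custom abort-message option.
function delay(timeInMs: number, abortSignal?: AbortSignal): Promise<void> {
  return new Promise((resolve, reject) => {
    let timer: ReturnType<typeof setTimeout> | undefined;
    const onAbort = () => {
      if (timer !== undefined) {
        clearTimeout(timer); // don't leave a dangling timer behind
      }
      reject(new Error("The operation was aborted."));
    };
    if (abortSignal?.aborted) {
      return onAbort(); // already aborted: fail fast, never arm the timer
    }
    timer = setTimeout(() => {
      abortSignal?.removeEventListener("abort", onAbort);
      resolve();
    }, timeInMs);
    abortSignal?.addEventListener("abort", onAbort, { once: true });
  });
}
```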
- */ -function objectHasProperty(thing, property) { - return (isDefined(thing) && typeof thing === "object" && property in thing); -} - exports.computeSha256Hash = computeSha256Hash; exports.computeSha256Hmac = computeSha256Hmac; exports.delay = delay; @@ -64364,507 +64446,507 @@ var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_); // EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js var exec = __nccwpck_require__(1514); ;// CONCATENATED MODULE: ./src/utils.ts - - -async function getCmdOutput(cmd, args = [], options = {}) { - let stdout = ""; - let stderr = ""; - try { - await exec.exec(cmd, args, { - silent: true, - listeners: { - stdout(data) { - stdout += data.toString(); - }, - stderr(data) { - stderr += data.toString(); - }, - }, - ...options, - }); - } - catch (e) { - lib_core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); - lib_core.info(`[warning] ${stderr}`); - throw e; - } - return stdout; -} + + +async function getCmdOutput(cmd, args = [], options = {}) { + let stdout = ""; + let stderr = ""; + try { + await exec.exec(cmd, args, { + silent: true, + listeners: { + stdout(data) { + stdout += data.toString(); + }, + stderr(data) { + stderr += data.toString(); + }, + }, + ...options, + }); + } + catch (e) { + lib_core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); + lib_core.info(`[warning] ${stderr}`); + throw e; + } + return stdout; +} ;// CONCATENATED MODULE: ./src/workspace.ts - - -const SAVE_TARGETS = new Set(["lib", "proc-macro"]); -class Workspace { - constructor(root, target) { - this.root = root; - this.target = target; - } - async getPackages() { - let packages = []; - try { - const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], { - cwd: this.root, - })); - for (const pkg of meta.packages) { - if (pkg.manifest_path.startsWith(this.root)) { - continue; - } - const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); - packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); - } - } - catch { } - return packages; - } -} + + +const SAVE_TARGETS = new Set(["lib", "proc-macro"]); +class Workspace { + constructor(root, target) { + this.root = root; + this.target = target; + } + async getPackages() { + let packages = []; + try { + const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], { + cwd: this.root, + })); + for (const pkg of meta.packages) { + if (pkg.manifest_path.startsWith(this.root)) { + continue; + } + const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); + packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); + } + } + catch { } + return packages; + } +} ;// CONCATENATED MODULE: ./src/config.ts - - - - - - - - -const HOME = external_os_default().homedir(); -const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); -const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; -const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES"; -const config_STATE_BINS = "RUST_CACHE_BINS"; -const STATE_KEY = "RUST_CACHE_KEY"; -class CacheConfig { - constructor() { - /** All the paths we want to cache */ - this.cachePaths = []; - /** The primary cache key */ - this.cacheKey = ""; - /** The secondary (restore) key that only contains the prefix and 
environment */ - this.restoreKey = ""; - /** The workspace configurations */ - this.workspaces = []; - /** The prefix portion of the cache key */ - this.keyPrefix = ""; - /** The rust version considered for the cache key */ - this.keyRust = ""; - /** The environment variables considered for the cache key */ - this.keyEnvs = []; - /** The files considered for the cache key */ - this.keyFiles = []; - } - /** - * Constructs a [`CacheConfig`] with all the paths and keys. - * - * This will read the action `input`s, and read and persist `state` as necessary. - */ - static async new() { - const self = new CacheConfig(); - // Construct key prefix: - // This uses either the `shared-key` input, - // or the `key` input combined with the `job` key. - let key = `v0-rust`; - const sharedKey = lib_core.getInput("shared-key"); - if (sharedKey) { - key += `-${sharedKey}`; - } - else { - const inputKey = lib_core.getInput("key"); - if (inputKey) { - key += `-${inputKey}`; - } - const job = process.env.GITHUB_JOB; - if (job) { - key += `-${job}`; - } - } - self.keyPrefix = key; - // Construct environment portion of the key: - // This consists of a hash that considers the rust version - // as well as all the environment variables as given by a default list - // and the `env-vars` input. - // The env vars are sorted, matched by prefix and hashed into the - // resulting environment hash. - let hasher = external_crypto_default().createHash("sha1"); - const rustVersion = await getRustVersion(); - let keyRust = `${rustVersion.release} ${rustVersion.host}`; - hasher.update(keyRust); - hasher.update(rustVersion["commit-hash"]); - keyRust += ` (${rustVersion["commit-hash"]})`; - self.keyRust = keyRust; - // these prefixes should cover most of the compiler / rust / cargo keys - const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; - envPrefixes.push(...lib_core.getInput("env-vars").split(/\s+/).filter(Boolean)); - // sort the available env vars so we have a more stable hash - const keyEnvs = []; - const envKeys = Object.keys(process.env); - envKeys.sort((a, b) => a.localeCompare(b)); - for (const key of envKeys) { - const value = process.env[key]; - if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { - hasher.update(`${key}=${value}`); - keyEnvs.push(key); - } - } - self.keyEnvs = keyEnvs; - key += `-${hasher.digest("hex")}`; - self.restoreKey = key; - // Construct the lockfiles portion of the key: - // This considers all the files found via globbing for various manifests - // and lockfiles. - // This part is computed in the "pre"/"restore" part of the job and persisted - // into the `state`. That state is loaded in the "post"/"save" part of the - // job so we have consistent values even though the "main" actions run - // might create/overwrite lockfiles. 
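For orientation, the key assembled in `CacheConfig.new()` is layered: a human-readable prefix (`v0-rust` plus the `shared-key` or `key`/job inputs), then a SHA-1 over the rustc version and the matching environment variables (everything up to here is the restore key), and finally a SHA-1 over the globbed lockfiles. A condensed, illustrative sketch with invented parameter names:

```ts
import { createHash } from "crypto";

// Illustrative condensation of the key layering; the bundled CacheConfig.new()
// interleaves this with reading action inputs and persisting state.
function buildKeys(
  prefix: string,        // e.g. "v0-rust-<shared-key>"
  keyRust: string,       // rustc release, host triple, commit hash
  env: NodeJS.ProcessEnv,
  envPrefixes: string[], // e.g. ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]
  lockHash: string,      // sha1 over Cargo.toml / Cargo.lock / rust-toolchain files
) {
  const hasher = createHash("sha1");
  hasher.update(keyRust);
  // sort the env vars so the digest is stable across runs
  for (const name of Object.keys(env).sort((a, b) => a.localeCompare(b))) {
    const value = env[name];
    if (value && envPrefixes.some((p) => name.startsWith(p))) {
      hasher.update(`${name}=${value}`);
    }
  }
  const restoreKey = `${prefix}-${hasher.digest("hex")}`; // prefix + environment
  const cacheKey = `${restoreKey}-${lockHash}`;           // ... + lockfiles
  return { restoreKey, cacheKey };
}
```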
- let lockHash = lib_core.getState(STATE_LOCKFILE_HASH); - let keyFiles = JSON.parse(lib_core.getState(STATE_LOCKFILES) || "[]"); - if (!lockHash) { - const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", { - followSymbolicLinks: false, - }); - keyFiles = await globber.glob(); - keyFiles.sort((a, b) => a.localeCompare(b)); - hasher = external_crypto_default().createHash("sha1"); - for (const file of keyFiles) { - for await (const chunk of external_fs_default().createReadStream(file)) { - hasher.update(chunk); - } - } - lockHash = hasher.digest("hex"); - lib_core.saveState(STATE_LOCKFILE_HASH, lockHash); - lib_core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles)); - } - self.keyFiles = keyFiles; - key += `-${lockHash}`; - self.cacheKey = key; - // Constructs the workspace config and paths to restore: - // The workspaces are given using a `$workspace -> $target` syntax. - const workspaces = []; - const workspacesInput = lib_core.getInput("workspaces") || "."; - for (const workspace of workspacesInput.trim().split("\n")) { - let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); - root = external_path_default().resolve(root); - target = external_path_default().join(root, target); - workspaces.push(new Workspace(root, target)); - } - self.workspaces = workspaces; - self.cachePaths = [config_CARGO_HOME, ...workspaces.map((ws) => ws.target)]; - return self; - } - printInfo() { - lib_core.startGroup("Cache Configuration"); - lib_core.info(`Workspaces:`); - for (const workspace of this.workspaces) { - lib_core.info(` ${workspace.root}`); - } - lib_core.info(`Cache Paths:`); - for (const path of this.cachePaths) { - lib_core.info(` ${path}`); - } - lib_core.info(`Restore Key:`); - lib_core.info(` ${this.restoreKey}`); - lib_core.info(`Cache Key:`); - lib_core.info(` ${this.cacheKey}`); - lib_core.info(`.. Prefix:`); - lib_core.info(` - ${this.keyPrefix}`); - lib_core.info(`.. Environment considered:`); - lib_core.info(` - Rust Version: ${this.keyRust}`); - for (const env of this.keyEnvs) { - lib_core.info(` - ${env}`); - } - lib_core.info(`.. 
Lockfiles considered:`); - for (const file of this.keyFiles) { - lib_core.info(` - ${file}`); - } - lib_core.endGroup(); - } -} -async function getRustVersion() { - const stdout = await getCmdOutput("rustc", ["-vV"]); - let splits = stdout - .split(/[\n\r]+/) - .filter(Boolean) - .map((s) => s.split(":").map((s) => s.trim())) - .filter((s) => s.length === 2); - return Object.fromEntries(splits); -} + + + + + + + + +const HOME = external_os_default().homedir(); +const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); +const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; +const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES"; +const config_STATE_BINS = "RUST_CACHE_BINS"; +const STATE_KEY = "RUST_CACHE_KEY"; +class CacheConfig { + constructor() { + /** All the paths we want to cache */ + this.cachePaths = []; + /** The primary cache key */ + this.cacheKey = ""; + /** The secondary (restore) key that only contains the prefix and environment */ + this.restoreKey = ""; + /** The workspace configurations */ + this.workspaces = []; + /** The prefix portion of the cache key */ + this.keyPrefix = ""; + /** The rust version considered for the cache key */ + this.keyRust = ""; + /** The environment variables considered for the cache key */ + this.keyEnvs = []; + /** The files considered for the cache key */ + this.keyFiles = []; + } + /** + * Constructs a [`CacheConfig`] with all the paths and keys. + * + * This will read the action `input`s, and read and persist `state` as necessary. + */ + static async new() { + const self = new CacheConfig(); + // Construct key prefix: + // This uses either the `shared-key` input, + // or the `key` input combined with the `job` key. + let key = `v0-rust`; + const sharedKey = lib_core.getInput("shared-key"); + if (sharedKey) { + key += `-${sharedKey}`; + } + else { + const inputKey = lib_core.getInput("key"); + if (inputKey) { + key += `-${inputKey}`; + } + const job = process.env.GITHUB_JOB; + if (job) { + key += `-${job}`; + } + } + self.keyPrefix = key; + // Construct environment portion of the key: + // This consists of a hash that considers the rust version + // as well as all the environment variables as given by a default list + // and the `env-vars` input. + // The env vars are sorted, matched by prefix and hashed into the + // resulting environment hash. + let hasher = external_crypto_default().createHash("sha1"); + const rustVersion = await getRustVersion(); + let keyRust = `${rustVersion.release} ${rustVersion.host}`; + hasher.update(keyRust); + hasher.update(rustVersion["commit-hash"]); + keyRust += ` (${rustVersion["commit-hash"]})`; + self.keyRust = keyRust; + // these prefixes should cover most of the compiler / rust / cargo keys + const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; + envPrefixes.push(...lib_core.getInput("env-vars").split(/\s+/).filter(Boolean)); + // sort the available env vars so we have a more stable hash + const keyEnvs = []; + const envKeys = Object.keys(process.env); + envKeys.sort((a, b) => a.localeCompare(b)); + for (const key of envKeys) { + const value = process.env[key]; + if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { + hasher.update(`${key}=${value}`); + keyEnvs.push(key); + } + } + self.keyEnvs = keyEnvs; + key += `-${hasher.digest("hex")}`; + self.restoreKey = key; + // Construct the lockfiles portion of the key: + // This considers all the files found via globbing for various manifests + // and lockfiles. 
+ // This part is computed in the "pre"/"restore" part of the job and persisted + // into the `state`. That state is loaded in the "post"/"save" part of the + // job so we have consistent values even though the "main" actions run + // might create/overwrite lockfiles. + let lockHash = lib_core.getState(STATE_LOCKFILE_HASH); + let keyFiles = JSON.parse(lib_core.getState(STATE_LOCKFILES) || "[]"); + if (!lockHash) { + const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", { + followSymbolicLinks: false, + }); + keyFiles = await globber.glob(); + keyFiles.sort((a, b) => a.localeCompare(b)); + hasher = external_crypto_default().createHash("sha1"); + for (const file of keyFiles) { + for await (const chunk of external_fs_default().createReadStream(file)) { + hasher.update(chunk); + } + } + lockHash = hasher.digest("hex"); + lib_core.saveState(STATE_LOCKFILE_HASH, lockHash); + lib_core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles)); + } + self.keyFiles = keyFiles; + key += `-${lockHash}`; + self.cacheKey = key; + // Constructs the workspace config and paths to restore: + // The workspaces are given using a `$workspace -> $target` syntax. + const workspaces = []; + const workspacesInput = lib_core.getInput("workspaces") || "."; + for (const workspace of workspacesInput.trim().split("\n")) { + let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); + root = external_path_default().resolve(root); + target = external_path_default().join(root, target); + workspaces.push(new Workspace(root, target)); + } + self.workspaces = workspaces; + self.cachePaths = [config_CARGO_HOME, ...workspaces.map((ws) => ws.target)]; + return self; + } + printInfo() { + lib_core.startGroup("Cache Configuration"); + lib_core.info(`Workspaces:`); + for (const workspace of this.workspaces) { + lib_core.info(` ${workspace.root}`); + } + lib_core.info(`Cache Paths:`); + for (const path of this.cachePaths) { + lib_core.info(` ${path}`); + } + lib_core.info(`Restore Key:`); + lib_core.info(` ${this.restoreKey}`); + lib_core.info(`Cache Key:`); + lib_core.info(` ${this.cacheKey}`); + lib_core.info(`.. Prefix:`); + lib_core.info(` - ${this.keyPrefix}`); + lib_core.info(`.. Environment considered:`); + lib_core.info(` - Rust Version: ${this.keyRust}`); + for (const env of this.keyEnvs) { + lib_core.info(` - ${env}`); + } + lib_core.info(`.. Lockfiles considered:`); + for (const file of this.keyFiles) { + lib_core.info(` - ${file}`); + } + lib_core.endGroup(); + } +} +async function getRustVersion() { + const stdout = await getCmdOutput("rustc", ["-vV"]); + let splits = stdout + .split(/[\n\r]+/) + .filter(Boolean) + .map((s) => s.split(":").map((s) => s.trim())) + .filter((s) => s.length === 2); + return Object.fromEntries(splits); +} ;// CONCATENATED MODULE: ./src/cleanup.ts - - - - - -async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { - lib_core.debug(`cleaning target directory "${targetDir}"`); - // remove all *files* from the profile directory - let dir = await external_fs_default().promises.opendir(targetDir); - for await (const dirent of dir) { - if (dirent.isDirectory()) { - let dirName = external_path_default().join(dir.path, dirent.name); - // is it a profile dir, or a nested target dir? 
- let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json"))); - try { - if (isNestedTarget) { - await cleanTargetDir(dirName, packages, checkTimestamp); - } - else { - await cleanProfileTarget(dirName, packages, checkTimestamp); - } - } - catch { } - } - else if (dirent.name !== "CACHEDIR.TAG") { - await rm(dir.path, dirent); - } - } -} -async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { - lib_core.debug(`cleaning profile directory "${profileDir}"`); - let keepProfile = new Set(["build", ".fingerprint", "deps"]); - await rmExcept(profileDir, keepProfile); - const keepPkg = new Set(packages.map((p) => p.name)); - await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp); - await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); - const keepDeps = new Set(packages.flatMap((p) => { - const names = []; - for (const n of [p.name, ...p.targets]) { - const name = n.replace(/-/g, "_"); - names.push(name, `lib${name}`); - } - return names; - })); - await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp); -} -async function getCargoBins() { - const bins = new Set(); - try { - const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(config_CARGO_HOME, ".crates2.json"), "utf8")); - for (const pkg of Object.values(installs)) { - for (const bin of pkg.bins) { - bins.add(bin); - } - } - } - catch { } - return bins; -} -async function cleanBin() { - const bins = await getCargoBins(); - const oldBins = JSON.parse(core.getState(STATE_BINS)); - for (const bin of oldBins) { - bins.delete(bin); - } - const dir = await fs.promises.opendir(path.join(CARGO_HOME, "bin")); - for await (const dirent of dir) { - if (dirent.isFile() && !bins.has(dirent.name)) { - await rm(dir.path, dirent); - } - } -} -async function cleanRegistry(packages) { - // `.cargo/registry/src` - // we can remove this completely, as cargo will recreate this from `cache` - await rmRF(path.join(CARGO_HOME, "registry", "src")); - // `.cargo/registry/index` - const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index")); - for await (const dirent of indexDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/index/gitpro.ttaallkk.top-1ecc6299db9ec823` - // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` - const dirPath = path.join(indexDir.path, dirent.name); - // for a git registry, we can remove `.cache`, as cargo will recreate it from git - if (await exists(path.join(dirPath, ".git"))) { - await rmRF(path.join(dirPath, ".cache")); - } - // TODO: else, clean `.cache` based on the `packages` - } - } - const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); - // `.cargo/registry/cache` - const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache")); - for await (const dirent of cacheDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/cache/gitpro.ttaallkk.top-1ecc6299db9ec823` - // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` - const dir = await fs.promises.opendir(path.join(cacheDir.path, dirent.name)); - for await (const dirent of dir) { - // here we check that the downloaded `.crate` matches one from our dependencies - if (dirent.isFile() && !pkgSet.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - } -} -async function 
cleanGit(packages) { - const coPath = path.join(CARGO_HOME, "git", "checkouts"); - const dbPath = path.join(CARGO_HOME, "git", "db"); - const repos = new Map(); - for (const p of packages) { - if (!p.path.startsWith(coPath)) { - continue; - } - const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep); - const refs = repos.get(repo); - if (refs) { - refs.add(ref); - } - else { - repos.set(repo, new Set([ref])); - } - } - // we have to keep both the clone, and the checkout, removing either will - // trigger a rebuild - // clean the db - try { - let dir = await fs.promises.opendir(dbPath); - for await (const dirent of dir) { - if (!repos.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - catch { } - // clean the checkouts - try { - let dir = await fs.promises.opendir(coPath); - for await (const dirent of dir) { - const refs = repos.get(dirent.name); - if (!refs) { - await rm(dir.path, dirent); - continue; - } - if (!dirent.isDirectory()) { - continue; - } - const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name)); - for await (const dirent of refsDir) { - if (!refs.has(dirent.name)) { - await rm(refsDir.path, dirent); - } - } - } - } - catch { } -} -const ONE_WEEK = 7 * 24 * 3600 * 1000; -/** - * Removes all files or directories in `dirName`, except the ones matching - * any string in the `keepPrefix` set. - * - * The matching strips and trailing `-$hash` suffix. - * - * When the `checkTimestamp` flag is set, this will also remove anything older - * than one week. - */ -async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { - const dir = await external_fs_default().promises.opendir(dirName); - for await (const dirent of dir) { - let name = dirent.name; - // strip the trailing hash - const idx = name.lastIndexOf("-"); - if (idx !== -1) { - name = name.slice(0, idx); - } - let isOutdated = false; - if (checkTimestamp) { - const fileName = external_path_default().join(dir.path, dirent.name); - const { mtime } = await external_fs_default().promises.stat(fileName); - isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; - } - if (!keepPrefix.has(name) || isOutdated) { - await rm(dir.path, dirent); - } - } -} -async function rm(parent, dirent) { - try { - const fileName = external_path_default().join(parent, dirent.name); - lib_core.debug(`deleting "${fileName}"`); - if (dirent.isFile()) { - await external_fs_default().promises.unlink(fileName); - } - else if (dirent.isDirectory()) { - await lib_io.rmRF(fileName); - } - } - catch { } -} -async function rmRF(dirName) { - core.debug(`deleting "${dirName}"`); - await io.rmRF(dirName); -} -async function exists(path) { - try { - await external_fs_default().promises.access(path); - return true; - } - catch { - return false; - } -} + + + + + +async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { + lib_core.debug(`cleaning target directory "${targetDir}"`); + // remove all *files* from the profile directory + let dir = await external_fs_default().promises.opendir(targetDir); + for await (const dirent of dir) { + if (dirent.isDirectory()) { + let dirName = external_path_default().join(dir.path, dirent.name); + // is it a profile dir, or a nested target dir? 
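The profile-vs-nested-target check just below keys off two marker files that cargo itself maintains inside every target directory: `CACHEDIR.TAG` and `.rustc_info.json`. Factored out as a sketch, reusing the `exists` helper and the bundled `path` alias defined elsewhere in this module:

```ts
// A subdirectory of `target/` is itself a nested cargo target dir, rather than
// a profile dir such as `debug/` or `release/`, when cargo's markers exist.
async function isNestedTargetDir(dirName: string): Promise<boolean> {
  return (
    (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) ||
    (await exists(external_path_default().join(dirName, ".rustc_info.json")))
  );
}
```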
+ let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json"))); + try { + if (isNestedTarget) { + await cleanTargetDir(dirName, packages, checkTimestamp); + } + else { + await cleanProfileTarget(dirName, packages, checkTimestamp); + } + } + catch { } + } + else if (dirent.name !== "CACHEDIR.TAG") { + await rm(dir.path, dirent); + } + } +} +async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { + lib_core.debug(`cleaning profile directory "${profileDir}"`); + let keepProfile = new Set(["build", ".fingerprint", "deps"]); + await rmExcept(profileDir, keepProfile); + const keepPkg = new Set(packages.map((p) => p.name)); + await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp); + await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); + const keepDeps = new Set(packages.flatMap((p) => { + const names = []; + for (const n of [p.name, ...p.targets]) { + const name = n.replace(/-/g, "_"); + names.push(name, `lib${name}`); + } + return names; + })); + await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp); +} +async function getCargoBins() { + const bins = new Set(); + try { + const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(config_CARGO_HOME, ".crates2.json"), "utf8")); + for (const pkg of Object.values(installs)) { + for (const bin of pkg.bins) { + bins.add(bin); + } + } + } + catch { } + return bins; +} +async function cleanBin() { + const bins = await getCargoBins(); + const oldBins = JSON.parse(core.getState(STATE_BINS)); + for (const bin of oldBins) { + bins.delete(bin); + } + const dir = await fs.promises.opendir(path.join(CARGO_HOME, "bin")); + for await (const dirent of dir) { + if (dirent.isFile() && !bins.has(dirent.name)) { + await rm(dir.path, dirent); + } + } +} +async function cleanRegistry(packages) { + // `.cargo/registry/src` + // we can remove this completely, as cargo will recreate this from `cache` + await rmRF(path.join(CARGO_HOME, "registry", "src")); + // `.cargo/registry/index` + const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index")); + for await (const dirent of indexDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/index/gitpro.ttaallkk.top-1ecc6299db9ec823` + // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` + const dirPath = path.join(indexDir.path, dirent.name); + // for a git registry, we can remove `.cache`, as cargo will recreate it from git + if (await exists(path.join(dirPath, ".git"))) { + await rmRF(path.join(dirPath, ".cache")); + } + // TODO: else, clean `.cache` based on the `packages` + } + } + const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); + // `.cargo/registry/cache` + const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache")); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/cache/gitpro.ttaallkk.top-1ecc6299db9ec823` + // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` + const dir = await fs.promises.opendir(path.join(cacheDir.path, dirent.name)); + for await (const dirent of dir) { + // here we check that the downloaded `.crate` matches one from our dependencies + if (dirent.isFile() && !pkgSet.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + } +} +async function 
cleanGit(packages) { + const coPath = path.join(CARGO_HOME, "git", "checkouts"); + const dbPath = path.join(CARGO_HOME, "git", "db"); + const repos = new Map(); + for (const p of packages) { + if (!p.path.startsWith(coPath)) { + continue; + } + const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep); + const refs = repos.get(repo); + if (refs) { + refs.add(ref); + } + else { + repos.set(repo, new Set([ref])); + } + } + // we have to keep both the clone, and the checkout, removing either will + // trigger a rebuild + // clean the db + try { + let dir = await fs.promises.opendir(dbPath); + for await (const dirent of dir) { + if (!repos.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + catch { } + // clean the checkouts + try { + let dir = await fs.promises.opendir(coPath); + for await (const dirent of dir) { + const refs = repos.get(dirent.name); + if (!refs) { + await rm(dir.path, dirent); + continue; + } + if (!dirent.isDirectory()) { + continue; + } + const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name)); + for await (const dirent of refsDir) { + if (!refs.has(dirent.name)) { + await rm(refsDir.path, dirent); + } + } + } + } + catch { } +} +const ONE_WEEK = 7 * 24 * 3600 * 1000; +/** + * Removes all files or directories in `dirName`, except the ones matching + * any string in the `keepPrefix` set. + * + * The matching strips and trailing `-$hash` suffix. + * + * When the `checkTimestamp` flag is set, this will also remove anything older + * than one week. + */ +async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { + const dir = await external_fs_default().promises.opendir(dirName); + for await (const dirent of dir) { + let name = dirent.name; + // strip the trailing hash + const idx = name.lastIndexOf("-"); + if (idx !== -1) { + name = name.slice(0, idx); + } + let isOutdated = false; + if (checkTimestamp) { + const fileName = external_path_default().join(dir.path, dirent.name); + const { mtime } = await external_fs_default().promises.stat(fileName); + isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; + } + if (!keepPrefix.has(name) || isOutdated) { + await rm(dir.path, dirent); + } + } +} +async function rm(parent, dirent) { + try { + const fileName = external_path_default().join(parent, dirent.name); + lib_core.debug(`deleting "${fileName}"`); + if (dirent.isFile()) { + await external_fs_default().promises.unlink(fileName); + } + else if (dirent.isDirectory()) { + await lib_io.rmRF(fileName); + } + } + catch { } +} +async function rmRF(dirName) { + core.debug(`deleting "${dirName}"`); + await io.rmRF(dirName); +} +async function exists(path) { + try { + await external_fs_default().promises.access(path); + return true; + } + catch { + return false; + } +} ;// CONCATENATED MODULE: ./src/restore.ts - - - - -process.on("uncaughtException", (e) => { - lib_core.info(`[warning] ${e.message}`); - if (e.stack) { - lib_core.info(e.stack); - } -}); -async function run() { - if (!cache.isFeatureAvailable()) { - setCacheHitOutput(false); - return; - } - try { - var cacheOnFailure = lib_core.getInput("cache-on-failure").toLowerCase(); - if (cacheOnFailure !== "true") { - cacheOnFailure = "false"; - } - lib_core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure); - lib_core.exportVariable("CARGO_INCREMENTAL", 0); - const config = await CacheConfig["new"](); - config.printInfo(); - lib_core.info(""); - const bins = await getCargoBins(); - lib_core.saveState(config_STATE_BINS, JSON.stringify([...bins])); - lib_core.info(`... 
Restoring cache ...`); - const key = config.cacheKey; - const restoreKey = await cache.restoreCache(config.cachePaths, key, [config.restoreKey]); - if (restoreKey) { - lib_core.info(`Restored from cache key "${restoreKey}".`); - lib_core.saveState(STATE_KEY, restoreKey); - if (restoreKey !== key) { - // pre-clean the target directory on cache mismatch - for (const workspace of config.workspaces) { - try { - const packages = await workspace.getPackages(); - await cleanTargetDir(workspace.target, packages, true); - } - catch { } - } - } - setCacheHitOutput(restoreKey === key); - } - else { - lib_core.info("No cache found."); - setCacheHitOutput(false); - } - } - catch (e) { - setCacheHitOutput(false); - lib_core.info(`[warning] ${e.stack}`); - } -} -function setCacheHitOutput(cacheHit) { - lib_core.setOutput("cache-hit", cacheHit.toString()); -} -run(); + + + + +process.on("uncaughtException", (e) => { + lib_core.info(`[warning] ${e.message}`); + if (e.stack) { + lib_core.info(e.stack); + } +}); +async function run() { + if (!cache.isFeatureAvailable()) { + setCacheHitOutput(false); + return; + } + try { + var cacheOnFailure = lib_core.getInput("cache-on-failure").toLowerCase(); + if (cacheOnFailure !== "true") { + cacheOnFailure = "false"; + } + lib_core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure); + lib_core.exportVariable("CARGO_INCREMENTAL", 0); + const config = await CacheConfig["new"](); + config.printInfo(); + lib_core.info(""); + const bins = await getCargoBins(); + lib_core.saveState(config_STATE_BINS, JSON.stringify([...bins])); + lib_core.info(`... Restoring cache ...`); + const key = config.cacheKey; + const restoreKey = await cache.restoreCache(config.cachePaths, key, [config.restoreKey]); + if (restoreKey) { + lib_core.info(`Restored from cache key "${restoreKey}".`); + lib_core.saveState(STATE_KEY, restoreKey); + if (restoreKey !== key) { + // pre-clean the target directory on cache mismatch + for (const workspace of config.workspaces) { + try { + const packages = await workspace.getPackages(); + await cleanTargetDir(workspace.target, packages, true); + } + catch { } + } + } + setCacheHitOutput(restoreKey === key); + } + else { + lib_core.info("No cache found."); + setCacheHitOutput(false); + } + } + catch (e) { + setCacheHitOutput(false); + lib_core.info(`[warning] ${e.stack}`); + } +} +function setCacheHitOutput(cacheHit) { + lib_core.setOutput("cache-hit", cacheHit.toString()); +} +run(); })(); diff --git a/dist/save/index.js b/dist/save/index.js index 3cc65b7..c2802aa 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -2494,7 +2494,6 @@ const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); const os = __importStar(__nccwpck_require__(2037)); const path = __importStar(__nccwpck_require__(1017)); -const uuid_1 = __nccwpck_require__(8974); const oidc_utils_1 = __nccwpck_require__(8041); /** * The code to exit an action @@ -2524,20 +2523,9 @@ function exportVariable(name, val) { process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { - const delimiter = `ghadelimiter_${uuid_1.v4()}`; - // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter. 
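Both `dist/restore` and `dist/save` pick up the same @actions/core changes, which matters because the action relies on state round-tripping between its pre and post steps: values written with `core.saveState` during the restore step come back via `core.getState` in the save step, now traveling through the `GITHUB_STATE` file instead of stdout. A usage sketch with an illustrative value:

```ts
import * as core from "@actions/core";

// pre ("restore") step: persist the matched key for the post step
core.saveState("RUST_CACHE_KEY", "v0-rust-abc123"); // value illustrative

// post ("save") step of the same job: read it back to decide whether
// saving the cache again is worthwhile
const matchedKey = core.getState("RUST_CACHE_KEY");
core.info(`cache key from the restore step: ${matchedKey}`);
```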
- if (name.includes(delimiter)) { - throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); - } - if (convertedVal.includes(delimiter)) { - throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); - } - const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; - file_command_1.issueCommand('ENV', commandValue); - } - else { - command_1.issueCommand('set-env', { name }, convertedVal); + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); } + command_1.issueCommand('set-env', { name }, convertedVal); } exports.exportVariable = exportVariable; /** @@ -2555,7 +2543,7 @@ exports.setSecret = setSecret; function addPath(inputPath) { const filePath = process.env['GITHUB_PATH'] || ''; if (filePath) { - file_command_1.issueCommand('PATH', inputPath); + file_command_1.issueFileCommand('PATH', inputPath); } else { command_1.issueCommand('add-path', {}, inputPath); @@ -2595,7 +2583,10 @@ function getMultilineInput(name, options) { const inputs = getInput(name, options) .split('\n') .filter(x => x !== ''); - return inputs; + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); } exports.getMultilineInput = getMultilineInput; /** @@ -2628,8 +2619,12 @@ exports.getBooleanInput = getBooleanInput; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + } process.stdout.write(os.EOL); - command_1.issueCommand('set-output', { name }, value); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); } exports.setOutput = setOutput; /** @@ -2758,7 +2753,11 @@ exports.group = group; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); } exports.saveState = saveState; /** @@ -2824,13 +2823,14 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issueCommand = void 0; +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(__nccwpck_require__(7147)); const os = __importStar(__nccwpck_require__(2037)); +const uuid_1 = __nccwpck_require__(8974); const utils_1 = __nccwpck_require__(5278); -function issueCommand(command, message) { +function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); @@ -2842,7 +2842,22 @@ function issueCommand(command, message) { encoding: 'utf8' }); } -exports.issueCommand = issueCommand; +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically 
never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; //# sourceMappingURL=file-command.js.map /***/ }), @@ -17230,8 +17245,7 @@ function setStateError(inputs) { }; } function processOperationStatus(result) { - const { state, stateProxy, status } = result; - logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${terminalStates.includes(status) ? "Stopped" : "Running"}`); + const { state, stateProxy, status, isDone, processResult, response, setErrorAsResult } = result; switch (status) { case "succeeded": { stateProxy.setSucceeded(state); @@ -17247,6 +17261,15 @@ function processOperationStatus(result) { break; } } + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || + (isDone === undefined && + ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult, + })); + } } function buildResult(inputs) { const { processResult, response, state } = inputs; @@ -17256,7 +17279,7 @@ function buildResult(inputs) { * Initiates the long-running operation. */ async function initOperation(inputs) { - const { init, stateProxy, processResult, getOperationStatus, withOperationLocation } = inputs; + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; const { operationLocation, resourceLocation, metadata, response } = await init(); if (operationLocation) withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); @@ -17267,41 +17290,33 @@ async function initOperation(inputs) { }; logger.verbose(`LRO: Operation description:`, config); const state = stateProxy.initState(config); - const status = getOperationStatus(response, state); - if (status === "succeeded" || operationLocation === undefined) { - stateProxy.setSucceeded(state); - stateProxy.setResult(state, buildResult({ - response, - state, - processResult, - })); - } + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); return state; } async function pollOperationHelper(inputs) { - const { poll, state, stateProxy, operationLocation, resourceLocation, getOperationStatus, options, } = inputs; + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, options, } = inputs; const response = await poll(operationLocation, options).catch(setStateError({ state, stateProxy, })); const status = getOperationStatus(response, state); - processOperationStatus({ - status, - state, - stateProxy, - }); - if (status === "succeeded" && resourceLocation !== undefined) { - return { - response: await poll(resourceLocation).catch(setStateError({ state, stateProxy })), - status, - }; + logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== undefined) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy })), + status, + }; + } } return { response, status }; } /** Polls the long-running operation. */ async function pollOperation(inputs) { - const { poll, state, stateProxy, options, getOperationStatus, getOperationLocation, withOperationLocation, getPollingInterval, processResult, updateState, setDelay, isDone, } = inputs; - const { operationLocation, resourceLocation } = state.config; + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, withOperationLocation, getPollingInterval, processResult, updateState, setDelay, isDone, setErrorAsResult, } = inputs; + const { operationLocation } = state.config; if (operationLocation !== undefined) { const { response, status } = await pollOperationHelper({ poll, @@ -17309,18 +17324,19 @@ async function pollOperation(inputs) { state, stateProxy, operationLocation, - resourceLocation, + getResourceLocation, options, }); - if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || - (isDone === undefined && ["succeeded", "canceled"].includes(status))) { - stateProxy.setResult(state, buildResult({ - response, - state, - processResult, - })); - } - else { + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + setErrorAsResult, + }); + if (!terminalStates.includes(status)) { const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); if (intervalInMs) setDelay(intervalInMs); @@ -17411,15 +17427,21 @@ function inferLroMode(inputs) { return undefined; } } -function transformStatus(status) { - switch (status === null || status === void 0 ? 
void 0 : status.toLowerCase()) { +function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== undefined) { + throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); + } + switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { case undefined: + return toOperationStatus(statusCode); case "succeeded": return "succeeded"; case "failed": return "failed"; case "running": case "accepted": + case "started": case "canceling": case "cancelling": return "running"; @@ -17435,13 +17457,13 @@ function transformStatus(status) { function getStatus(rawResponse) { var _a; const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - return transformStatus(status); + return transformStatus({ status, statusCode: rawResponse.statusCode }); } function getProvisioningState(rawResponse) { var _a, _b; const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - const state = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; - return transformStatus(state); + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); } function toOperationStatus(statusCode) { if (statusCode === 202) { @@ -17473,11 +17495,28 @@ function calculatePollingIntervalFromDate(retryAfterDate) { } return undefined; } +function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case undefined: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; + } + } + const status = helper(); + return status === "running" && operationLocation === undefined ? "succeeded" : status; +} /** * Initiates the long-running operation. */ async function initHttpOperation(inputs) { - const { stateProxy, resourceLocationConfig, processResult, lro } = inputs; + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; return initOperation({ init: async () => { const response = await lro.sendInitialRequest(); @@ -17493,14 +17532,8 @@ async function initHttpOperation(inputs) { processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, - getOperationStatus: (response, state) => { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; - return mode === undefined || - (mode === "Body" && getOperationStatus(response, state) === "succeeded") - ? 
"succeeded" - : "running"; - }, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult, }); } function getOperationLocation({ rawResponse }, state) { @@ -17536,12 +17569,21 @@ function getOperationStatus({ rawResponse }, state) { return getProvisioningState(rawResponse); } default: - throw new Error(`Unexpected operation mode: ${mode}`); + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); } } +function getResourceLocation({ flatResponse }, state) { + if (typeof flatResponse === "object") { + const resourceLocation = flatResponse.resourceLocation; + if (resourceLocation !== undefined) { + state.config.resourceLocation = resourceLocation; + } + } + return state.config.resourceLocation; +} /** Polls the long-running operation. */ async function pollHttpOperation(inputs) { - const { lro, stateProxy, options, processResult, updateState, setDelay, state } = inputs; + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult, } = inputs; return pollOperation({ state, stateProxy, @@ -17553,12 +17595,14 @@ async function pollHttpOperation(inputs) { getPollingInterval: parseRetryAfter, getOperationLocation, getOperationStatus, + getResourceLocation, options, /** * The expansion here is intentional because `lro` could be an object that * references an inner this, so we need to preserve a reference to it. */ poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult, }); } @@ -17639,7 +17683,7 @@ const createStateProxy$1 = () => ({ * Returns a poller factory. */ function buildCreatePoller(inputs) { - const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, getPollingInterval, } = inputs; + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, getResourceLocation, getPollingInterval, resolveOnUnsuccessful, } = inputs; return async ({ init, poll }, options) => { const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = POLL_INTERVAL_IN_MS, restoreFrom, } = options || {}; const stateProxy = createStateProxy$1(); @@ -17663,6 +17707,7 @@ function buildCreatePoller(inputs) { processResult, getOperationStatus: getStatusFromInitialResponse, withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful, }); let resultPromise; let cancelJob; @@ -17706,10 +17751,14 @@ function buildCreatePoller(inputs) { return poller.getResult(); } case "canceled": { - throw new Error("Operation was canceled"); + if (!resolveOnUnsuccessful) + throw new Error("Operation was canceled"); + return poller.getResult(); } case "failed": { - throw state.error; + if (!resolveOnUnsuccessful) + throw state.error; + return poller.getResult(); } case "notStarted": case "running": { @@ -17729,18 +17778,20 @@ function buildCreatePoller(inputs) { withOperationLocation, getPollingInterval, getOperationStatus: getStatusFromPollResponse, + getResourceLocation, processResult, updateState, options: pollOptions, setDelay: (pollIntervalInMs) => { currentPollIntervalInMs = pollIntervalInMs; }, + setErrorAsResult: !resolveOnUnsuccessful, }); await handleProgressEvents(); - if (state.status === "canceled") { + if (state.status === "canceled" && !resolveOnUnsuccessful) { throw new Error("Operation was canceled"); } - if (state.status === "failed") { + if (state.status === "failed" && !resolveOnUnsuccessful) { throw state.error; } }, @@ -17757,19 +17808,14 @@ function buildCreatePoller(inputs) { * @returns an initialized 
poller */ async function createHttpPoller(lro, options) { - const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, } = options || {}; + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false, } = options || {}; return buildCreatePoller({ - getStatusFromInitialResponse: (response, state) => { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; - return mode === undefined || - (mode === "Body" && getOperationStatus(response, state) === "succeeded") - ? "succeeded" - : "running"; - }, + getStatusFromInitialResponse, getStatusFromPollResponse: getOperationStatus, getOperationLocation, + getResourceLocation, getPollingInterval: parseRetryAfter, + resolveOnUnsuccessful, })({ init: async () => { const response = await lro.sendInitialRequest(); @@ -17812,9 +17858,10 @@ const createStateProxy = () => ({ isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), }); class GenericPollOperation { - constructor(state, lro, lroResourceLocationConfig, processResult, updateState, isDone) { + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { this.state = state; this.lro = lro; + this.setErrorAsResult = setErrorAsResult; this.lroResourceLocationConfig = lroResourceLocationConfig; this.processResult = processResult; this.updateState = updateState; @@ -17832,11 +17879,12 @@ class GenericPollOperation { stateProxy, resourceLocationConfig: this.lroResourceLocationConfig, processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, }))); } const updateState = this.updateState; const isDone = this.isDone; - if (!this.state.isCompleted) { + if (!this.state.isCompleted && this.state.error === undefined) { await pollHttpOperation({ lro: this.lro, state: this.state, @@ -17852,6 +17900,7 @@ class GenericPollOperation { setDelay: (intervalInMs) => { this.pollerConfig.intervalInMs = intervalInMs; }, + setErrorAsResult: this.setErrorAsResult, }); } (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); @@ -18024,6 +18073,8 @@ class Poller { * @param operation - Must contain the basic properties of `PollOperation`. */ constructor(operation) { + /** controls whether to throw an error if the operation failed or was canceled. 
*/ + this.resolveOnUnsuccessful = false; this.stopped = true; this.pollProgressCallbacks = []; this.operation = operation; @@ -18061,15 +18112,10 @@ class Poller { */ async pollOnce(options = {}) { if (!this.isDone()) { - try { - this.operation = await this.operation.update({ - abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this), - }); - } - catch (e) { - this.operation.state.error = e; - } + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); } this.processUpdatedState(); } @@ -18113,22 +18159,26 @@ class Poller { processUpdatedState() { if (this.operation.state.error) { this.stopped = true; - this.reject(this.operation.state.error); - throw this.operation.state.error; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } } if (this.operation.state.isCancelled) { this.stopped = true; - const error = new PollerCancelledError("Operation was canceled"); - this.reject(error); - throw error; + if (!this.resolveOnUnsuccessful) { + const error = new PollerCancelledError("Operation was canceled"); + this.reject(error); + throw error; + } } - else if (this.isDone() && this.resolve) { + if (this.isDone() && this.resolve) { // If the poller has finished polling, this means we now have a result. // However, it can be the case that TResult is instantiated to void, so // we are not expecting a result anyway. To assert that we might not // have a result eventually after finishing polling, we cast the result // to TResult. - this.resolve(this.operation.state.result); + this.resolve(this.getResult()); } } /** @@ -18273,12 +18323,13 @@ class Poller { */ class LroEngine extends Poller { constructor(lro, options) { - const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom } = options || {}; + const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; const state = resumeFrom ? deserializeState(resumeFrom) : {}; - const operation = new GenericPollOperation(state, lro, options === null || options === void 0 ? void 0 : options.lroResourceLocationConfig, options === null || options === void 0 ? void 0 : options.processResult, options === null || options === void 0 ? void 0 : options.updateState, options === null || options === void 0 ? void 0 : options.isDone); + const operation = new GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; this.config = { intervalInMs: intervalInMs }; operation.setPollerConfig(this.config); } @@ -18945,6 +18996,7 @@ exports.setSpanContext = setSpanContext; Object.defineProperty(exports, "__esModule", ({ value: true })); +var abortController = __nccwpck_require__(2557); var crypto = __nccwpck_require__(6113); // Copyright (c) Microsoft Corporation. @@ -18957,13 +19009,77 @@ const isNode = typeof process !== "undefined" && Boolean(process.version) && Boo // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. 
+ * @param properties - The name of the properties that should appear in the object. + */ +function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + */ +function objectHasProperty(thing, property) { + return (isDefined(thing) && typeof thing === "object" && property in thing); +} + +// Copyright (c) Microsoft Corporation. +const StandardAbortMessage = "The operation was aborted."; /** * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options * @returns Promise that is resolved after timeInMs */ -function delay(timeInMs) { - return new Promise((resolve) => setTimeout(() => resolve(), timeInMs)); +function delay(timeInMs, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + var _a; + return reject(new abortController.AbortError((_a = options === null || options === void 0 ? void 0 : options.abortErrorMsg) !== null && _a !== void 0 ? _a : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (isDefined(timer)) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(); + }, timeInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); } // Copyright (c) Microsoft Corporation. @@ -19061,40 +19177,6 @@ async function computeSha256Hash(content, encoding) { return crypto.createHash("sha256").update(content).digest(encoding); } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Helper TypeGuard that checks if something is defined or not. - * @param thing - Anything - */ -function isDefined(thing) { - return typeof thing !== "undefined" && thing !== null; -} -/** - * Helper TypeGuard that checks if the input is an object with the specified properties. - * @param thing - Anything. - * @param properties - The name of the properties that should appear in the object. - */ -function isObjectWithProperties(thing, properties) { - if (!isDefined(thing) || typeof thing !== "object") { - return false; - } - for (const property of properties) { - if (!objectHasProperty(thing, property)) { - return false; - } - } - return true; -} -/** - * Helper TypeGuard that checks if the input is an object with the specified property. - * @param thing - Any object. - * @param property - The name of the property that should appear in the object. 
- */ -function objectHasProperty(thing, property) { - return (isDefined(thing) && typeof thing === "object" && property in thing); -} - exports.computeSha256Hash = computeSha256Hash; exports.computeSha256Hmac = computeSha256Hmac; exports.delay = delay; @@ -64364,522 +64446,522 @@ var external_crypto_default = /*#__PURE__*/__nccwpck_require__.n(external_crypto var external_os_ = __nccwpck_require__(2037); var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_); ;// CONCATENATED MODULE: ./src/utils.ts - - -async function getCmdOutput(cmd, args = [], options = {}) { - let stdout = ""; - let stderr = ""; - try { - await exec.exec(cmd, args, { - silent: true, - listeners: { - stdout(data) { - stdout += data.toString(); - }, - stderr(data) { - stderr += data.toString(); - }, - }, - ...options, - }); - } - catch (e) { - core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); - core.info(`[warning] ${stderr}`); - throw e; - } - return stdout; -} + + +async function getCmdOutput(cmd, args = [], options = {}) { + let stdout = ""; + let stderr = ""; + try { + await exec.exec(cmd, args, { + silent: true, + listeners: { + stdout(data) { + stdout += data.toString(); + }, + stderr(data) { + stderr += data.toString(); + }, + }, + ...options, + }); + } + catch (e) { + core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); + core.info(`[warning] ${stderr}`); + throw e; + } + return stdout; +} ;// CONCATENATED MODULE: ./src/workspace.ts - - -const SAVE_TARGETS = new Set(["lib", "proc-macro"]); -class Workspace { - constructor(root, target) { - this.root = root; - this.target = target; - } - async getPackages() { - let packages = []; - try { - const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], { - cwd: this.root, - })); - for (const pkg of meta.packages) { - if (pkg.manifest_path.startsWith(this.root)) { - continue; - } - const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); - packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); - } - } - catch { } - return packages; - } -} + + +const SAVE_TARGETS = new Set(["lib", "proc-macro"]); +class Workspace { + constructor(root, target) { + this.root = root; + this.target = target; + } + async getPackages() { + let packages = []; + try { + const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], { + cwd: this.root, + })); + for (const pkg of meta.packages) { + if (pkg.manifest_path.startsWith(this.root)) { + continue; + } + const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); + packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); + } + } + catch { } + return packages; + } +} ;// CONCATENATED MODULE: ./src/config.ts - - - - - - - - -const HOME = external_os_default().homedir(); -const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); -const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; -const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES"; -const STATE_BINS = "RUST_CACHE_BINS"; -const STATE_KEY = "RUST_CACHE_KEY"; -class CacheConfig { - constructor() { - /** All the paths we want to cache */ - this.cachePaths = []; - /** The primary cache key */ - this.cacheKey = ""; - /** The secondary (restore) key that only contains the prefix and 
environment */ - this.restoreKey = ""; - /** The workspace configurations */ - this.workspaces = []; - /** The prefix portion of the cache key */ - this.keyPrefix = ""; - /** The rust version considered for the cache key */ - this.keyRust = ""; - /** The environment variables considered for the cache key */ - this.keyEnvs = []; - /** The files considered for the cache key */ - this.keyFiles = []; - } - /** - * Constructs a [`CacheConfig`] with all the paths and keys. - * - * This will read the action `input`s, and read and persist `state` as necessary. - */ - static async new() { - const self = new CacheConfig(); - // Construct key prefix: - // This uses either the `shared-key` input, - // or the `key` input combined with the `job` key. - let key = `v0-rust`; - const sharedKey = core.getInput("shared-key"); - if (sharedKey) { - key += `-${sharedKey}`; - } - else { - const inputKey = core.getInput("key"); - if (inputKey) { - key += `-${inputKey}`; - } - const job = process.env.GITHUB_JOB; - if (job) { - key += `-${job}`; - } - } - self.keyPrefix = key; - // Construct environment portion of the key: - // This consists of a hash that considers the rust version - // as well as all the environment variables as given by a default list - // and the `env-vars` input. - // The env vars are sorted, matched by prefix and hashed into the - // resulting environment hash. - let hasher = external_crypto_default().createHash("sha1"); - const rustVersion = await getRustVersion(); - let keyRust = `${rustVersion.release} ${rustVersion.host}`; - hasher.update(keyRust); - hasher.update(rustVersion["commit-hash"]); - keyRust += ` (${rustVersion["commit-hash"]})`; - self.keyRust = keyRust; - // these prefixes should cover most of the compiler / rust / cargo keys - const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; - envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean)); - // sort the available env vars so we have a more stable hash - const keyEnvs = []; - const envKeys = Object.keys(process.env); - envKeys.sort((a, b) => a.localeCompare(b)); - for (const key of envKeys) { - const value = process.env[key]; - if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { - hasher.update(`${key}=${value}`); - keyEnvs.push(key); - } - } - self.keyEnvs = keyEnvs; - key += `-${hasher.digest("hex")}`; - self.restoreKey = key; - // Construct the lockfiles portion of the key: - // This considers all the files found via globbing for various manifests - // and lockfiles. - // This part is computed in the "pre"/"restore" part of the job and persisted - // into the `state`. That state is loaded in the "post"/"save" part of the - // job so we have consistent values even though the "main" actions run - // might create/overwrite lockfiles. 
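// [Editor's note: illustrative sketch, not part of this patch.] Combining the key
// prefix and environment hash built above with the lockfile hash computed below,
// the action's keys end up with this shape (hash values here are made up):
const keyPrefix = "v0-rust-my-key-my_job";    // "v0-rust" plus shared-key, or key + $GITHUB_JOB
const envHash = "0c9f2e417ad1";               // sha1 over `rustc -vV` and the matched env vars
const lockHash = "b3d86a5f2c10";              // sha1 over the globbed Cargo.toml/Cargo.lock files
const restoreKey = `${keyPrefix}-${envHash}`; // prefix-match fallback passed to restoreCache
const cacheKey = `${restoreKey}-${lockHash}`; // exact-match primary key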
- let lockHash = core.getState(STATE_LOCKFILE_HASH); - let keyFiles = JSON.parse(core.getState(STATE_LOCKFILES) || "[]"); - if (!lockHash) { - const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", { - followSymbolicLinks: false, - }); - keyFiles = await globber.glob(); - keyFiles.sort((a, b) => a.localeCompare(b)); - hasher = external_crypto_default().createHash("sha1"); - for (const file of keyFiles) { - for await (const chunk of external_fs_default().createReadStream(file)) { - hasher.update(chunk); - } - } - lockHash = hasher.digest("hex"); - core.saveState(STATE_LOCKFILE_HASH, lockHash); - core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles)); - } - self.keyFiles = keyFiles; - key += `-${lockHash}`; - self.cacheKey = key; - // Constructs the workspace config and paths to restore: - // The workspaces are given using a `$workspace -> $target` syntax. - const workspaces = []; - const workspacesInput = core.getInput("workspaces") || "."; - for (const workspace of workspacesInput.trim().split("\n")) { - let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); - root = external_path_default().resolve(root); - target = external_path_default().join(root, target); - workspaces.push(new Workspace(root, target)); - } - self.workspaces = workspaces; - self.cachePaths = [CARGO_HOME, ...workspaces.map((ws) => ws.target)]; - return self; - } - printInfo() { - core.startGroup("Cache Configuration"); - core.info(`Workspaces:`); - for (const workspace of this.workspaces) { - core.info(` ${workspace.root}`); - } - core.info(`Cache Paths:`); - for (const path of this.cachePaths) { - core.info(` ${path}`); - } - core.info(`Restore Key:`); - core.info(` ${this.restoreKey}`); - core.info(`Cache Key:`); - core.info(` ${this.cacheKey}`); - core.info(`.. Prefix:`); - core.info(` - ${this.keyPrefix}`); - core.info(`.. Environment considered:`); - core.info(` - Rust Version: ${this.keyRust}`); - for (const env of this.keyEnvs) { - core.info(` - ${env}`); - } - core.info(`.. Lockfiles considered:`); - for (const file of this.keyFiles) { - core.info(` - ${file}`); - } - core.endGroup(); - } -} -async function getRustVersion() { - const stdout = await getCmdOutput("rustc", ["-vV"]); - let splits = stdout - .split(/[\n\r]+/) - .filter(Boolean) - .map((s) => s.split(":").map((s) => s.trim())) - .filter((s) => s.length === 2); - return Object.fromEntries(splits); -} + + + + + + + + +const HOME = external_os_default().homedir(); +const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); +const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; +const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES"; +const STATE_BINS = "RUST_CACHE_BINS"; +const STATE_KEY = "RUST_CACHE_KEY"; +class CacheConfig { + constructor() { + /** All the paths we want to cache */ + this.cachePaths = []; + /** The primary cache key */ + this.cacheKey = ""; + /** The secondary (restore) key that only contains the prefix and environment */ + this.restoreKey = ""; + /** The workspace configurations */ + this.workspaces = []; + /** The prefix portion of the cache key */ + this.keyPrefix = ""; + /** The rust version considered for the cache key */ + this.keyRust = ""; + /** The environment variables considered for the cache key */ + this.keyEnvs = []; + /** The files considered for the cache key */ + this.keyFiles = []; + } + /** + * Constructs a [`CacheConfig`] with all the paths and keys. 
+ * + * This will read the action `input`s, and read and persist `state` as necessary. + */ + static async new() { + const self = new CacheConfig(); + // Construct key prefix: + // This uses either the `shared-key` input, + // or the `key` input combined with the `job` key. + let key = `v0-rust`; + const sharedKey = core.getInput("shared-key"); + if (sharedKey) { + key += `-${sharedKey}`; + } + else { + const inputKey = core.getInput("key"); + if (inputKey) { + key += `-${inputKey}`; + } + const job = process.env.GITHUB_JOB; + if (job) { + key += `-${job}`; + } + } + self.keyPrefix = key; + // Construct environment portion of the key: + // This consists of a hash that considers the rust version + // as well as all the environment variables as given by a default list + // and the `env-vars` input. + // The env vars are sorted, matched by prefix and hashed into the + // resulting environment hash. + let hasher = external_crypto_default().createHash("sha1"); + const rustVersion = await getRustVersion(); + let keyRust = `${rustVersion.release} ${rustVersion.host}`; + hasher.update(keyRust); + hasher.update(rustVersion["commit-hash"]); + keyRust += ` (${rustVersion["commit-hash"]})`; + self.keyRust = keyRust; + // these prefixes should cover most of the compiler / rust / cargo keys + const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; + envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean)); + // sort the available env vars so we have a more stable hash + const keyEnvs = []; + const envKeys = Object.keys(process.env); + envKeys.sort((a, b) => a.localeCompare(b)); + for (const key of envKeys) { + const value = process.env[key]; + if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { + hasher.update(`${key}=${value}`); + keyEnvs.push(key); + } + } + self.keyEnvs = keyEnvs; + key += `-${hasher.digest("hex")}`; + self.restoreKey = key; + // Construct the lockfiles portion of the key: + // This considers all the files found via globbing for various manifests + // and lockfiles. + // This part is computed in the "pre"/"restore" part of the job and persisted + // into the `state`. That state is loaded in the "post"/"save" part of the + // job so we have consistent values even though the "main" actions run + // might create/overwrite lockfiles. + let lockHash = core.getState(STATE_LOCKFILE_HASH); + let keyFiles = JSON.parse(core.getState(STATE_LOCKFILES) || "[]"); + if (!lockHash) { + const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", { + followSymbolicLinks: false, + }); + keyFiles = await globber.glob(); + keyFiles.sort((a, b) => a.localeCompare(b)); + hasher = external_crypto_default().createHash("sha1"); + for (const file of keyFiles) { + for await (const chunk of external_fs_default().createReadStream(file)) { + hasher.update(chunk); + } + } + lockHash = hasher.digest("hex"); + core.saveState(STATE_LOCKFILE_HASH, lockHash); + core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles)); + } + self.keyFiles = keyFiles; + key += `-${lockHash}`; + self.cacheKey = key; + // Constructs the workspace config and paths to restore: + // The workspaces are given using a `$workspace -> $target` syntax. 
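// [Editor's note: illustrative sketch, not part of this patch.] An example of the
// `$workspace -> $target` syntax parsed just below, where the target segment is
// optional and defaults to "target" (paths here are hypothetical):
const workspacesInputExample = ".\ncrates/api -> custom-target";
for (const line of workspacesInputExample.trim().split("\n")) {
  const [root, target = "target"] = line.split("->").map((s) => s.trim());
  console.log(root, target); // "." "target", then "crates/api" "custom-target"
}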
+ const workspaces = []; + const workspacesInput = core.getInput("workspaces") || "."; + for (const workspace of workspacesInput.trim().split("\n")) { + let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); + root = external_path_default().resolve(root); + target = external_path_default().join(root, target); + workspaces.push(new Workspace(root, target)); + } + self.workspaces = workspaces; + self.cachePaths = [CARGO_HOME, ...workspaces.map((ws) => ws.target)]; + return self; + } + printInfo() { + core.startGroup("Cache Configuration"); + core.info(`Workspaces:`); + for (const workspace of this.workspaces) { + core.info(` ${workspace.root}`); + } + core.info(`Cache Paths:`); + for (const path of this.cachePaths) { + core.info(` ${path}`); + } + core.info(`Restore Key:`); + core.info(` ${this.restoreKey}`); + core.info(`Cache Key:`); + core.info(` ${this.cacheKey}`); + core.info(`.. Prefix:`); + core.info(` - ${this.keyPrefix}`); + core.info(`.. Environment considered:`); + core.info(` - Rust Version: ${this.keyRust}`); + for (const env of this.keyEnvs) { + core.info(` - ${env}`); + } + core.info(`.. Lockfiles considered:`); + for (const file of this.keyFiles) { + core.info(` - ${file}`); + } + core.endGroup(); + } +} +async function getRustVersion() { + const stdout = await getCmdOutput("rustc", ["-vV"]); + let splits = stdout + .split(/[\n\r]+/) + .filter(Boolean) + .map((s) => s.split(":").map((s) => s.trim())) + .filter((s) => s.length === 2); + return Object.fromEntries(splits); +} ;// CONCATENATED MODULE: ./src/cleanup.ts - - - - - -async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { - core.debug(`cleaning target directory "${targetDir}"`); - // remove all *files* from the profile directory - let dir = await external_fs_default().promises.opendir(targetDir); - for await (const dirent of dir) { - if (dirent.isDirectory()) { - let dirName = external_path_default().join(dir.path, dirent.name); - // is it a profile dir, or a nested target dir? 
- let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json"))); - try { - if (isNestedTarget) { - await cleanTargetDir(dirName, packages, checkTimestamp); - } - else { - await cleanProfileTarget(dirName, packages, checkTimestamp); - } - } - catch { } - } - else if (dirent.name !== "CACHEDIR.TAG") { - await rm(dir.path, dirent); - } - } -} -async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { - core.debug(`cleaning profile directory "${profileDir}"`); - let keepProfile = new Set(["build", ".fingerprint", "deps"]); - await rmExcept(profileDir, keepProfile); - const keepPkg = new Set(packages.map((p) => p.name)); - await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp); - await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); - const keepDeps = new Set(packages.flatMap((p) => { - const names = []; - for (const n of [p.name, ...p.targets]) { - const name = n.replace(/-/g, "_"); - names.push(name, `lib${name}`); - } - return names; - })); - await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp); -} -async function getCargoBins() { - const bins = new Set(); - try { - const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(CARGO_HOME, ".crates2.json"), "utf8")); - for (const pkg of Object.values(installs)) { - for (const bin of pkg.bins) { - bins.add(bin); - } - } - } - catch { } - return bins; -} -async function cleanBin() { - const bins = await getCargoBins(); - const oldBins = JSON.parse(core.getState(STATE_BINS)); - for (const bin of oldBins) { - bins.delete(bin); - } - const dir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "bin")); - for await (const dirent of dir) { - if (dirent.isFile() && !bins.has(dirent.name)) { - await rm(dir.path, dirent); - } - } -} -async function cleanRegistry(packages) { - // `.cargo/registry/src` - // we can remove this completely, as cargo will recreate this from `cache` - await rmRF(external_path_default().join(CARGO_HOME, "registry", "src")); - // `.cargo/registry/index` - const indexDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "index")); - for await (const dirent of indexDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/index/gitpro.ttaallkk.top-1ecc6299db9ec823` - // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` - const dirPath = external_path_default().join(indexDir.path, dirent.name); - // for a git registry, we can remove `.cache`, as cargo will recreate it from git - if (await exists(external_path_default().join(dirPath, ".git"))) { - await rmRF(external_path_default().join(dirPath, ".cache")); - } - // TODO: else, clean `.cache` based on the `packages` - } - } - const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); - // `.cargo/registry/cache` - const cacheDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "cache")); - for await (const dirent of cacheDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/cache/gitpro.ttaallkk.top-1ecc6299db9ec823` - // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` - const dir = await external_fs_default().promises.opendir(external_path_default().join(cacheDir.path, dirent.name)); - for await (const 
dirent of dir) { - // here we check that the downloaded `.crate` matches one from our dependencies - if (dirent.isFile() && !pkgSet.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - } -} -async function cleanGit(packages) { - const coPath = external_path_default().join(CARGO_HOME, "git", "checkouts"); - const dbPath = external_path_default().join(CARGO_HOME, "git", "db"); - const repos = new Map(); - for (const p of packages) { - if (!p.path.startsWith(coPath)) { - continue; - } - const [repo, ref] = p.path.slice(coPath.length + 1).split((external_path_default()).sep); - const refs = repos.get(repo); - if (refs) { - refs.add(ref); - } - else { - repos.set(repo, new Set([ref])); - } - } - // we have to keep both the clone, and the checkout, removing either will - // trigger a rebuild - // clean the db - try { - let dir = await external_fs_default().promises.opendir(dbPath); - for await (const dirent of dir) { - if (!repos.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - catch { } - // clean the checkouts - try { - let dir = await external_fs_default().promises.opendir(coPath); - for await (const dirent of dir) { - const refs = repos.get(dirent.name); - if (!refs) { - await rm(dir.path, dirent); - continue; - } - if (!dirent.isDirectory()) { - continue; - } - const refsDir = await external_fs_default().promises.opendir(external_path_default().join(dir.path, dirent.name)); - for await (const dirent of refsDir) { - if (!refs.has(dirent.name)) { - await rm(refsDir.path, dirent); - } - } - } - } - catch { } -} -const ONE_WEEK = 7 * 24 * 3600 * 1000; -/** - * Removes all files or directories in `dirName`, except the ones matching - * any string in the `keepPrefix` set. - * - * The matching strips and trailing `-$hash` suffix. - * - * When the `checkTimestamp` flag is set, this will also remove anything older - * than one week. 
- */ -async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { - const dir = await external_fs_default().promises.opendir(dirName); - for await (const dirent of dir) { - let name = dirent.name; - // strip the trailing hash - const idx = name.lastIndexOf("-"); - if (idx !== -1) { - name = name.slice(0, idx); - } - let isOutdated = false; - if (checkTimestamp) { - const fileName = external_path_default().join(dir.path, dirent.name); - const { mtime } = await external_fs_default().promises.stat(fileName); - isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; - } - if (!keepPrefix.has(name) || isOutdated) { - await rm(dir.path, dirent); - } - } -} -async function rm(parent, dirent) { - try { - const fileName = external_path_default().join(parent, dirent.name); - core.debug(`deleting "${fileName}"`); - if (dirent.isFile()) { - await external_fs_default().promises.unlink(fileName); - } - else if (dirent.isDirectory()) { - await io.rmRF(fileName); - } - } - catch { } -} -async function rmRF(dirName) { - core.debug(`deleting "${dirName}"`); - await io.rmRF(dirName); -} -async function exists(path) { - try { - await external_fs_default().promises.access(path); - return true; - } - catch { - return false; - } -} + + + + + +async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { + core.debug(`cleaning target directory "${targetDir}"`); + // remove all *files* from the profile directory + let dir = await external_fs_default().promises.opendir(targetDir); + for await (const dirent of dir) { + if (dirent.isDirectory()) { + let dirName = external_path_default().join(dir.path, dirent.name); + // is it a profile dir, or a nested target dir? + let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json"))); + try { + if (isNestedTarget) { + await cleanTargetDir(dirName, packages, checkTimestamp); + } + else { + await cleanProfileTarget(dirName, packages, checkTimestamp); + } + } + catch { } + } + else if (dirent.name !== "CACHEDIR.TAG") { + await rm(dir.path, dirent); + } + } +} +async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { + core.debug(`cleaning profile directory "${profileDir}"`); + let keepProfile = new Set(["build", ".fingerprint", "deps"]); + await rmExcept(profileDir, keepProfile); + const keepPkg = new Set(packages.map((p) => p.name)); + await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp); + await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); + const keepDeps = new Set(packages.flatMap((p) => { + const names = []; + for (const n of [p.name, ...p.targets]) { + const name = n.replace(/-/g, "_"); + names.push(name, `lib${name}`); + } + return names; + })); + await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp); +} +async function getCargoBins() { + const bins = new Set(); + try { + const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(CARGO_HOME, ".crates2.json"), "utf8")); + for (const pkg of Object.values(installs)) { + for (const bin of pkg.bins) { + bins.add(bin); + } + } + } + catch { } + return bins; +} +async function cleanBin() { + const bins = await getCargoBins(); + const oldBins = JSON.parse(core.getState(STATE_BINS)); + for (const bin of oldBins) { + bins.delete(bin); + } + const dir = await 
external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "bin")); + for await (const dirent of dir) { + if (dirent.isFile() && !bins.has(dirent.name)) { + await rm(dir.path, dirent); + } + } +} +async function cleanRegistry(packages) { + // `.cargo/registry/src` + // we can remove this completely, as cargo will recreate this from `cache` + await rmRF(external_path_default().join(CARGO_HOME, "registry", "src")); + // `.cargo/registry/index` + const indexDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "index")); + for await (const dirent of indexDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/index/gitpro.ttaallkk.top-1ecc6299db9ec823` + // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` + const dirPath = external_path_default().join(indexDir.path, dirent.name); + // for a git registry, we can remove `.cache`, as cargo will recreate it from git + if (await exists(external_path_default().join(dirPath, ".git"))) { + await rmRF(external_path_default().join(dirPath, ".cache")); + } + // TODO: else, clean `.cache` based on the `packages` + } + } + const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); + // `.cargo/registry/cache` + const cacheDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "cache")); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/cache/gitpro.ttaallkk.top-1ecc6299db9ec823` + // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` + const dir = await external_fs_default().promises.opendir(external_path_default().join(cacheDir.path, dirent.name)); + for await (const dirent of dir) { + // here we check that the downloaded `.crate` matches one from our dependencies + if (dirent.isFile() && !pkgSet.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + } +} +async function cleanGit(packages) { + const coPath = external_path_default().join(CARGO_HOME, "git", "checkouts"); + const dbPath = external_path_default().join(CARGO_HOME, "git", "db"); + const repos = new Map(); + for (const p of packages) { + if (!p.path.startsWith(coPath)) { + continue; + } + const [repo, ref] = p.path.slice(coPath.length + 1).split((external_path_default()).sep); + const refs = repos.get(repo); + if (refs) { + refs.add(ref); + } + else { + repos.set(repo, new Set([ref])); + } + } + // we have to keep both the clone, and the checkout, removing either will + // trigger a rebuild + // clean the db + try { + let dir = await external_fs_default().promises.opendir(dbPath); + for await (const dirent of dir) { + if (!repos.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + catch { } + // clean the checkouts + try { + let dir = await external_fs_default().promises.opendir(coPath); + for await (const dirent of dir) { + const refs = repos.get(dirent.name); + if (!refs) { + await rm(dir.path, dirent); + continue; + } + if (!dirent.isDirectory()) { + continue; + } + const refsDir = await external_fs_default().promises.opendir(external_path_default().join(dir.path, dirent.name)); + for await (const dirent of refsDir) { + if (!refs.has(dirent.name)) { + await rm(refsDir.path, dirent); + } + } + } + } + catch { } +} +const ONE_WEEK = 7 * 24 * 3600 * 1000; +/** + * Removes all files or directories in `dirName`, except the ones matching + * any string in the `keepPrefix` set. + * + * The matching strips and trailing `-$hash` suffix. 
+ * + * When the `checkTimestamp` flag is set, this will also remove anything older + * than one week. + */ +async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { + const dir = await external_fs_default().promises.opendir(dirName); + for await (const dirent of dir) { + let name = dirent.name; + // strip the trailing hash + const idx = name.lastIndexOf("-"); + if (idx !== -1) { + name = name.slice(0, idx); + } + let isOutdated = false; + if (checkTimestamp) { + const fileName = external_path_default().join(dir.path, dirent.name); + const { mtime } = await external_fs_default().promises.stat(fileName); + isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; + } + if (!keepPrefix.has(name) || isOutdated) { + await rm(dir.path, dirent); + } + } +} +async function rm(parent, dirent) { + try { + const fileName = external_path_default().join(parent, dirent.name); + core.debug(`deleting "${fileName}"`); + if (dirent.isFile()) { + await external_fs_default().promises.unlink(fileName); + } + else if (dirent.isDirectory()) { + await io.rmRF(fileName); + } + } + catch { } +} +async function rmRF(dirName) { + core.debug(`deleting "${dirName}"`); + await io.rmRF(dirName); +} +async function exists(path) { + try { + await external_fs_default().promises.access(path); + return true; + } + catch { + return false; + } +} ;// CONCATENATED MODULE: ./src/save.ts - - - - - -process.on("uncaughtException", (e) => { - core.info(`[warning] ${e.message}`); - if (e.stack) { - core.info(e.stack); - } -}); -async function run() { - if (!cache.isFeatureAvailable()) { - return; - } - try { - const config = await CacheConfig["new"](); - config.printInfo(); - core.info(""); - if (core.getState(STATE_KEY) === config.cacheKey) { - core.info(`Cache up-to-date.`); - return; - } - // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands - await macOsWorkaround(); - const allPackages = []; - for (const workspace of config.workspaces) { - const packages = await workspace.getPackages(); - allPackages.push(...packages); - try { - core.info(`... Cleaning ${workspace.target} ...`); - await cleanTargetDir(workspace.target, packages); - } - catch (e) { - core.info(`[warning] ${e.stack}`); - } - } - try { - core.info(`... Cleaning cargo registry ...`); - await cleanRegistry(allPackages); - } - catch (e) { - core.info(`[warning] ${e.stack}`); - } - try { - core.info(`... Cleaning cargo/bin ...`); - await cleanBin(); - } - catch (e) { - core.info(`[warning] ${e.stack}`); - } - try { - core.info(`... Cleaning cargo git cache ...`); - await cleanGit(allPackages); - } - catch (e) { - core.info(`[warning] ${e.stack}`); - } - core.info(`... 
Saving cache ...`); - await cache.saveCache(config.cachePaths, config.cacheKey); - } - catch (e) { - core.info(`[warning] ${e.stack}`); - } -} -run(); -async function macOsWorkaround() { - try { - // Workaround for https://github.com/actions/cache/issues/403 - // Also see https://github.com/rust-lang/cargo/issues/8603 - await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true }); - } - catch { } -} + + + + + +process.on("uncaughtException", (e) => { + core.info(`[warning] ${e.message}`); + if (e.stack) { + core.info(e.stack); + } +}); +async function run() { + if (!cache.isFeatureAvailable()) { + return; + } + try { + const config = await CacheConfig["new"](); + config.printInfo(); + core.info(""); + if (core.getState(STATE_KEY) === config.cacheKey) { + core.info(`Cache up-to-date.`); + return; + } + // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands + await macOsWorkaround(); + const allPackages = []; + for (const workspace of config.workspaces) { + const packages = await workspace.getPackages(); + allPackages.push(...packages); + try { + core.info(`... Cleaning ${workspace.target} ...`); + await cleanTargetDir(workspace.target, packages); + } + catch (e) { + core.info(`[warning] ${e.stack}`); + } + } + try { + core.info(`... Cleaning cargo registry ...`); + await cleanRegistry(allPackages); + } + catch (e) { + core.info(`[warning] ${e.stack}`); + } + try { + core.info(`... Cleaning cargo/bin ...`); + await cleanBin(); + } + catch (e) { + core.info(`[warning] ${e.stack}`); + } + try { + core.info(`... Cleaning cargo git cache ...`); + await cleanGit(allPackages); + } + catch (e) { + core.info(`[warning] ${e.stack}`); + } + core.info(`... Saving cache ...`); + await cache.saveCache(config.cachePaths, config.cacheKey); + } + catch (e) { + core.info(`[warning] ${e.stack}`); + } +} +run(); +async function macOsWorkaround() { + try { + // Workaround for https://github.com/actions/cache/issues/403 + // Also see https://github.com/rust-lang/cargo/issues/8603 + await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true }); + } + catch { } +} })(); diff --git a/package-lock.json b/package-lock.json index 09c33b4..7b7c674 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5,10 +5,11 @@ "requires": true, "packages": { "": { + "name": "rust-cache", "version": "2.0.0", "license": "LGPL-3.0", "dependencies": { - "@actions/cache": "^3.0.4", + "@actions/cache": "^3.0.5", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/glob": "^0.3.0", @@ -16,18 +17,18 @@ }, "devDependencies": { "@vercel/ncc": "^0.34.0", - "typescript": "4.8.2" + "typescript": "4.8.4" }, "funding": { "url": "https://github.com/sponsors/Swatinem" } }, "node_modules/@actions/cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz", - "integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz", + "integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==", "dependencies": { - "@actions/core": "^1.2.6", + "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.0.1", @@ -193,9 +194,9 @@ } }, "node_modules/@azure/core-lro": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.3.0.tgz", - 
"integrity": "sha512-n53pk9Gs450rv1zDr9H7aPmMkYHMu9Bwks9qFlK+P46b4virATRf3TNuBZH7DIGVs8ePjtRCNYhcM4D+/Gyn6A==", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.4.0.tgz", + "integrity": "sha512-F65+rYkll1dpw3RGm8/SSiSj+/QkMeYDanzS/QKlM1dmuneVyXbO46C88V1MRHluLGdMP6qfD3vDRYALn0z0tQ==", "dependencies": { "@azure/abort-controller": "^1.0.0", "@azure/logger": "^1.0.0", @@ -244,10 +245,11 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@azure/core-util": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.1.0.tgz", - "integrity": "sha512-+i93lNJNA3Pl3KSuC6xKP2jTL4YFeDfO6VNOaYdk0cppZcLCxt811gS878VsqsCisaltdhl9lhMzK5kbxCiF4w==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.1.1.tgz", + "integrity": "sha512-A4TBYVQCtHOigFb2ETiiKFDocBoI1Zk2Ui1KpI42aJSIDexF7DHQFpnjonltXAIU/ceH+1fsZAWWgvX6/AKzog==", "dependencies": { + "@azure/abort-controller": "^1.0.0", "tslib": "^2.2.0" }, "engines": { @@ -331,9 +333,9 @@ } }, "node_modules/@types/node": { - "version": "18.7.14", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.14.tgz", - "integrity": "sha512-6bbDaETVi8oyIARulOE9qF1/Qdi/23z6emrUh0fNJRUmjznqrixD4MpGDdgOFk5Xb0m2H6Xu42JGdvAxaJR/wA==" + "version": "18.8.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.8.5.tgz", + "integrity": "sha512-Bq7G3AErwe5A/Zki5fdD3O6+0zDChhg671NfPjtIcbtzDNZTv4NPKMRFr7gtYPG7y+B8uTiNK4Ngd9T0FTar6Q==" }, "node_modules/@types/node-fetch": { "version": "2.6.2", @@ -370,7 +372,6 @@ "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.34.0.tgz", "integrity": "sha512-G9h5ZLBJ/V57Ou9vz5hI8pda/YQX5HQszCs3AmIus3XzsmRn/0Ptic5otD3xVST8QLKk7AMk7AqpsyQGN7MZ9A==", "dev": true, - "license": "MIT", "bin": { "ncc": "dist/ncc/cli.js" } @@ -591,11 +592,10 @@ } }, "node_modules/typescript": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.2.tgz", - "integrity": "sha512-C0I1UsrrDHo2fYI5oaCGbSejwX4ch+9Y5jTQELvovfmFkK3HHSZJB8MSJcWLmCUBzQBchCrZ9rMRV6GuNrvGtw==", + "version": "4.8.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.4.tgz", + "integrity": "sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ==", "dev": true, - "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -667,11 +667,11 @@ }, "dependencies": { "@actions/cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz", - "integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz", + "integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==", "requires": { - "@actions/core": "^1.2.6", + "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.0.1", @@ -826,9 +826,9 @@ } }, "@azure/core-lro": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.3.0.tgz", - "integrity": "sha512-n53pk9Gs450rv1zDr9H7aPmMkYHMu9Bwks9qFlK+P46b4virATRf3TNuBZH7DIGVs8ePjtRCNYhcM4D+/Gyn6A==", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.4.0.tgz", + "integrity": 
"sha512-F65+rYkll1dpw3RGm8/SSiSj+/QkMeYDanzS/QKlM1dmuneVyXbO46C88V1MRHluLGdMP6qfD3vDRYALn0z0tQ==", "requires": { "@azure/abort-controller": "^1.0.0", "@azure/logger": "^1.0.0", @@ -874,10 +874,11 @@ } }, "@azure/core-util": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.1.0.tgz", - "integrity": "sha512-+i93lNJNA3Pl3KSuC6xKP2jTL4YFeDfO6VNOaYdk0cppZcLCxt811gS878VsqsCisaltdhl9lhMzK5kbxCiF4w==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.1.1.tgz", + "integrity": "sha512-A4TBYVQCtHOigFb2ETiiKFDocBoI1Zk2Ui1KpI42aJSIDexF7DHQFpnjonltXAIU/ceH+1fsZAWWgvX6/AKzog==", "requires": { + "@azure/abort-controller": "^1.0.0", "tslib": "^2.2.0" }, "dependencies": { @@ -954,9 +955,9 @@ "integrity": "sha512-0nBr+VZNKm9tvNDZFstI3Pq1fCTEDK5OZTnVKNvBNAKgd0yIvmwsP4m61rEv7ZP+tOUjWJhROpxK5MsnlF911g==" }, "@types/node": { - "version": "18.7.14", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.14.tgz", - "integrity": "sha512-6bbDaETVi8oyIARulOE9qF1/Qdi/23z6emrUh0fNJRUmjznqrixD4MpGDdgOFk5Xb0m2H6Xu42JGdvAxaJR/wA==" + "version": "18.8.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.8.5.tgz", + "integrity": "sha512-Bq7G3AErwe5A/Zki5fdD3O6+0zDChhg671NfPjtIcbtzDNZTv4NPKMRFr7gtYPG7y+B8uTiNK4Ngd9T0FTar6Q==" }, "@types/node-fetch": { "version": "2.6.2", @@ -1153,9 +1154,9 @@ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" }, "typescript": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.2.tgz", - "integrity": "sha512-C0I1UsrrDHo2fYI5oaCGbSejwX4ch+9Y5jTQELvovfmFkK3HHSZJB8MSJcWLmCUBzQBchCrZ9rMRV6GuNrvGtw==", + "version": "4.8.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.4.tgz", + "integrity": "sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ==", "dev": true }, "universalify": { diff --git a/package.json b/package.json index 57dfb2b..cd23519 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,7 @@ }, "homepage": "https://github.com/Swatinem/rust-cache#readme", "dependencies": { - "@actions/cache": "^3.0.4", + "@actions/cache": "^3.0.5", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/glob": "^0.3.0", @@ -30,7 +30,7 @@ }, "devDependencies": { "@vercel/ncc": "^0.34.0", - "typescript": "4.8.2" + "typescript": "4.8.4" }, "scripts": { "prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts"