diff --git a/package-lock.json b/package-lock.json
index 4a68ffa8..35cbbed3 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -51,9 +51,9 @@
       }
     },
     "node_modules/@actions/artifact": {
-      "version": "2.1.8",
-      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.8.tgz",
-      "integrity": "sha512-kxgbllgF5f6mEdMeSW6WXlUbV1U77V9ECpA7LOYaY+Tm6RfXOm36EdXbpm+T9VPeaVqXK4QHLAgqay9GSyClgw==",
+      "version": "2.1.9",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.9.tgz",
+      "integrity": "sha512-f9JXC9JrwramDRJHZiIOKJo3PGw/V3riYegLj5kHi8YEJ2k72TNUd1zDW1BG50ILnzJ0cp1faDVJ2pSdolRQfg==",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/github": "^5.1.1",
@@ -10447,7 +10447,7 @@
       "version": "1.0.0",
       "license": "Apache-2.0",
       "dependencies": {
-        "@actions/artifact": "^2.1.8",
+        "@actions/artifact": "^2.1.9",
         "@actions/cache": "^3.2.4",
         "@actions/core": "^1.10.1",
         "@actions/exec": "^1.1.0",
diff --git a/scan/dist/index.js b/scan/dist/index.js
index 8cea3810..5c3a76c9 100644
--- a/scan/dist/index.js
+++ b/scan/dist/index.js
@@ -93310,7 +93310,7 @@ var require_config = __commonJS({
       return mod && mod.__esModule ? mod : { "default": mod };
     };
     Object.defineProperty(exports2, "__esModule", { value: true });
-    exports2.getConcurrency = exports2.getGitHubWorkspaceDir = exports2.isGhes = exports2.getResultsServiceUrl = exports2.getRuntimeToken = exports2.getUploadChunkSize = void 0;
+    exports2.getUploadChunkTimeout = exports2.getConcurrency = exports2.getGitHubWorkspaceDir = exports2.isGhes = exports2.getResultsServiceUrl = exports2.getRuntimeToken = exports2.getUploadChunkSize = void 0;
     var os_1 = __importDefault2(require("os"));
     function getUploadChunkSize() {
       return 8 * 1024 * 1024;
     }
@@ -93364,6 +93364,11 @@ var require_config = __commonJS({
     }
     __name(getConcurrency, "getConcurrency");
     exports2.getConcurrency = getConcurrency;
+    function getUploadChunkTimeout() {
+      return 3e4;
+    }
+    __name(getUploadChunkTimeout, "getUploadChunkTimeout");
+    exports2.getUploadChunkTimeout = getUploadChunkTimeout;
   }
 });
@@ -101553,7 +101558,7 @@ var require_package = __commonJS({
   "../node_modules/@actions/artifact/package.json"(exports2, module2) {
     module2.exports = {
       name: "@actions/artifact",
-      version: "2.1.8",
+      version: "2.1.9",
       preview: true,
       description: "Actions artifact lib",
       keywords: [
@@ -102203,34 +102208,34 @@ var require_blob_upload = __commonJS({
       return __awaiter3(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
         let lastProgressTime = Date.now();
-        let timeoutId;
-        const chunkTimer = /* @__PURE__ */ __name((timeout) => {
-          if (timeoutId) {
-            clearTimeout(timeoutId);
-          }
-          timeoutId = setTimeout(() => {
-            const now = Date.now();
-            if (now - lastProgressTime > timeout) {
-              throw new Error("Upload progress stalled.");
-            }
-          }, timeout);
-          return timeoutId;
-        }, "chunkTimer");
+        const abortController = new AbortController();
+        const chunkTimer = /* @__PURE__ */ __name((interval) => __awaiter3(this, void 0, void 0, function* () {
+          return new Promise((resolve, reject) => {
+            const timer = setInterval(() => {
+              if (Date.now() - lastProgressTime > interval) {
+                reject(new Error("Upload progress stalled."));
+              }
+            }, interval);
+            abortController.signal.addEventListener("abort", () => {
+              clearInterval(timer);
+              resolve();
+            });
+          });
+        }), "chunkTimer");
         const maxConcurrency = (0, config_1.getConcurrency)();
         const bufferSize = (0, config_1.getUploadChunkSize)();
         const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
         const blockBlobClient = blobClient.getBlockBlobClient();
-        const timeoutDuration = 3e5;
         core2.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
         const uploadCallback = /* @__PURE__ */ __name((progress) => {
           core2.info(`Uploaded bytes ${progress.loadedBytes}`);
           uploadByteCount = progress.loadedBytes;
-          chunkTimer(timeoutDuration);
           lastProgressTime = Date.now();
         }, "uploadCallback");
         const options = {
           blobHTTPHeaders: { blobContentType: "zip" },
-          onProgress: uploadCallback
+          onProgress: uploadCallback,
+          abortSignal: abortController.signal
         };
         let sha256Hash = void 0;
         const uploadStream = new stream.PassThrough();
@@ -102239,17 +102244,17 @@
         zipUploadStream.pipe(hashStream).setEncoding("hex");
         core2.info("Beginning upload of artifact content to blob storage");
         try {
-          timeoutId = chunkTimer(timeoutDuration);
-          yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
+          yield Promise.race([
+            blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
+            chunkTimer((0, config_1.getUploadChunkTimeout)())
+          ]);
         } catch (error) {
           if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
             throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
           }
           throw error;
         } finally {
-          if (timeoutId) {
-            clearTimeout(timeoutId);
-          }
+          abortController.abort();
         }
         core2.info("Finished uploading artifact content to blob storage!");
         hashStream.end();
@@ -127470,7 +127475,6 @@
     var stream = __importStar3(require("stream"));
     var archiver = __importStar3(require_archiver());
     var core2 = __importStar3(require_core());
-    var fs_1 = require("fs");
     var config_1 = require_config();
     exports2.DEFAULT_COMPRESSION_LEVEL = 6;
     var ZipUploadStream = class extends stream.Transform {
@@ -127501,7 +127505,7 @@
       zip.on("end", zipEndCallback);
       for (const file of uploadSpecification) {
        if (file.sourcePath !== null) {
-          zip.append((0, fs_1.createReadStream)(file.sourcePath), {
+          zip.file(file.sourcePath, {
            name: file.destinationPath
          });
        } else {
diff --git a/scan/package.json b/scan/package.json
index edcfaa63..684ad697 100644
--- a/scan/package.json
+++ b/scan/package.json
@@ -16,7 +16,7 @@
     "url": "git+https://github.com/JetBrains/qodana-action.git"
   },
   "dependencies": {
-    "@actions/artifact": "^2.1.8",
+    "@actions/artifact": "^2.1.9",
     "@actions/cache": "^3.2.4",
     "@actions/core": "^1.10.1",
     "@actions/exec": "^1.1.0",
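
Note on the bundled `@actions/artifact` 2.1.9 change above: the old stall check armed a `setTimeout` whose callback threw, but an exception raised inside a timer callback surfaces as an uncaught exception rather than failing the upload promise, and the hard-coded `3e5` (300 s) window has been replaced by `getUploadChunkTimeout()`, which returns `3e4` (30 s). The new code instead races the upload against an interval-based watchdog promise and tears both down through an `AbortController`. Below is a minimal sketch of that pattern in isolation; `uploadWithStallDetection`, `upload`, `onProgress`, and `stallTimeoutMs` are illustrative names, not part of the library's API.

```typescript
// Minimal sketch of the watchdog pattern used by the bundled code above.
// Assumes a generic async `upload` that accepts an abort signal and a
// progress callback; all names here are hypothetical.
export async function uploadWithStallDetection(
  upload: (signal: AbortSignal, onProgress: () => void) => Promise<void>,
  stallTimeoutMs: number
): Promise<void> {
  const abortController = new AbortController()
  let lastProgressTime = Date.now()

  // Every progress report pushes the stall deadline forward.
  const onProgress = (): void => {
    lastProgressTime = Date.now()
  }

  // Watchdog: rejects if no progress arrives within stallTimeoutMs,
  // resolves (and stops polling) once the abort signal fires.
  const watchdog = new Promise<void>((resolve, reject) => {
    const timer = setInterval(() => {
      if (Date.now() - lastProgressTime > stallTimeoutMs) {
        reject(new Error('Upload progress stalled.'))
      }
    }, stallTimeoutMs)
    abortController.signal.addEventListener('abort', () => {
      clearInterval(timer)
      resolve()
    })
  })

  try {
    // A stalled watchdog rejects the race and fails the upload; a finished
    // upload wins the race and the finally block silences the watchdog.
    await Promise.race([upload(abortController.signal, onProgress), watchdog])
  } finally {
    // Stops the interval and, via the signal handed to the upload, cancels
    // any transfer that is still in flight.
    abortController.abort()
  }
}
```

The other change visible in the diff, `zip.append((0, fs_1.createReadStream)(file.sourcePath), ...)` becoming `zip.file(file.sourcePath, ...)`, appears to let archiver open each file itself when the entry is processed, instead of the caller eagerly creating a read stream per entry.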