diff --git a/lib/init-action-post.js b/lib/init-action-post.js
index 3d35e98242..335589de72 100644
--- a/lib/init-action-post.js
+++ b/lib/init-action-post.js
@@ -32421,7 +32421,7 @@ var require_internal_glob_options_helper = __commonJS({
 };
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.getOptions = void 0;
- var core18 = __importStar2(require_core2());
+ var core17 = __importStar2(require_core2());
 function getOptions(copy) {
 const result = {
 followSymbolicLinks: true,
@@ -32433,23 +32433,23 @@ var require_internal_glob_options_helper = __commonJS({
 if (copy) {
 if (typeof copy.followSymbolicLinks === "boolean") {
 result.followSymbolicLinks = copy.followSymbolicLinks;
- core18.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
+ core17.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
 }
 if (typeof copy.implicitDescendants === "boolean") {
 result.implicitDescendants = copy.implicitDescendants;
- core18.debug(`implicitDescendants '${result.implicitDescendants}'`);
+ core17.debug(`implicitDescendants '${result.implicitDescendants}'`);
 }
 if (typeof copy.matchDirectories === "boolean") {
 result.matchDirectories = copy.matchDirectories;
- core18.debug(`matchDirectories '${result.matchDirectories}'`);
+ core17.debug(`matchDirectories '${result.matchDirectories}'`);
 }
 if (typeof copy.omitBrokenSymbolicLinks === "boolean") {
 result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
- core18.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
+ core17.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
 }
 if (typeof copy.excludeHiddenFiles === "boolean") {
 result.excludeHiddenFiles = copy.excludeHiddenFiles;
- core18.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`);
+ core17.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`);
 }
 }
 return result;
@@ -33923,7 +33923,7 @@ var require_internal_globber = __commonJS({
 };
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.DefaultGlobber = void 0;
- var core18 = __importStar2(require_core2());
+ var core17 = __importStar2(require_core2());
 var fs18 = __importStar2(require("fs"));
 var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
 var path16 = __importStar2(require("path"));
@@ -33976,7 +33976,7 @@ var require_internal_globber = __commonJS({
 }
 const stack = [];
 for (const searchPath of patternHelper.getSearchPaths(patterns)) {
- core18.debug(`Search path '${searchPath}'`);
+ core17.debug(`Search path '${searchPath}'`);
 try {
 yield __await2(fs18.promises.lstat(searchPath));
 } catch (err) {
@@ -34051,7 +34051,7 @@ var require_internal_globber = __commonJS({
 } catch (err) {
 if (err.code === "ENOENT") {
 if (options.omitBrokenSymbolicLinks) {
- core18.debug(`Broken symlink '${item.path}'`);
+ core17.debug(`Broken symlink '${item.path}'`);
 return void 0;
 }
 throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
@@ -34067,7 +34067,7 @@ var require_internal_globber = __commonJS({
 traversalChain.pop();
 }
 if (traversalChain.some((x) => x === realPath)) {
- core18.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
+ core17.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
 return void 0;
 }
 traversalChain.push(realPath);
@@ -34160,7 +34160,7 @@ var require_internal_hash_files = __commonJS({
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.hashFiles = void 0;
 var crypto2 = __importStar2(require("crypto"));
- var core18 = __importStar2(require_core2());
+ var core17 = __importStar2(require_core2());
 var fs18 = __importStar2(require("fs"));
 var stream2 = __importStar2(require("stream"));
 var util = __importStar2(require("util"));
@@ -34169,7 +34169,7 @@ var require_internal_hash_files = __commonJS({
 var _a, e_1, _b, _c;
 var _d;
 return __awaiter2(this, void 0, void 0, function* () {
- const writeDelegate = verbose ? core18.info : core18.debug;
+ const writeDelegate = verbose ? core17.info : core17.debug;
 let hasMatch = false;
 const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd();
 const result = crypto2.createHash("sha256");
@@ -35562,7 +35562,7 @@ var require_cacheUtils = __commonJS({
 exports2.assertDefined = assertDefined;
 exports2.getCacheVersion = getCacheVersion;
 exports2.getRuntimeToken = getRuntimeToken;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var exec3 = __importStar2(require_exec());
 var glob2 = __importStar2(require_glob());
 var io7 = __importStar2(require_io());
@@ -35613,7 +35613,7 @@ var require_cacheUtils = __commonJS({
 _e = false;
 const file = _c;
 const relativeFile = path16.relative(workspace, file).replace(new RegExp(`\\${path16.sep}`, "g"), "/");
- core18.debug(`Matched: ${relativeFile}`);
+ core17.debug(`Matched: ${relativeFile}`);
 if (relativeFile === "") {
 paths.push(".");
 } else {
@@ -35641,7 +35641,7 @@ var require_cacheUtils = __commonJS({
 return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) {
 let versionOutput = "";
 additionalArgs.push("--version");
- core18.debug(`Checking ${app} ${additionalArgs.join(" ")}`);
+ core17.debug(`Checking ${app} ${additionalArgs.join(" ")}`);
 try {
 yield exec3.exec(`${app}`, additionalArgs, {
 ignoreReturnCode: true,
@@ -35652,10 +35652,10 @@ var require_cacheUtils = __commonJS({
 }
 });
 } catch (err) {
- core18.debug(err.message);
+ core17.debug(err.message);
 }
 versionOutput = versionOutput.trim();
- core18.debug(versionOutput);
+ core17.debug(versionOutput);
 return versionOutput;
 });
 }
@@ -35663,7 +35663,7 @@ var require_cacheUtils = __commonJS({
 return __awaiter2(this, void 0, void 0, function* () {
 const versionOutput = yield getVersion("zstd", ["--quiet"]);
 const version = semver9.clean(versionOutput);
- core18.debug(`zstd version: ${version}`);
+ core17.debug(`zstd version: ${version}`);
 if (versionOutput === "") {
 return constants_1.CompressionMethod.Gzip;
 } else {
@@ -75274,7 +75274,7 @@ var require_uploadUtils = __commonJS({
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.UploadProgress = void 0;
 exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var storage_blob_1 = require_commonjs15();
 var errors_1 = require_errors2();
 var UploadProgress = class {
@@ -75316,7 +75316,7 @@ var require_uploadUtils = __commonJS({
 const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
 const elapsedTime = Date.now() - this.startTime;
 const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1);
- core18.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`);
+ core17.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`);
 if (this.isDone()) {
 this.displayedComplete = true;
 }
@@ -75373,14 +75373,14 @@ var require_uploadUtils = __commonJS({
 };
 try {
 uploadProgress.startDisplayTimer();
- core18.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
+ core17.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
 const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
 if (response._response.status >= 400) {
 throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`);
 }
 return response;
 } catch (error3) {
- core18.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`);
+ core17.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`);
 throw error3;
 } finally {
 uploadProgress.stopDisplayTimer();
@@ -75465,7 +75465,7 @@ var require_requestUtils = __commonJS({
 exports2.retry = retry3;
 exports2.retryTypedResponse = retryTypedResponse;
 exports2.retryHttpClientResponse = retryHttpClientResponse;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var http_client_1 = require_lib();
 var constants_1 = require_constants7();
 function isSuccessStatusCode(statusCode) {
@@ -75523,9 +75523,9 @@ var require_requestUtils = __commonJS({
 isRetryable = isRetryableStatusCode(statusCode);
 errorMessage = `Cache service responded with ${statusCode}`;
 }
- core18.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
+ core17.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
 if (!isRetryable) {
- core18.debug(`${name} - Error is not retryable`);
+ core17.debug(`${name} - Error is not retryable`);
 break;
 }
 yield sleep(delay2);
@@ -75784,7 +75784,7 @@ var require_downloadUtils = __commonJS({
 exports2.downloadCacheHttpClient = downloadCacheHttpClient;
 exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
 exports2.downloadCacheStorageSDK = downloadCacheStorageSDK;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var http_client_1 = require_lib();
 var storage_blob_1 = require_commonjs15();
 var buffer = __importStar2(require("buffer"));
@@ -75822,7 +75822,7 @@ var require_downloadUtils = __commonJS({
 this.segmentIndex = this.segmentIndex + 1;
 this.segmentSize = segmentSize;
 this.receivedBytes = 0;
- core18.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);
+ core17.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);
 }
 /**
 * Sets the number of bytes received for the current segment.
@@ -75856,7 +75856,7 @@ var require_downloadUtils = __commonJS({
 const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
 const elapsedTime = Date.now() - this.startTime;
 const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1);
- core18.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`);
+ core17.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`);
 if (this.isDone()) {
 this.displayedComplete = true;
 }
@@ -75906,7 +75906,7 @@ var require_downloadUtils = __commonJS({
 }));
 downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
 downloadResponse.message.destroy();
- core18.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
+ core17.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
 });
 yield pipeResponseToStream(downloadResponse, writeStream);
 const contentLengthHeader = downloadResponse.message.headers["content-length"];
@@ -75917,7 +75917,7 @@ var require_downloadUtils = __commonJS({
 throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
 }
 } else {
- core18.debug("Unable to validate download, no Content-Length header");
+ core17.debug("Unable to validate download, no Content-Length header");
 }
 });
 }
@@ -76035,7 +76035,7 @@ var require_downloadUtils = __commonJS({
 const properties = yield client.getProperties();
 const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;
 if (contentLength < 0) {
- core18.debug("Unable to determine content length, downloading file with http-client...");
+ core17.debug("Unable to determine content length, downloading file with http-client...");
 yield downloadCacheHttpClient(archiveLocation, archivePath);
 } else {
 const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
@@ -76125,7 +76125,7 @@ var require_options = __commonJS({
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.getUploadOptions = getUploadOptions;
 exports2.getDownloadOptions = getDownloadOptions;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 function getUploadOptions(copy) {
 const result = {
 useAzureSdk: false,
@@ -76145,9 +76145,9 @@ var require_options = __commonJS({
 }
 result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency;
 result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize;
- core18.debug(`Use Azure SDK: ${result.useAzureSdk}`);
- core18.debug(`Upload concurrency: ${result.uploadConcurrency}`);
- core18.debug(`Upload chunk size: ${result.uploadChunkSize}`);
+ core17.debug(`Use Azure SDK: ${result.useAzureSdk}`);
+ core17.debug(`Upload concurrency: ${result.uploadConcurrency}`);
+ core17.debug(`Upload chunk size: ${result.uploadChunkSize}`);
 return result;
 }
 function getDownloadOptions(copy) {
@@ -76183,12 +76183,12 @@ var require_options = __commonJS({
 if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) {
 result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3;
 }
- core18.debug(`Use Azure SDK: ${result.useAzureSdk}`);
- core18.debug(`Download concurrency: ${result.downloadConcurrency}`);
- core18.debug(`Request timeout (ms): ${result.timeoutInMs}`);
- core18.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`);
- core18.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
- core18.debug(`Lookup only: ${result.lookupOnly}`);
+ core17.debug(`Use Azure SDK: ${result.useAzureSdk}`);
+ core17.debug(`Download concurrency: ${result.downloadConcurrency}`);
+ core17.debug(`Request timeout (ms): ${result.timeoutInMs}`);
+ core17.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`);
+ core17.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
+ core17.debug(`Lookup only: ${result.lookupOnly}`);
 return result;
 }
 }
@@ -76382,7 +76382,7 @@ var require_cacheHttpClient = __commonJS({
 exports2.downloadCache = downloadCache;
 exports2.reserveCache = reserveCache;
 exports2.saveCache = saveCache4;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var http_client_1 = require_lib();
 var auth_1 = require_auth();
 var fs18 = __importStar2(require("fs"));
@@ -76400,7 +76400,7 @@ var require_cacheHttpClient = __commonJS({
 throw new Error("Cache Service Url not found, unable to restore cache.");
 }
 const url2 = `${baseUrl}_apis/artifactcache/${resource}`;
- core18.debug(`Resource Url: ${url2}`);
+ core17.debug(`Resource Url: ${url2}`);
 return url2;
 }
 function createAcceptHeader(type2, apiVersion) {
@@ -76428,7 +76428,7 @@ var require_cacheHttpClient = __commonJS({
 return httpClient.getJson(getCacheApiUrl(resource));
 }));
 if (response.statusCode === 204) {
- if (core18.isDebug()) {
+ if (core17.isDebug()) {
 yield printCachesListForDiagnostics(keys[0], httpClient, version);
 }
 return null;
@@ -76441,9 +76441,9 @@ var require_cacheHttpClient = __commonJS({
 if (!cacheDownloadUrl) {
 throw new Error("Cache not found.");
 }
- core18.setSecret(cacheDownloadUrl);
- core18.debug(`Cache Result:`);
- core18.debug(JSON.stringify(cacheResult));
+ core17.setSecret(cacheDownloadUrl);
+ core17.debug(`Cache Result:`);
+ core17.debug(JSON.stringify(cacheResult));
 return cacheResult;
 });
 }
@@ -76457,10 +76457,10 @@ var require_cacheHttpClient = __commonJS({
 const cacheListResult = response.result;
 const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount;
 if (totalCount && totalCount > 0) {
- core18.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key
+ core17.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key
 Other caches with similar key:`);
 for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) {
- core18.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`);
+ core17.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`);
 }
 }
 }
@@ -76503,7 +76503,7 @@ Other caches with similar key:`);
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 return __awaiter2(this, void 0, void 0, function* () {
- core18.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+ core17.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
 const additionalHeaders = {
 "Content-Type": "application/octet-stream",
 "Content-Range": getContentRange(start, end)
@@ -76525,7 +76525,7 @@ Other caches with similar key:`);
 const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
 const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
 const parallelUploads = [...new Array(concurrency).keys()];
- core18.debug("Awaiting all uploads");
+ core17.debug("Awaiting all uploads");
 let offset = 0;
 try {
 yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () {
@@ -76568,16 +76568,16 @@ Other caches with similar key:`);
 yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options);
 } else {
 const httpClient = createHttpClient();
- core18.debug("Upload cache");
+ core17.debug("Upload cache");
 yield uploadFile(httpClient, cacheId, archivePath, options);
- core18.debug("Commiting cache");
+ core17.debug("Commiting cache");
 const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
- core18.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
+ core17.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
 const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
 if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
 throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
 }
- core18.info("Cache saved successfully");
+ core17.info("Cache saved successfully");
 }
 });
 }
@@ -82060,7 +82060,7 @@ var require_cache4 = __commonJS({
 exports2.isFeatureAvailable = isFeatureAvailable;
 exports2.restoreCache = restoreCache4;
 exports2.saveCache = saveCache4;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var path16 = __importStar2(require("path"));
 var utils = __importStar2(require_cacheUtils());
 var cacheHttpClient = __importStar2(require_cacheHttpClient());
@@ -82119,7 +82119,7 @@ var require_cache4 = __commonJS({
 function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
 return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
 const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
- core18.debug(`Cache service version: ${cacheServiceVersion}`);
+ core17.debug(`Cache service version: ${cacheServiceVersion}`);
 checkPaths(paths);
 switch (cacheServiceVersion) {
 case "v2":
@@ -82134,8 +82134,8 @@ var require_cache4 = __commonJS({
 return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
 restoreKeys = restoreKeys || [];
 const keys = [primaryKey, ...restoreKeys];
- core18.debug("Resolved Keys:");
- core18.debug(JSON.stringify(keys));
+ core17.debug("Resolved Keys:");
+ core17.debug(JSON.stringify(keys));
 if (keys.length > 10) {
 throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);
 }
@@ -82153,19 +82153,19 @@ var require_cache4 = __commonJS({
 return void 0;
 }
 if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
- core18.info("Lookup only - skipping download");
+ core17.info("Lookup only - skipping download");
 return cacheEntry.cacheKey;
 }
 archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
- core18.debug(`Archive Path: ${archivePath}`);
+ core17.debug(`Archive Path: ${archivePath}`);
 yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
- if (core18.isDebug()) {
+ if (core17.isDebug()) {
 yield (0, tar_1.listTar)(archivePath, compressionMethod);
 }
 const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
- core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
+ core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
 yield (0, tar_1.extractTar)(archivePath, compressionMethod);
- core18.info("Cache restored successfully");
+ core17.info("Cache restored successfully");
 return cacheEntry.cacheKey;
 } catch (error3) {
 const typedError = error3;
@@ -82173,16 +82173,16 @@ var require_cache4 = __commonJS({
 throw error3;
 } else {
 if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) {
- core18.error(`Failed to restore: ${error3.message}`);
+ core17.error(`Failed to restore: ${error3.message}`);
 } else {
- core18.warning(`Failed to restore: ${error3.message}`);
+ core17.warning(`Failed to restore: ${error3.message}`);
 }
 }
 } finally {
 try {
 yield utils.unlinkFile(archivePath);
 } catch (error3) {
- core18.debug(`Failed to delete archive: ${error3}`);
+ core17.debug(`Failed to delete archive: ${error3}`);
 }
 }
 return void 0;
@@ -82193,8 +82193,8 @@ var require_cache4 = __commonJS({
 options = Object.assign(Object.assign({}, options), { useAzureSdk: true });
 restoreKeys = restoreKeys || [];
 const keys = [primaryKey, ...restoreKeys];
- core18.debug("Resolved Keys:");
- core18.debug(JSON.stringify(keys));
+ core17.debug("Resolved Keys:");
+ core17.debug(JSON.stringify(keys));
 if (keys.length > 10) {
 throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);
 }
@@ -82212,30 +82212,30 @@ var require_cache4 = __commonJS({
 };
 const response = yield twirpClient.GetCacheEntryDownloadURL(request);
 if (!response.ok) {
- core18.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`);
+ core17.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`);
 return void 0;
 }
 const isRestoreKeyMatch = request.key !== response.matchedKey;
 if (isRestoreKeyMatch) {
- core18.info(`Cache hit for restore-key: ${response.matchedKey}`);
+ core17.info(`Cache hit for restore-key: ${response.matchedKey}`);
 } else {
- core18.info(`Cache hit for: ${response.matchedKey}`);
+ core17.info(`Cache hit for: ${response.matchedKey}`);
 }
 if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
- core18.info("Lookup only - skipping download");
+ core17.info("Lookup only - skipping download");
 return response.matchedKey;
 }
 archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
- core18.debug(`Archive path: ${archivePath}`);
- core18.debug(`Starting download of archive to: ${archivePath}`);
+ core17.debug(`Archive path: ${archivePath}`);
+ core17.debug(`Starting download of archive to: ${archivePath}`);
 yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options);
 const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
- core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
- if (core18.isDebug()) {
+ core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
+ if (core17.isDebug()) {
 yield (0, tar_1.listTar)(archivePath, compressionMethod);
 }
 yield (0, tar_1.extractTar)(archivePath, compressionMethod);
- core18.info("Cache restored successfully");
+ core17.info("Cache restored successfully");
 return response.matchedKey;
 } catch (error3) {
 const typedError = error3;
@@ -82243,9 +82243,9 @@ var require_cache4 = __commonJS({
 throw error3;
 } else {
 if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) {
- core18.error(`Failed to restore: ${error3.message}`);
+ core17.error(`Failed to restore: ${error3.message}`);
 } else {
- core18.warning(`Failed to restore: ${error3.message}`);
+ core17.warning(`Failed to restore: ${error3.message}`);
 }
 }
 } finally {
@@ -82254,7 +82254,7 @@ var require_cache4 = __commonJS({
 yield utils.unlinkFile(archivePath);
 }
 } catch (error3) {
- core18.debug(`Failed to delete archive: ${error3}`);
+ core17.debug(`Failed to delete archive: ${error3}`);
 }
 }
 return void 0;
@@ -82263,7 +82263,7 @@ var require_cache4 = __commonJS({
 function saveCache4(paths_1, key_1, options_1) {
 return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
 const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
- core18.debug(`Cache service version: ${cacheServiceVersion}`);
+ core17.debug(`Cache service version: ${cacheServiceVersion}`);
 checkPaths(paths);
 checkKey(key);
 switch (cacheServiceVersion) {
@@ -82281,26 +82281,26 @@ var require_cache4 = __commonJS({
 const compressionMethod = yield utils.getCompressionMethod();
 let cacheId = -1;
 const cachePaths = yield utils.resolvePaths(paths);
- core18.debug("Cache Paths:");
- core18.debug(`${JSON.stringify(cachePaths)}`);
+ core17.debug("Cache Paths:");
+ core17.debug(`${JSON.stringify(cachePaths)}`);
 if (cachePaths.length === 0) {
 throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
 }
 const archiveFolder = yield utils.createTempDirectory();
 const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod));
- core18.debug(`Archive Path: ${archivePath}`);
+ core17.debug(`Archive Path: ${archivePath}`);
 try {
 yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
- if (core18.isDebug()) {
+ if (core17.isDebug()) {
 yield (0, tar_1.listTar)(archivePath, compressionMethod);
 }
 const fileSizeLimit = 10 * 1024 * 1024 * 1024;
 const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
- core18.debug(`File Size: ${archiveFileSize}`);
+ core17.debug(`File Size: ${archiveFileSize}`);
 if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) {
 throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
 }
- core18.debug("Reserving Cache");
+ core17.debug("Reserving Cache");
 const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
 compressionMethod,
 enableCrossOsArchive,
@@ -82313,26 +82313,26 @@ var require_cache4 = __commonJS({
 } else {
 throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
 }
- core18.debug(`Saving Cache (ID: ${cacheId})`);
+ core17.debug(`Saving Cache (ID: ${cacheId})`);
 yield cacheHttpClient.saveCache(cacheId, archivePath, "", options);
 } catch (error3) {
 const typedError = error3;
 if (typedError.name === ValidationError.name) {
 throw error3;
 } else if (typedError.name === ReserveCacheError2.name) {
- core18.info(`Failed to save: ${typedError.message}`);
+ core17.info(`Failed to save: ${typedError.message}`);
 } else {
 if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) {
- core18.error(`Failed to save: ${typedError.message}`);
+ core17.error(`Failed to save: ${typedError.message}`);
 } else {
- core18.warning(`Failed to save: ${typedError.message}`);
+ core17.warning(`Failed to save: ${typedError.message}`);
 }
 }
 } finally {
 try {
 yield utils.unlinkFile(archivePath);
 } catch (error3) {
- core18.debug(`Failed to delete archive: ${error3}`);
+ core17.debug(`Failed to delete archive: ${error3}`);
 }
 }
 return cacheId;
@@ -82345,23 +82345,23 @@ var require_cache4 = __commonJS({
 const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
 let cacheId = -1;
 const cachePaths = yield utils.resolvePaths(paths);
- core18.debug("Cache Paths:");
- core18.debug(`${JSON.stringify(cachePaths)}`);
+ core17.debug("Cache Paths:");
+ core17.debug(`${JSON.stringify(cachePaths)}`);
 if (cachePaths.length === 0) {
 throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
 }
 const archiveFolder = yield utils.createTempDirectory();
 const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod));
- core18.debug(`Archive Path: ${archivePath}`);
+ core17.debug(`Archive Path: ${archivePath}`);
 try {
 yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
- if (core18.isDebug()) {
+ if (core17.isDebug()) {
 yield (0, tar_1.listTar)(archivePath, compressionMethod);
 }
 const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
- core18.debug(`File Size: ${archiveFileSize}`);
+ core17.debug(`File Size: ${archiveFileSize}`);
 options.archiveSizeBytes = archiveFileSize;
- core18.debug("Reserving Cache");
+ core17.debug("Reserving Cache");
 const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
 const request = {
 key,
@@ -82372,16 +82372,16 @@ var require_cache4 = __commonJS({
 const response = yield twirpClient.CreateCacheEntry(request);
 if (!response.ok) {
 if (response.message) {
- core18.warning(`Cache reservation failed: ${response.message}`);
+ core17.warning(`Cache reservation failed: ${response.message}`);
 }
 throw new Error(response.message || "Response was not ok");
 }
 signedUploadUrl = response.signedUploadUrl;
 } catch (error3) {
- core18.debug(`Failed to reserve cache: ${error3}`);
+ core17.debug(`Failed to reserve cache: ${error3}`);
 throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);
 }
- core18.debug(`Attempting to upload cache located at: ${archivePath}`);
+ core17.debug(`Attempting to upload cache located at: ${archivePath}`);
 yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options);
 const finalizeRequest = {
 key,
@@ -82389,7 +82389,7 @@ var require_cache4 = __commonJS({
 sizeBytes: `${archiveFileSize}`
 };
 const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);
- core18.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
+ core17.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
 if (!finalizeResponse.ok) {
 if (finalizeResponse.message) {
 throw new FinalizeCacheError(finalizeResponse.message);
@@ -82402,21 +82402,21 @@ var require_cache4 = __commonJS({
 if (typedError.name === ValidationError.name) {
 throw error3;
 } else if (typedError.name === ReserveCacheError2.name) {
- core18.info(`Failed to save: ${typedError.message}`);
+ core17.info(`Failed to save: ${typedError.message}`);
 } else if (typedError.name === FinalizeCacheError.name) {
- core18.warning(typedError.message);
+ core17.warning(typedError.message);
 } else {
 if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) {
- core18.error(`Failed to save: ${typedError.message}`);
+ core17.error(`Failed to save: ${typedError.message}`);
 } else {
- core18.warning(`Failed to save: ${typedError.message}`);
+ core17.warning(`Failed to save: ${typedError.message}`);
 }
 }
 } finally {
 try {
 yield utils.unlinkFile(archivePath);
 } catch (error3) {
- core18.debug(`Failed to delete archive: ${error3}`);
+ core17.debug(`Failed to delete archive: ${error3}`);
 }
 }
 return cacheId;
@@ -82643,7 +82643,7 @@ var require_retry_helper = __commonJS({
 };
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.RetryHelper = void 0;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var RetryHelper = class {
 constructor(maxAttempts, minSeconds, maxSeconds) {
 if (maxAttempts < 1) {
@@ -82666,10 +82666,10 @@ var require_retry_helper = __commonJS({
 if (isRetryable && !isRetryable(err)) {
 throw err;
 }
- core18.info(err.message);
+ core17.info(err.message);
 }
 const seconds = this.getSleepAmount();
- core18.info(`Waiting ${seconds} seconds before trying again`);
+ core17.info(`Waiting ${seconds} seconds before trying again`);
 yield this.sleep(seconds);
 attempt++;
 }
@@ -82772,7 +82772,7 @@ var require_tool_cache = __commonJS({
 exports2.findFromManifest = findFromManifest;
 exports2.isExplicitVersion = isExplicitVersion;
 exports2.evaluateVersions = evaluateVersions;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var io7 = __importStar2(require_io());
 var crypto2 = __importStar2(require("crypto"));
 var fs18 = __importStar2(require("fs"));
@@ -82801,8 +82801,8 @@ var require_tool_cache = __commonJS({
 return __awaiter2(this, void 0, void 0, function* () {
 dest = dest || path16.join(_getTempDirectory(), crypto2.randomUUID());
 yield io7.mkdirP(path16.dirname(dest));
- core18.debug(`Downloading ${url2}`);
- core18.debug(`Destination ${dest}`);
+ core17.debug(`Downloading ${url2}`);
+ core17.debug(`Destination ${dest}`);
 const maxAttempts = 3;
 const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10);
 const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20);
@@ -82828,7 +82828,7 @@ var require_tool_cache = __commonJS({
 allowRetries: false
 });
 if (auth) {
- core18.debug("set auth");
+ core17.debug("set auth");
 if (headers === void 0) {
 headers = {};
 }
@@ -82837,7 +82837,7 @@ var require_tool_cache = __commonJS({
 const response = yield http.get(url2, headers);
 if (response.message.statusCode !== 200) {
 const err = new HTTPError2(response.message.statusCode);
- core18.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
+ core17.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
 throw err;
 }
 const pipeline = util.promisify(stream2.pipeline);
@@ -82846,16 +82846,16 @@ var require_tool_cache = __commonJS({
 let succeeded = false;
 try {
 yield pipeline(readStream, fs18.createWriteStream(dest));
- core18.debug("download complete");
+ core17.debug("download complete");
 succeeded = true;
 return dest;
 } finally {
 if (!succeeded) {
- core18.debug("download failed");
+ core17.debug("download failed");
 try {
 yield io7.rmRF(dest);
 } catch (err) {
- core18.debug(`Failed to delete '${dest}'. ${err.message}`);
+ core17.debug(`Failed to delete '${dest}'. ${err.message}`);
 }
 }
 }
@@ -82870,7 +82870,7 @@ var require_tool_cache = __commonJS({
 process.chdir(dest);
 if (_7zPath) {
 try {
- const logLevel = core18.isDebug() ? "-bb1" : "-bb0";
+ const logLevel = core17.isDebug() ? "-bb1" : "-bb0";
 const args = [
 "x",
 // eXtract files with full paths
@@ -82923,7 +82923,7 @@ var require_tool_cache = __commonJS({
 throw new Error("parameter 'file' is required");
 }
 dest = yield _createExtractFolder(dest);
- core18.debug("Checking tar --version");
+ core17.debug("Checking tar --version");
 let versionOutput = "";
 yield (0, exec_1.exec)("tar --version", [], {
 ignoreReturnCode: true,
@@ -82933,7 +82933,7 @@ var require_tool_cache = __commonJS({
 stderr: (data) => versionOutput += data.toString()
 }
 });
- core18.debug(versionOutput.trim());
+ core17.debug(versionOutput.trim());
 const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR");
 let args;
 if (flags instanceof Array) {
@@ -82941,7 +82941,7 @@ var require_tool_cache = __commonJS({
 } else {
 args = [flags];
 }
- if (core18.isDebug() && !flags.includes("v")) {
+ if (core17.isDebug() && !flags.includes("v")) {
 args.push("-v");
 }
 let destArg = dest;
@@ -82972,7 +82972,7 @@ var require_tool_cache = __commonJS({
 args = [flags];
 }
 args.push("-x", "-C", dest, "-f", file);
- if (core18.isDebug()) {
+ if (core17.isDebug()) {
 args.push("-v");
 }
 const xarPath = yield io7.which("xar", true);
@@ -83015,7 +83015,7 @@ var require_tool_cache = __commonJS({
 "-Command",
 pwshCommand
 ];
- core18.debug(`Using pwsh at path: ${pwshPath}`);
+ core17.debug(`Using pwsh at path: ${pwshPath}`);
 yield (0, exec_1.exec)(`"${pwshPath}"`, args);
 } else {
 const powershellCommand = [
@@ -83035,7 +83035,7 @@ var require_tool_cache = __commonJS({
 powershellCommand
 ];
 const powershellPath = yield io7.which("powershell", true);
- core18.debug(`Using powershell at path: ${powershellPath}`);
+ core17.debug(`Using powershell at path: ${powershellPath}`);
 yield (0, exec_1.exec)(`"${powershellPath}"`, args);
 }
 });
@@ -83044,7 +83044,7 @@ var require_tool_cache = __commonJS({
 return __awaiter2(this, void 0, void 0, function* () {
 const unzipPath = yield io7.which("unzip", true);
 const args = [file];
- if (!core18.isDebug()) {
+ if (!core17.isDebug()) {
 args.unshift("-q");
 }
 args.unshift("-o");
@@ -83055,8 +83055,8 @@ var require_tool_cache = __commonJS({
 return __awaiter2(this, void 0, void 0, function* () {
 version = semver9.clean(version) || version;
 arch2 = arch2 || os4.arch();
- core18.debug(`Caching tool ${tool} ${version} ${arch2}`);
- core18.debug(`source dir: ${sourceDir}`);
+ core17.debug(`Caching tool ${tool} ${version} ${arch2}`);
+ core17.debug(`source dir: ${sourceDir}`);
 if (!fs18.statSync(sourceDir).isDirectory()) {
 throw new Error("sourceDir is not a directory");
 }
@@ -83073,14 +83073,14 @@ var require_tool_cache = __commonJS({
 return __awaiter2(this, void 0, void 0, function* () {
 version = semver9.clean(version) || version;
 arch2 = arch2 || os4.arch();
- core18.debug(`Caching tool ${tool} ${version} ${arch2}`);
- core18.debug(`source file: ${sourceFile}`);
+ core17.debug(`Caching tool ${tool} ${version} ${arch2}`);
+ core17.debug(`source file: ${sourceFile}`);
 if (!fs18.statSync(sourceFile).isFile()) {
 throw new Error("sourceFile is not a file");
 }
 const destFolder = yield _createToolPath(tool, version, arch2);
 const destPath = path16.join(destFolder, targetFile);
- core18.debug(`destination file ${destPath}`);
+ core17.debug(`destination file ${destPath}`);
 yield io7.cp(sourceFile, destPath);
 _completeToolPath(tool, version, arch2);
 return destFolder;
@@ -83103,12 +83103,12 @@ var require_tool_cache = __commonJS({
 if (versionSpec) {
 versionSpec = semver9.clean(versionSpec) || "";
 const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2);
- core18.debug(`checking cache: ${cachePath}`);
+ core17.debug(`checking cache: ${cachePath}`);
 if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) {
- core18.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
+ core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
 toolPath = cachePath;
 } else {
- core18.debug("not found");
+ core17.debug("not found");
 }
 }
 return toolPath;
@@ -83137,7 +83137,7 @@ var require_tool_cache = __commonJS({
 const http = new httpm.HttpClient("tool-cache");
 const headers = {};
 if (auth) {
- core18.debug("set auth");
+ core17.debug("set auth");
 headers.authorization = auth;
 }
 const response = yield http.getJson(treeUrl, headers);
@@ -83158,7 +83158,7 @@ var require_tool_cache = __commonJS({
 try {
 releases = JSON.parse(versionsRaw);
 } catch (_a) {
- core18.debug("Invalid json");
+ core17.debug("Invalid json");
 }
 }
 return releases;
@@ -83182,7 +83182,7 @@ var require_tool_cache = __commonJS({
 function _createToolPath(tool, version, arch2) {
 return __awaiter2(this, void 0, void 0, function* () {
 const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
- core18.debug(`destination ${folderPath}`);
+ core17.debug(`destination ${folderPath}`);
 const markerPath = `${folderPath}.complete`;
 yield io7.rmRF(folderPath);
 yield io7.rmRF(markerPath);
@@ -83194,18 +83194,18 @@ var require_tool_cache = __commonJS({
 const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
 const markerPath = `${folderPath}.complete`;
 fs18.writeFileSync(markerPath, "");
- core18.debug("finished caching tool");
+ core17.debug("finished caching tool");
 }
 function isExplicitVersion(versionSpec) {
 const c = semver9.clean(versionSpec) || "";
- core18.debug(`isExplicit: ${c}`);
+ core17.debug(`isExplicit: ${c}`);
 const valid3 = semver9.valid(c) != null;
- core18.debug(`explicit? ${valid3}`);
+ core17.debug(`explicit? ${valid3}`);
 return valid3;
 }
 function evaluateVersions(versions, versionSpec) {
 let version = "";
- core18.debug(`evaluating ${versions.length} versions`);
+ core17.debug(`evaluating ${versions.length} versions`);
 versions = versions.sort((a, b) => {
 if (semver9.gt(a, b)) {
 return 1;
@@ -83221,9 +83221,9 @@ var require_tool_cache = __commonJS({
 }
 }
 if (version) {
- core18.debug(`matched: ${version}`);
+ core17.debug(`matched: ${version}`);
 } else {
- core18.debug("match not found");
+ core17.debug("match not found");
 }
 return version;
 }
@@ -85914,14 +85914,14 @@ var require_retention = __commonJS({
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.getExpiration = getExpiration;
 var generated_1 = require_generated();
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 function getExpiration(retentionDays) {
 if (!retentionDays) {
 return void 0;
 }
 const maxRetentionDays = getRetentionDays();
 if (maxRetentionDays && maxRetentionDays < retentionDays) {
- core18.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`);
+ core17.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`);
 retentionDays = maxRetentionDays;
 }
 const expirationDate = /* @__PURE__ */ new Date();
@@ -86268,7 +86268,7 @@ var require_util10 = __commonJS({
 exports2.getBackendIdsFromToken = getBackendIdsFromToken;
 exports2.maskSigUrl = maskSigUrl;
 exports2.maskSecretUrls = maskSecretUrls;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var config_1 = require_config2();
 var jwt_decode_1 = __importDefault2(require_jwt_decode_cjs());
 var core_1 = require_core();
@@ -86295,8 +86295,8 @@ var require_util10 = __commonJS({
 workflowRunBackendId: scopeParts[1],
 workflowJobRunBackendId: scopeParts[2]
 };
- core18.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`);
- core18.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`);
+ core17.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`);
+ core17.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`);
 return ids;
 }
 throw InvalidJwtError;
@@ -86671,7 +86671,7 @@ var require_blob_upload = __commonJS({
 exports2.uploadZipToBlobStorage = uploadZipToBlobStorage;
 var storage_blob_1 = require_commonjs15();
 var config_1 = require_config2();
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var crypto2 = __importStar2(require("crypto"));
 var stream2 = __importStar2(require("stream"));
 var errors_1 = require_errors3();
@@ -86697,9 +86697,9 @@ var require_blob_upload = __commonJS({
 const bufferSize = (0, config_1.getUploadChunkSize)();
 const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
 const blockBlobClient = blobClient.getBlockBlobClient();
- core18.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
+ core17.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
 const uploadCallback = (progress) => {
- core18.info(`Uploaded bytes ${progress.loadedBytes}`);
+ core17.info(`Uploaded bytes ${progress.loadedBytes}`);
 uploadByteCount = progress.loadedBytes;
 lastProgressTime = Date.now();
 };
@@ -86713,7 +86713,7 @@ var require_blob_upload = __commonJS({
 const hashStream = crypto2.createHash("sha256");
 zipUploadStream.pipe(uploadStream);
 zipUploadStream.pipe(hashStream).setEncoding("hex");
- core18.info("Beginning upload of artifact content to blob storage");
+ core17.info("Beginning upload of artifact content to blob storage");
 try {
 yield Promise.race([
 blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
@@ -86727,12 +86727,12 @@ var require_blob_upload = __commonJS({
 } finally {
 abortController.abort();
 }
- core18.info("Finished uploading artifact content to blob storage!");
+ core17.info("Finished uploading artifact content to blob storage!");
 hashStream.end();
 sha256Hash = hashStream.read();
- core18.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`);
+ core17.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`);
 if (uploadByteCount === 0) {
- core18.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
+ core17.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
 }
 return {
 uploadSize: uploadByteCount,
@@ -111728,7 +111728,7 @@ var require_zip2 = __commonJS({
 var stream2 = __importStar2(require("stream"));
 var promises_1 = require("fs/promises");
 var archiver2 = __importStar2(require_archiver());
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var config_1 = require_config2();
 exports2.DEFAULT_COMPRESSION_LEVEL = 6;
 var ZipUploadStream = class extends stream2.Transform {
@@ -111745,7 +111745,7 @@ var require_zip2 = __commonJS({
 exports2.ZipUploadStream = ZipUploadStream;
 function createZipUploadStream(uploadSpecification_1) {
 return __awaiter2(this, arguments, void 0, function* (uploadSpecification, compressionLevel = exports2.DEFAULT_COMPRESSION_LEVEL) {
- core18.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`);
+ core17.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`);
 const zip = archiver2.create("zip", {
 highWaterMark: (0, config_1.getUploadChunkSize)(),
 zlib: { level: compressionLevel }
 });
@@ -111769,32 +111769,32 @@ var require_zip2 = __commonJS({
 }
 const bufferSize = (0, config_1.getUploadChunkSize)();
 const zipUploadStream = new ZipUploadStream(bufferSize);
- core18.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`);
- core18.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`);
+ core17.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`);
+ core17.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`);
 zip.pipe(zipUploadStream);
 zip.finalize();
 return zipUploadStream;
 });
 }
 var zipErrorCallback = (error3) => {
- core18.error("An error has occurred while creating the zip file for upload");
- core18.info(error3);
+ core17.error("An error has occurred while creating the zip file for upload");
+ core17.info(error3);
 throw new Error("An error has occurred during zip creation for the artifact");
 };
 var zipWarningCallback = (error3) => {
 if (error3.code === "ENOENT") {
- core18.warning("ENOENT warning during artifact zip creation. No such file or directory");
- core18.info(error3);
+ core17.warning("ENOENT warning during artifact zip creation. No such file or directory");
+ core17.info(error3);
 } else {
- core18.warning(`A non-blocking warning has occurred during artifact zip creation: ${error3.code}`);
- core18.info(error3);
+ core17.warning(`A non-blocking warning has occurred during artifact zip creation: ${error3.code}`);
+ core17.info(error3);
 }
 };
 var zipFinishCallback = () => {
- core18.debug("Zip stream for upload has finished.");
+ core17.debug("Zip stream for upload has finished.");
 };
 var zipEndCallback = () => {
- core18.debug("Zip stream for upload has ended.");
+ core17.debug("Zip stream for upload has ended.");
 };
 }
 });
@@ -111869,7 +111869,7 @@ var require_upload_artifact = __commonJS({
 };
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.uploadArtifact = uploadArtifact;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var retention_1 = require_retention();
 var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation();
 var artifact_twirp_client_1 = require_artifact_twirp_client2();
@@ -111916,13 +111916,13 @@ var require_upload_artifact = __commonJS({
 value: `sha256:${uploadResult.sha256Hash}`
 });
 }
- core18.info(`Finalizing artifact upload`);
+ core17.info(`Finalizing artifact upload`);
 const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq);
 if (!finalizeArtifactResp.ok) {
 throw new errors_1.InvalidResponseError("FinalizeArtifact: response from backend was not ok");
 }
 const artifactId = BigInt(finalizeArtifactResp.artifactId);
- core18.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`);
+ core17.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`);
 return {
 size: uploadResult.uploadSize,
 digest: uploadResult.sha256Hash,
@@ -114847,7 +114847,7 @@ var require_download_artifact = __commonJS({
 var crypto2 = __importStar2(require("crypto"));
 var stream2 = __importStar2(require("stream"));
 var github3 = __importStar2(require_github2());
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var httpClient = __importStar2(require_lib());
 var unzip_stream_1 = __importDefault2(require_unzip());
 var user_agent_1 = require_user_agent2();
@@ -114883,7 +114883,7 @@ var require_download_artifact = __commonJS({
 return yield streamExtractExternal(url2, directory);
 } catch (error3) {
 retryCount++;
- core18.debug(`Failed to download artifact after ${retryCount} retries due to ${error3.message}. Retrying in 5 seconds...`);
+ core17.debug(`Failed to download artifact after ${retryCount} retries due to ${error3.message}. Retrying in 5 seconds...`);
 yield new Promise((resolve8) => setTimeout(resolve8, 5e3));
 }
 }
@@ -114913,7 +114913,7 @@ var require_download_artifact = __commonJS({
 extractStream.on("data", () => {
 timer.refresh();
 }).on("error", (error3) => {
- core18.debug(`response.message: Artifact download failed: ${error3.message}`);
+ core17.debug(`response.message: Artifact download failed: ${error3.message}`);
 clearTimeout(timer);
 reject(error3);
 }).pipe(unzip_stream_1.default.Extract({ path: directory })).on("close", () => {
@@ -114921,7 +114921,7 @@ var require_download_artifact = __commonJS({
 clearTimeout(timer);
 if (hashStream) {
 hashStream.end();
 sha256Digest = hashStream.read();
- core18.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`);
+ core17.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`);
 }
 resolve8({ sha256Digest: `sha256:${sha256Digest}` });
 }).on("error", (error3) => {
@@ -114935,7 +114935,7 @@ var require_download_artifact = __commonJS({
 const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
 const api = github3.getOctokit(token);
 let digestMismatch = false;
- core18.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`);
+ core17.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`);
 const { headers, status } = yield api.rest.actions.downloadArtifact({
 owner: repositoryOwner,
 repo: repositoryName,
@@ -114952,16 +114952,16 @@ var require_download_artifact = __commonJS({
 if (!location) {
 throw new Error(`Unable to redirect to artifact download url`);
 }
- core18.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`);
+ core17.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`);
 try {
- core18.info(`Starting download of artifact to: ${downloadPath}`);
+ core17.info(`Starting download of artifact to: ${downloadPath}`);
 const extractResponse = yield streamExtract(location, downloadPath);
- core18.info(`Artifact download completed successfully.`);
+ core17.info(`Artifact download completed successfully.`);
 if (options === null || options === void 0 ? void 0 : options.expectedHash) {
 if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
 digestMismatch = true;
- core18.debug(`Computed digest: ${extractResponse.sha256Digest}`);
- core18.debug(`Expected digest: ${options.expectedHash}`);
+ core17.debug(`Computed digest: ${extractResponse.sha256Digest}`);
+ core17.debug(`Expected digest: ${options.expectedHash}`);
 }
 }
 } catch (error3) {
@@ -114987,7 +114987,7 @@ var require_download_artifact = __commonJS({
 Are you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`);
 }
 if (artifacts.length > 1) {
- core18.warning("Multiple artifacts found, defaulting to first.");
+ core17.warning("Multiple artifacts found, defaulting to first.");
 }
 const signedReq = {
 workflowRunBackendId: artifacts[0].workflowRunBackendId,
@@ -114995,16 +114995,16 @@ Are you trying to download from a different run? Try specifying a github-token w
 name: artifacts[0].name
 };
 const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq);
- core18.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`);
+ core17.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`);
 try {
- core18.info(`Starting download of artifact to: ${downloadPath}`);
+ core17.info(`Starting download of artifact to: ${downloadPath}`);
 const extractResponse = yield streamExtract(signedUrl, downloadPath);
- core18.info(`Artifact download completed successfully.`);
+ core17.info(`Artifact download completed successfully.`);
 if (options === null || options === void 0 ? void 0 : options.expectedHash) {
 if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
 digestMismatch = true;
- core18.debug(`Computed digest: ${extractResponse.sha256Digest}`);
- core18.debug(`Expected digest: ${options.expectedHash}`);
+ core17.debug(`Computed digest: ${extractResponse.sha256Digest}`);
+ core17.debug(`Expected digest: ${options.expectedHash}`);
 }
 }
 } catch (error3) {
@@ -115016,10 +115016,10 @@ Are you trying to download from a different run? Try specifying a github-token w
 function resolveOrCreateDirectory() {
 return __awaiter2(this, arguments, void 0, function* (downloadPath = (0, config_1.getGitHubWorkspaceDir)()) {
 if (!(yield exists(downloadPath))) {
- core18.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`);
+ core17.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`);
 yield promises_1.default.mkdir(downloadPath, { recursive: true });
 } else {
- core18.debug(`Artifact destination folder already exists: ${downloadPath}`);
+ core17.debug(`Artifact destination folder already exists: ${downloadPath}`);
 }
 return downloadPath;
 });
@@ -115070,7 +115070,7 @@ var require_retry_options = __commonJS({
 })();
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.getRetryOptions = getRetryOptions;
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var defaultMaxRetryNumber = 5;
 var defaultExemptStatusCodes = [400, 401, 403, 404, 422];
 function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) {
@@ -115085,7 +115085,7 @@ var require_retry_options = __commonJS({
 retryOptions.doNotRetry = exemptStatusCodes;
 }
 const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries });
- core18.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : "octokit default: [400, 401, 403, 404, 422]"})`);
+ core17.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : "octokit default: [400, 401, 403, 404, 422]"})`);
 return [retryOptions, requestOptions];
 }
 }
@@ -115252,7 +115252,7 @@ var require_get_artifact = __commonJS({
 exports2.getArtifactInternal = getArtifactInternal;
 var github_1 = require_github2();
 var plugin_retry_1 = require_dist_node17();
- var core18 = __importStar2(require_core());
+ var core17 = __importStar2(require_core());
 var utils_1 = require_utils10();
 var retry_options_1 = require_retry_options();
 var plugin_request_log_1 = require_dist_node16();
@@ -115290,7 +115290,7 @@ var require_get_artifact = __commonJS({
 let artifact2 = getArtifactResp.data.artifacts[0];
 if (getArtifactResp.data.artifacts.length > 1) {
 artifact2 = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0];
- core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`);
+ core17.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`);
 }
 return {
 artifact: {
@@ -115322,7 +115322,7 @@ var require_get_artifact = __commonJS({
 let artifact2 = res.artifacts[0];
 if (res.artifacts.length > 1) {
 artifact2 = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0];
- core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`);
+ core17.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`);
 }
 return {
 artifact: {
@@ -120057,7 +120057,7 @@ var require_requestUtils2 = __commonJS({
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.retryHttpClientRequest = exports2.retry = void 0;
 var utils_1 = require_utils12();
- var core18 = __importStar2(require_core4());
+ var core17 = __importStar2(require_core4());
 var config_variables_1 = require_config_variables();
 function retry3(name, operation, customErrorMessages, maxAttempts) {
 return __awaiter2(this, void 0, void 0, function* () {
@@ -120084,13 +120084,13 @@ var require_requestUtils2 = __commonJS({
 errorMessage = error3.message;
 }
 if (!isRetryable) {
- core18.info(`${name} - Error is not retryable`);
+ core17.info(`${name} - Error is not retryable`);
 if (response) {
 (0, utils_1.displayHttpDiagnostics)(response);
 }
 break;
 }
- core18.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
+ core17.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
 yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt));
 attempt++;
 }
@@ -120174,7 +120174,7 @@ var require_upload_http_client = __commonJS({
 Object.defineProperty(exports2, "__esModule", { value: true });
 exports2.UploadHttpClient = void 0;
 var fs18 = __importStar2(require("fs"));
- var core18 = __importStar2(require_core4());
+ var core17 = __importStar2(require_core4());
 var tmp = __importStar2(require_tmp_promise());
 var stream2 = __importStar2(require("stream"));
 var utils_1 = require_utils12();
@@ -120239,7 +120239,7 @@ var require_upload_http_client = __commonJS({
 return __awaiter2(this, void 0, void 0, function* () {
 const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)();
 const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)();
- core18.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`);
+ core17.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`);
 const parameters = [];
 let continueOnError = true;
 if (options) {
@@ -120276,15 +120276,15 @@ var require_upload_http_client = __commonJS({
 }
 const startTime = perf_hooks_1.performance.now();
 const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters);
- if (core18.isDebug()) {
- core18.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`);
+ if (core17.isDebug()) {
+ core17.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`);
 }
 uploadFileSize += uploadFileResult.successfulUploadSize;
 totalFileSize += uploadFileResult.totalSize;
 if (uploadFileResult.isSuccess === false) {
 failedItemsToReport.push(currentFileParameters.file);
 if (!continueOnError) {
- core18.error(`aborting artifact upload`);
+ core17.error(`aborting artifact upload`);
 abortPendingFileUploads = true;
 }
 }
@@ -120293,7 +120293,7 @@ var require_upload_http_client = __commonJS({
 })));
 this.statusReporter.stop();
 this.uploadHttpManager.disposeAndReplaceAllClients();
- core18.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`);
+ core17.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`);
 return {
 uploadSize: uploadFileSize,
 totalSize: totalFileSize,
@@ -120319,16 +120319,16 @@ var require_upload_http_client = __commonJS({
 let uploadFileSize = 0;
 let isGzip = true;
 if (!isFIFO && totalFileSize < 65536) {
- core18.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`);
+ core17.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`);
 const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file);
 let openUploadStream;
 if (totalFileSize < buffer.byteLength) {
- core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`);
+ core17.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`);
 openUploadStream = () => fs18.createReadStream(parameters.file);
 isGzip = false;
 uploadFileSize = totalFileSize;
 } else {
- core18.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`);
+ core17.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`);
 openUploadStream = () => {
 const passThrough = new stream2.PassThrough();
 passThrough.end(buffer);
@@ -120340,7 +120340,7 @@ var require_upload_http_client = __commonJS({
 if (!result) {
 isUploadSuccessful = false;
 failedChunkSizes += uploadFileSize;
- core18.warning(`Aborting upload for ${parameters.file} due to failure`);
+ core17.warning(`Aborting upload for ${parameters.file} due to failure`);
 }
 return {
 isSuccess: isUploadSuccessful,
@@ -120349,16 +120349,16 @@ var require_upload_http_client = __commonJS({
 };
 } else {
 const tempFile = yield tmp.file();
- core18.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`);
+ core17.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; if (!isFIFO && totalFileSize < uploadFileSize) { - core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core17.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); uploadFileSize = totalFileSize; uploadFilePath = parameters.file; isGzip = false; } else { - core18.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + core17.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); } let abortFileUpload = false; while (offset < uploadFileSize) { @@ -120378,7 +120378,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += chunkSize; - core18.warning(`Aborting upload for ${parameters.file} due to failure`); + core17.warning(`Aborting upload for ${parameters.file} due to failure`); abortFileUpload = true; } else { if (uploadFileSize > 8388608) { @@ -120386,7 +120386,7 @@ var require_upload_http_client = __commonJS({ } } } - core18.debug(`deleting temporary gzip file ${tempFile.path}`); + core17.debug(`deleting temporary gzip file ${tempFile.path}`); yield tempFile.cleanup(); return { isSuccess: isUploadSuccessful, @@ -120425,7 +120425,7 @@ var require_upload_http_client = __commonJS({ if (response) { (0, utils_1.displayHttpDiagnostics)(response); } - core18.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + core17.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; } return false; @@ -120433,14 +120433,14 @@ var require_upload_http_client = __commonJS({ const backOff = (retryAfterValue) => __awaiter2(this, void 0, void 0, function* () { this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + core17.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core18.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + core17.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); yield (0, utils_1.sleep)(backoffTime); } - core18.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + core17.info(`Finished backoff for retry #${retryCount}, continuing with upload`); return; }); while (retryCount <= retryLimit) { @@ -120448,7 +120448,7 @@ var require_upload_http_client = __commonJS({ try { response = yield uploadChunkRequest(); } catch (error3) { - core18.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + core17.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); console.log(error3); if (incrementAndCheckRetryLimit()) { return false; @@ -120460,13 +120460,13 @@ var require_upload_http_client = __commonJS({ if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { return true; } else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core18.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + core17.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); if (incrementAndCheckRetryLimit(response)) { return false; } (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { - core18.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + core17.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); (0, utils_1.displayHttpDiagnostics)(response); return false; } @@ -120484,7 +120484,7 @@ var require_upload_http_client = __commonJS({ resourceUrl.searchParams.append("artifactName", artifactName); const parameters = { Size: size }; const data = JSON.stringify(parameters, null, 2); - core18.debug(`URL is ${resourceUrl.toString()}`); + core17.debug(`URL is ${resourceUrl.toString()}`); const client = this.uploadHttpManager.getClient(0); const headers = (0, utils_1.getUploadHeaders)("application/json", false); const customErrorMessages = /* @__PURE__ */ new Map([ @@ -120497,7 +120497,7 @@ var require_upload_http_client = __commonJS({ return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); yield response.readBody(); - core18.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + core17.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); } }; @@ -120566,7 +120566,7 @@ var require_download_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; var fs18 = __importStar2(require("fs")); - var core18 = __importStar2(require_core4()); + var core17 = __importStar2(require_core4()); var zlib3 = __importStar2(require("zlib")); var utils_1 = require_utils12(); var url_1 = require("url"); @@ -120620,11 +120620,11 @@ var require_download_http_client = __commonJS({ downloadSingleArtifact(downloadItems) { return __awaiter2(this, void 0, void 0, function* () { const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)(); - core18.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + core17.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; let currentFile = 0; let downloadedFiles = 0; - 
core18.info(`Total number of files that will be downloaded: ${downloadItems.length}`); + core17.info(`Total number of files that will be downloaded: ${downloadItems.length}`); this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); this.statusReporter.start(); yield Promise.all(parallelDownloads.map((index) => __awaiter2(this, void 0, void 0, function* () { @@ -120633,8 +120633,8 @@ var require_download_http_client = __commonJS({ currentFile += 1; const startTime = perf_hooks_1.performance.now(); yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); - if (core18.isDebug()) { - core18.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + if (core17.isDebug()) { + core17.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); } this.statusReporter.incrementProcessedCount(); } @@ -120672,19 +120672,19 @@ var require_download_http_client = __commonJS({ } else { this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + core17.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core18.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + core17.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); yield (0, utils_1.sleep)(backoffTime); } - core18.info(`Finished backoff for retry #${retryCount}, continuing with download`); + core17.info(`Finished backoff for retry #${retryCount}, continuing with download`); } }); const isAllBytesReceived = (expected, received) => { if (!expected || !received || process.env["ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION"]) { - core18.info("Skipping download validation."); + core17.info("Skipping download validation."); return true; } return parseInt(expected) === received; @@ -120705,7 +120705,7 @@ var require_download_http_client = __commonJS({ try { response = yield makeDownloadRequest(); } catch (error3) { - core18.info("An error occurred while attempting to download a file"); + core17.info("An error occurred while attempting to download a file"); console.log(error3); yield backOff(); continue; @@ -120725,7 +120725,7 @@ var require_download_http_client = __commonJS({ } } if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core18.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + core17.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); resetDestinationStream(downloadPath); (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? 
yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { @@ -120747,29 +120747,29 @@ var require_download_http_client = __commonJS({ if (isGzip) { const gunzip = zlib3.createGunzip(); response.message.on("error", (error3) => { - core18.info(`An error occurred while attempting to read the response stream`); + core17.info(`An error occurred while attempting to read the response stream`); gunzip.close(); destinationStream.close(); reject(error3); }).pipe(gunzip).on("error", (error3) => { - core18.info(`An error occurred while attempting to decompress the response stream`); + core17.info(`An error occurred while attempting to decompress the response stream`); destinationStream.close(); reject(error3); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error3) => { - core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core17.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error3); }); } else { response.message.on("error", (error3) => { - core18.info(`An error occurred while attempting to read the response stream`); + core17.info(`An error occurred while attempting to read the response stream`); destinationStream.close(); reject(error3); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error3) => { - core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core17.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error3); }); } @@ -120908,7 +120908,7 @@ var require_artifact_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultArtifactClient = void 0; - var core18 = __importStar2(require_core4()); + var core17 = __importStar2(require_core4()); var upload_specification_1 = require_upload_specification(); var upload_http_client_1 = require_upload_http_client(); var utils_1 = require_utils12(); @@ -120929,7 +120929,7 @@ var require_artifact_client = __commonJS({ */ uploadArtifact(name, files, rootDirectory, options) { return __awaiter2(this, void 0, void 0, function* () { - core18.info(`Starting artifact upload + core17.info(`Starting artifact upload For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); (0, path_and_artifact_name_validation_1.checkArtifactName)(name); const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files); @@ -120941,24 +120941,24 @@ For more detailed logs during the artifact upload process, enable step-debugging }; const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); if (uploadSpecification.length === 0) { - core18.warning(`No files found that can be uploaded`); + core17.warning(`No files found that can be uploaded`); } else { const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); if (!response.fileContainerResourceUrl) { - core18.debug(response.toString()); + core17.debug(response.toString()); throw new Error("No URL provided by the Artifact Service to upload an artifact to"); } - core18.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - core18.info(`Container for artifact "${name}" successfully created. 
Starting upload of file(s)`); + core17.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core17.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - core18.info(`File upload process has finished. Finalizing the artifact upload`); + core17.info(`File upload process has finished. Finalizing the artifact upload`); yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); if (uploadResult.failedItems.length > 0) { - core18.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + core17.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); } else { - core18.info(`Artifact has been finalized. All files have been successfully uploaded!`); + core17.info(`Artifact has been finalized. All files have been successfully uploaded!`); } - core18.info(` + core17.info(` The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage @@ -120992,10 +120992,10 @@ Note: The size of downloaded zips can differ significantly from the reported siz path16 = (0, path_1.resolve)(path16); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path16, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { - core18.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + core17.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); - core18.info("Directory structure has been set up for the artifact"); + core17.info("Directory structure has been set up for the artifact"); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } @@ -121011,7 +121011,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz const response = []; const artifacts = yield downloadHttpClient.listArtifacts(); if (artifacts.count === 0) { - core18.info("Unable to find any artifacts for the associated workflow"); + core17.info("Unable to find any artifacts for the associated workflow"); return response; } if (!path16) { @@ -121023,11 +121023,11 @@ Note: The size of downloaded zips can differ significantly from the reported siz while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; - core18.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + core17.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path16, 
true); if (downloadSpecification.filesToDownload.length === 0) { - core18.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + core17.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); @@ -123957,7 +123957,7 @@ var require_sarif_schema_2_1_0 = __commonJS({ }); // src/init-action-post.ts -var core17 = __toESM(require_core()); +var core16 = __toESM(require_core()); // src/actions-util.ts var fs2 = __toESM(require("fs")); @@ -130569,243 +130569,14 @@ async function createDatabaseBundleCli(codeql, config, language) { // src/init-action-post-helper.ts var fs17 = __toESM(require("fs")); -var core16 = __toESM(require_core()); var github2 = __toESM(require_github()); -// src/status-report.ts -var os3 = __toESM(require("os")); -var core13 = __toESM(require_core()); -function isFirstPartyAnalysis(actionName) { - if (actionName !== "upload-sarif" /* UploadSarif */) { - return true; - } - return process.env["CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */] === "true"; -} -var JobStatus = /* @__PURE__ */ ((JobStatus2) => { - JobStatus2["UnknownStatus"] = "JOB_STATUS_UNKNOWN"; - JobStatus2["SuccessStatus"] = "JOB_STATUS_SUCCESS"; - JobStatus2["FailureStatus"] = "JOB_STATUS_FAILURE"; - JobStatus2["ConfigErrorStatus"] = "JOB_STATUS_CONFIGURATION_ERROR"; - return JobStatus2; -})(JobStatus || {}); -function getActionsStatus(error3, otherFailureCause) { - if (error3 || otherFailureCause) { - return error3 instanceof ConfigurationError ? "user-error" : "failure"; - } else { - return "success"; - } -} -function getJobStatusDisplayName(status) { - switch (status) { - case "JOB_STATUS_SUCCESS" /* SuccessStatus */: - return "success"; - case "JOB_STATUS_FAILURE" /* FailureStatus */: - return "failure"; - case "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */: - return "configuration error"; - case "JOB_STATUS_UNKNOWN" /* UnknownStatus */: - return "unknown"; - default: - assertNever(status); - } -} -function setJobStatusIfUnsuccessful(actionStatus) { - if (actionStatus === "user-error") { - core13.exportVariable( - "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, - process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ - ); - } else if (actionStatus === "failure" || actionStatus === "aborted") { - core13.exportVariable( - "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, - process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_FAILURE" /* FailureStatus */ - ); - } -} -async function createStatusReportBase(actionName, status, actionStartedAt, config, diskInfo, logger, cause, exception2) { - try { - const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || ""; - const ref = await getRef(); - const jobRunUUID = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */] || ""; - const workflowRunID = getWorkflowRunID(); - const workflowRunAttempt = getWorkflowRunAttempt(); - const workflowName = process.env["GITHUB_WORKFLOW"] || ""; - const jobName = process.env["GITHUB_JOB"] || ""; - const analysis_key = await getAnalysisKey(); - let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; - if (workflowStartedAt === void 0) { - workflowStartedAt = actionStartedAt.toISOString(); - core13.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); - } - const runnerOs = getRequiredEnvParam("RUNNER_OS"); - const codeQlCliVersion = getCachedCodeQlVersion(); - const actionRef = process.env["GITHUB_ACTION_REF"] || ""; - const testingEnvironment = getTestingEnvironment(); - if (testingEnvironment) { - core13.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); - } - const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; - const statusReport = { - action_name: actionName, - action_oid: "unknown", - // TODO decide if it's possible to fill this in - action_ref: actionRef, - action_started_at: actionStartedAt.toISOString(), - action_version: getActionVersion(), - analysis_kinds: config?.analysisKinds?.join(","), - analysis_key, - build_mode: config?.buildMode, - commit_oid: commitOid, - first_party_analysis: isFirstPartyAnalysis(actionName), - job_name: jobName, - job_run_uuid: jobRunUUID, - ref, - runner_os: runnerOs, - started_at: workflowStartedAt, - status, - steady_state_default_setup: isSteadyStateDefaultSetupRun, - testing_environment: testingEnvironment || "", - workflow_name: workflowName, - workflow_run_attempt: workflowRunAttempt, - workflow_run_id: workflowRunID - }; - try { - statusReport.actions_event_name = getWorkflowEventName(); - } catch (e) { - logger.warning( - `Could not determine the workflow event name: ${getErrorMessage(e)}.` - ); - } - if (config) { - statusReport.languages = config.languages?.join(","); - } - if (diskInfo) { - statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes; - statusReport.runner_total_disk_space_bytes = diskInfo.numTotalBytes; - } - if (cause) { - statusReport.cause = cause; - } - if (exception2) { - statusReport.exception = exception2; - } - if (status === "success" || status === "failure" || status === "aborted" || status === "user-error") { - statusReport.completed_at = (/* @__PURE__ */ new Date()).toISOString(); - } - const matrix = getRequiredInput("matrix"); - if (matrix) { - statusReport.matrix_vars = matrix; - } - if ("RUNNER_ARCH" in process.env) { - statusReport.runner_arch = process.env["RUNNER_ARCH"]; - } - if (!(runnerOs === "Linux" && isSelfHostedRunner())) { - statusReport.runner_os_release = os3.release(); - } - if (codeQlCliVersion !== void 0) { - statusReport.codeql_version = codeQlCliVersion.version; - } - const imageVersion = process.env["ImageVersion"]; - if (imageVersion) { - statusReport.runner_image_version = imageVersion; - } - return statusReport; - } catch (e) { - logger.warning( - `Failed to gather information for telemetry: ${getErrorMessage(e)}. 
Will skip sending status report.` - ); - if (isInTestMode()) { - throw e; - } - return void 0; - } -} -var OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`."; -var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpoint. Please update to a compatible version of `codeql-action`."; -async function sendStatusReport(statusReport) { - setJobStatusIfUnsuccessful(statusReport.status); - const statusReportJSON = JSON.stringify(statusReport); - core13.debug(`Sending status report: ${statusReportJSON}`); - if (isInTestMode()) { - core13.debug("In test mode. Status reports are not uploaded."); - return; - } - const nwo = getRepositoryNwo(); - const client = getApiClient(); - try { - await client.request( - "PUT /repos/:owner/:repo/code-scanning/analysis/status", - { - owner: nwo.owner, - repo: nwo.repo, - data: statusReportJSON - } - ); - } catch (e) { - const httpError = asHTTPError(e); - if (httpError !== void 0) { - switch (httpError.status) { - case 403: - if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core13.warning( - `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` - ); - } else { - core13.warning( - `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}` - ); - } - return; - case 404: - core13.warning(httpError.message); - return; - case 422: - if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core13.debug(INCOMPATIBLE_MSG); - } else { - core13.debug(OUT_OF_DATE_MSG); - } - return; - } - } - core13.warning( - `An unexpected error occurred when sending a status report: ${getErrorMessage( - e - )}` - ); - } -} -async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error3, logger) { - try { - const statusReport = await createStatusReportBase( - actionName, - "failure", - actionStartedAt, - void 0, - void 0, - logger, - `Unhandled CodeQL Action error: ${getErrorMessage(error3)}`, - error3 instanceof Error ? error3.stack : void 0 - ); - if (statusReport !== void 0) { - await sendStatusReport(statusReport); - } - } catch (e) { - logger.warning( - `Failed to send the unhandled error status report: ${getErrorMessage(e)}.` - ); - if (isInTestMode()) { - throw e; - } - } -} - // src/upload-lib.ts var fs15 = __toESM(require("fs")); var path14 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core14 = __toESM(require_core()); +var core13 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts @@ -132062,7 +131833,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. 
Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}` ); - core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -132161,13 +131932,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core14.warning(httpError.message || GENERIC_403_MSG); + core13.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core14.warning(httpError.message || GENERIC_404_MSG); + core13.warning(httpError.message || GENERIC_404_MSG); break; default: - core14.warning(httpError.message); + core13.warning(httpError.message); break; } } @@ -132522,7 +132293,7 @@ function validateUniqueCategory(sarif, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core14.exportVariable(sentinelEnvVar, sentinelEnvVar); + core13.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -132567,7 +132338,7 @@ function filterAlertsByDiffRange(logger, sarif) { var fs16 = __toESM(require("fs")); var path15 = __toESM(require("path")); var import_zlib2 = __toESM(require("zlib")); -var core15 = __toESM(require_core()); +var core14 = __toESM(require_core()); function toCodedErrors(errors) { return Object.entries(errors).reduce( (acc, [code, message]) => { @@ -132744,38 +132515,29 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { return uploadResult ? { ...uploadResult.statusReport, sarifID: uploadResult.sarifID } : {}; } async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger) { - if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] !== "true") { - core16.exportVariable( - "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, - process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ - ); - if (!isCodeScanningEnabled(config)) { - return { - upload_failed_run_skipped_because: "Code Scanning is not enabled." - }; - } - try { - return await maybeUploadFailedSarif( - config, - repositoryNwo, - features, - logger - ); - } catch (e) { - logger.debug( - `Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}` - ); - return createFailedUploadFailedSarifResult(e); - } - } else { - core16.exportVariable( - "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, - process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_SUCCESS" /* SuccessStatus */ - ); + if (!isCodeScanningEnabled(config)) { + return { + upload_failed_run_skipped_because: "Code Scanning is not enabled." + }; + } + if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] === "true") { return { upload_failed_run_skipped_because: "Analyze Action completed successfully" }; } + try { + return await maybeUploadFailedSarif( + config, + repositoryNwo, + features, + logger + ); + } catch (e) { + logger.debug( + `Failed to upload a SARIF file for this failed CodeQL code scanning run. 
${e}` + ); + return createFailedUploadFailedSarifResult(e); + } } async function run(uploadAllAvailableDebugArtifacts, printDebugLogs2, codeql, config, repositoryNwo, features, logger) { const uploadFailedSarifResult = await tryUploadSarifIfRunFailed( @@ -132898,12 +132660,233 @@ async function removeUploadedSarif(uploadFailedSarifResult, logger) { ); } } -function getFinalJobStatus() { - const jobStatusFromEnvironment = process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */]; - if (!jobStatusFromEnvironment || !Object.values(JobStatus).includes(jobStatusFromEnvironment)) { - return "JOB_STATUS_UNKNOWN" /* UnknownStatus */; + +// src/status-report.ts +var os3 = __toESM(require("os")); +var core15 = __toESM(require_core()); +function isFirstPartyAnalysis(actionName) { + if (actionName !== "upload-sarif" /* UploadSarif */) { + return true; + } + return process.env["CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */] === "true"; +} +var JobStatus = /* @__PURE__ */ ((JobStatus2) => { + JobStatus2["UnknownStatus"] = "JOB_STATUS_UNKNOWN"; + JobStatus2["SuccessStatus"] = "JOB_STATUS_SUCCESS"; + JobStatus2["FailureStatus"] = "JOB_STATUS_FAILURE"; + JobStatus2["ConfigErrorStatus"] = "JOB_STATUS_CONFIGURATION_ERROR"; + return JobStatus2; +})(JobStatus || {}); +function getActionsStatus(error3, otherFailureCause) { + if (error3 || otherFailureCause) { + return error3 instanceof ConfigurationError ? "user-error" : "failure"; + } else { + return "success"; + } +} +function getJobStatusDisplayName(status) { + switch (status) { + case "JOB_STATUS_SUCCESS" /* SuccessStatus */: + return "success"; + case "JOB_STATUS_FAILURE" /* FailureStatus */: + return "failure"; + case "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */: + return "configuration error"; + case "JOB_STATUS_UNKNOWN" /* UnknownStatus */: + return "unknown"; + default: + assertNever(status); + } +} +function setJobStatusIfUnsuccessful(actionStatus) { + if (actionStatus === "user-error") { + core15.exportVariable( + "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, + process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ + ); + } else if (actionStatus === "failure" || actionStatus === "aborted") { + core15.exportVariable( + "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, + process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_FAILURE" /* FailureStatus */ + ); + } +} +async function createStatusReportBase(actionName, status, actionStartedAt, config, diskInfo, logger, cause, exception2) { + try { + const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || ""; + const ref = await getRef(); + const jobRunUUID = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */] || ""; + const workflowRunID = getWorkflowRunID(); + const workflowRunAttempt = getWorkflowRunAttempt(); + const workflowName = process.env["GITHUB_WORKFLOW"] || ""; + const jobName = process.env["GITHUB_JOB"] || ""; + const analysis_key = await getAnalysisKey(); + let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; + if (workflowStartedAt === void 0) { + workflowStartedAt = actionStartedAt.toISOString(); + core15.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + } + const runnerOs = getRequiredEnvParam("RUNNER_OS"); + const codeQlCliVersion = getCachedCodeQlVersion(); + const actionRef = process.env["GITHUB_ACTION_REF"] || ""; + const testingEnvironment = getTestingEnvironment(); + if (testingEnvironment) { + core15.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + } + const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; + const statusReport = { + action_name: actionName, + action_oid: "unknown", + // TODO decide if it's possible to fill this in + action_ref: actionRef, + action_started_at: actionStartedAt.toISOString(), + action_version: getActionVersion(), + analysis_kinds: config?.analysisKinds?.join(","), + analysis_key, + build_mode: config?.buildMode, + commit_oid: commitOid, + first_party_analysis: isFirstPartyAnalysis(actionName), + job_name: jobName, + job_run_uuid: jobRunUUID, + ref, + runner_os: runnerOs, + started_at: workflowStartedAt, + status, + steady_state_default_setup: isSteadyStateDefaultSetupRun, + testing_environment: testingEnvironment || "", + workflow_name: workflowName, + workflow_run_attempt: workflowRunAttempt, + workflow_run_id: workflowRunID + }; + try { + statusReport.actions_event_name = getWorkflowEventName(); + } catch (e) { + logger.warning( + `Could not determine the workflow event name: ${getErrorMessage(e)}.` + ); + } + if (config) { + statusReport.languages = config.languages?.join(","); + } + if (diskInfo) { + statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes; + statusReport.runner_total_disk_space_bytes = diskInfo.numTotalBytes; + } + if (cause) { + statusReport.cause = cause; + } + if (exception2) { + statusReport.exception = exception2; + } + if (status === "success" || status === "failure" || status === "aborted" || status === "user-error") { + statusReport.completed_at = (/* @__PURE__ */ new Date()).toISOString(); + } + const matrix = getRequiredInput("matrix"); + if (matrix) { + statusReport.matrix_vars = matrix; + } + if ("RUNNER_ARCH" in process.env) { + statusReport.runner_arch = process.env["RUNNER_ARCH"]; + } + if (!(runnerOs === "Linux" && isSelfHostedRunner())) { + statusReport.runner_os_release = os3.release(); + } + if (codeQlCliVersion !== void 0) { + statusReport.codeql_version = codeQlCliVersion.version; + } + const imageVersion = process.env["ImageVersion"]; + if (imageVersion) { + statusReport.runner_image_version = imageVersion; + } + return statusReport; + } catch (e) { + logger.warning( + `Failed to gather information for telemetry: ${getErrorMessage(e)}. 
Will skip sending status report.` + ); + if (isInTestMode()) { + throw e; + } + return void 0; + } +} +var OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`."; +var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpoint. Please update to a compatible version of `codeql-action`."; +async function sendStatusReport(statusReport) { + setJobStatusIfUnsuccessful(statusReport.status); + const statusReportJSON = JSON.stringify(statusReport); + core15.debug(`Sending status report: ${statusReportJSON}`); + if (isInTestMode()) { + core15.debug("In test mode. Status reports are not uploaded."); + return; + } + const nwo = getRepositoryNwo(); + const client = getApiClient(); + try { + await client.request( + "PUT /repos/:owner/:repo/code-scanning/analysis/status", + { + owner: nwo.owner, + repo: nwo.repo, + data: statusReportJSON + } + ); + } catch (e) { + const httpError = asHTTPError(e); + if (httpError !== void 0) { + switch (httpError.status) { + case 403: + if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { + core15.warning( + `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` + ); + } else { + core15.warning( + `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}` + ); + } + return; + case 404: + core15.warning(httpError.message); + return; + case 422: + if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { + core15.debug(INCOMPATIBLE_MSG); + } else { + core15.debug(OUT_OF_DATE_MSG); + } + return; + } + } + core15.warning( + `An unexpected error occurred when sending a status report: ${getErrorMessage( + e + )}` + ); + } +} +async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error3, logger) { + try { + const statusReport = await createStatusReportBase( + actionName, + "failure", + actionStartedAt, + void 0, + void 0, + logger, + `Unhandled CodeQL Action error: ${getErrorMessage(error3)}`, + error3 instanceof Error ? 
error3.stack : void 0 + ); + if (statusReport !== void 0) { + await sendStatusReport(statusReport); + } + } catch (e) { + logger.warning( + `Failed to send the unhandled error status report: ${getErrorMessage(e)}.` + ); + if (isInTestMode()) { + throw e; + } } - return jobStatusFromEnvironment; } // src/init-action-post.ts @@ -132945,7 +132928,7 @@ async function run2(startedAt) { } } catch (unwrappedError) { const error3 = wrapError(unwrappedError); - core17.setFailed(error3.message); + core16.setFailed(error3.message); const statusReportBase2 = await createStatusReportBase( "init-post" /* InitPost */, getActionsStatus(error3), @@ -132961,7 +132944,7 @@ async function run2(startedAt) { } return; } - const jobStatus = getFinalJobStatus(); + const jobStatus = getFinalJobStatus(config); logger.info(`CodeQL job status was ${getJobStatusDisplayName(jobStatus)}.`); const statusReportBase = await createStatusReportBase( "init-post" /* InitPost */, @@ -132975,7 +132958,7 @@ async function run2(startedAt) { const statusReport = { ...statusReportBase, ...uploadFailedSarifResult, - job_status: getFinalJobStatus(), + job_status: jobStatus, dependency_caching_usage: dependencyCachingUsage }; logger.info("Sending status report for init-post step."); @@ -132983,13 +132966,33 @@ async function run2(startedAt) { logger.info("Status report sent for init-post step."); } } +function getFinalJobStatus(config) { + const jobStatusFromEnvironment = process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */]; + if (jobStatusFromEnvironment !== void 0) { + if (Object.values(JobStatus).includes(jobStatusFromEnvironment)) { + return jobStatusFromEnvironment; + } + return "JOB_STATUS_UNKNOWN" /* UnknownStatus */; + } + let jobStatus; + if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] === "true") { + core16.exportVariable("CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, "JOB_STATUS_SUCCESS" /* SuccessStatus */); + jobStatus = "JOB_STATUS_SUCCESS" /* SuccessStatus */; + } else if (config !== void 0) { + jobStatus = "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */; + } else { + jobStatus = "JOB_STATUS_UNKNOWN" /* UnknownStatus */; + } + core16.exportVariable("CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, jobStatus); + return jobStatus; +} async function runWrapper() { const startedAt = /* @__PURE__ */ new Date(); const logger = getActionsLogger(); try { await run2(startedAt); } catch (error3) { - core17.setFailed(`init post action failed: ${wrapError(error3).message}`); + core16.setFailed(`init post action failed: ${wrapError(error3).message}`); await sendUnhandledErrorStatusReport( "init-post" /* InitPost */, startedAt, diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index 68bfda9a29..dfe2606468 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -1,6 +1,5 @@ import * as fs from "fs"; -import * as core from "@actions/core"; import * as github from "@actions/github"; import * as actionsUtil from "./actions-util"; @@ -129,48 +128,32 @@ export async function tryUploadSarifIfRunFailed( features: FeatureEnablement, logger: Logger, ): Promise { - if (process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] !== "true") { - // If analyze didn't complete successfully and the job status hasn't - // already been set to Failure/ConfigurationError previously, this - // means that something along the way failed in a step that is not - // owned by the Action, for example a manual build step. 
We - // consider this a configuration error. - core.exportVariable( - EnvVar.JOB_STATUS, - process.env[EnvVar.JOB_STATUS] ?? JobStatus.ConfigErrorStatus, - ); - - // If the only enabled analysis kind is `code-quality`, then we shouldn't - // upload the failed SARIF to Code Scanning. - if (!isCodeScanningEnabled(config)) { - return { - upload_failed_run_skipped_because: "Code Scanning is not enabled.", - }; - } - - try { - return await maybeUploadFailedSarif( - config, - repositoryNwo, - features, - logger, - ); - } catch (e) { - logger.debug( - `Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`, - ); - return createFailedUploadFailedSarifResult(e); - } - } else { - core.exportVariable( - EnvVar.JOB_STATUS, - process.env[EnvVar.JOB_STATUS] ?? JobStatus.SuccessStatus, - ); + // If the only enabled analysis kind is `code-quality`, then we shouldn't + // upload the failed SARIF to Code Scanning. + if (!isCodeScanningEnabled(config)) { + return { + upload_failed_run_skipped_because: "Code Scanning is not enabled.", + }; + } + if (process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] === "true") { return { upload_failed_run_skipped_because: "Analyze Action completed successfully", }; } + try { + return await maybeUploadFailedSarif( + config, + repositoryNwo, + features, + logger, + ); + } catch (e) { + logger.debug( + `Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`, + ); + return createFailedUploadFailedSarifResult(e); + } } export async function run( @@ -335,20 +318,3 @@ async function removeUploadedSarif( ); } } - -/** - * Returns the final job status sent in the `init-post` Action, based on the - * current value of the JOB_STATUS environment variable. If the variable is - * unset, or if its value is not one of the JobStatus enum values, returns - * Unknown. Otherwise it returns the status set in the environment variable. - */ -export function getFinalJobStatus(): JobStatus { - const jobStatusFromEnvironment = process.env[EnvVar.JOB_STATUS]; - if ( - !jobStatusFromEnvironment || - !Object.values(JobStatus).includes(jobStatusFromEnvironment as JobStatus) - ) { - return JobStatus.UnknownStatus; - } - return jobStatusFromEnvironment as JobStatus; -} diff --git a/src/init-action-post.ts b/src/init-action-post.ts index 02a856862a..340e01b790 100644 --- a/src/init-action-post.ts +++ b/src/init-action-post.ts @@ -14,12 +14,13 @@ import { import { getGitHubVersion } from "./api-client"; import { CachingKind } from "./caching-utils"; import { getCodeQL } from "./codeql"; -import { Config, getConfig } from "./config-utils"; +import { type Config, getConfig } from "./config-utils"; import * as debugArtifacts from "./debug-artifacts"; import { DependencyCachingUsageReport, getDependencyCacheUsage, } from "./dependency-caching"; +import { EnvVar } from "./environment"; import { Features } from "./feature-flags"; import * as gitUtils from "./git-utils"; import * as initActionPostHelper from "./init-action-post-helper"; @@ -33,6 +34,7 @@ import { getActionsStatus, ActionName, getJobStatusDisplayName, + JobStatus, } from "./status-report"; import { checkDiskUsage, checkGitHubVersionInRange, wrapError } from "./util"; @@ -85,7 +87,7 @@ async function run(startedAt: Date) { logger, ); - // If we are analysing the default branch and some kind of caching is enabled, + // If we are analyzing the default branch and some kind of caching is enabled, // then try to determine our overall cache usage for dependency caches. 
We only // do this under these circumstances to avoid slowing down analyses for PRs // and where caching may not be enabled. @@ -115,7 +117,7 @@ async function run(startedAt: Date) { } return; } - const jobStatus = initActionPostHelper.getFinalJobStatus(); + const jobStatus = getFinalJobStatus(config); logger.info(`CodeQL job status was ${getJobStatusDisplayName(jobStatus)}.`); const statusReportBase = await createStatusReportBase( @@ -130,7 +132,7 @@ async function run(startedAt: Date) { const statusReport: InitPostStatusReport = { ...statusReportBase, ...uploadFailedSarifResult, - job_status: initActionPostHelper.getFinalJobStatus(), + job_status: jobStatus, dependency_caching_usage: dependencyCachingUsage, }; logger.info("Sending status report for init-post step."); @@ -139,6 +141,51 @@ async function run(startedAt: Date) { } } +function getFinalJobStatus(config: Config | undefined): JobStatus { + const jobStatusFromEnvironment = process.env[EnvVar.JOB_STATUS]; + + if (jobStatusFromEnvironment !== undefined) { + // Validate the job status from the environment. If it is invalid, return unknown. + if ( + Object.values(JobStatus).includes(jobStatusFromEnvironment as JobStatus) + ) { + return jobStatusFromEnvironment as JobStatus; + } + return JobStatus.UnknownStatus; + } + + let jobStatus: JobStatus; + + if (process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] === "true") { + core.exportVariable(EnvVar.JOB_STATUS, JobStatus.SuccessStatus); + jobStatus = JobStatus.SuccessStatus; + } else if (config !== undefined) { + // - We have computed a CodeQL config + // - Analyze didn't complete successfully + // - The job status hasn't already been set to Failure/ConfigurationError + // + // This means that something along the way failed in a step that is not + // owned by the Action, for example a manual build step. We consider this a + // configuration error. + jobStatus = JobStatus.ConfigErrorStatus; + } else { + // If we didn't manage to compute a CodeQL config, it is unclear at this + // point why the analyze Action didn't complete. + // - One possibility is that the workflow run was cancelled. We could + // consider determining workflow cancellation using the GitHub API, but + // for now we treat all these cases as unknown. + // - Another possibility is that we're running a workflow that only runs + // `init`, for instance a workflow that was created before `setup-codeql` + // was available and uses `init` just to set up the CodeQL tools. + jobStatus = JobStatus.UnknownStatus; + } + + // This shouldn't be necessary, but in the odd case that we run more than one + // `init` post step, ensure the job status is consistent between them. + core.exportVariable(EnvVar.JOB_STATUS, jobStatus); + return jobStatus; +} + async function runWrapper() { const startedAt = new Date(); const logger = getActionsLogger();