diff --git a/.github/workflows/__quality-queries.yml b/.github/workflows/__quality-queries.yml index c4aa5ffaf1..d010153169 100644 --- a/.github/workflows/__quality-queries.yml +++ b/.github/workflows/__quality-queries.yml @@ -80,6 +80,7 @@ jobs: with: output: ${{ runner.temp }}/results upload-database: false + post-processed-sarif-path: ${{ runner.temp }}/post-processed - name: Upload security SARIF if: contains(matrix.analysis-kinds, 'code-scanning') uses: actions/upload-artifact@v4 @@ -96,6 +97,14 @@ jobs: quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json path: ${{ runner.temp }}/results/javascript.quality.sarif retention-days: 7 + - name: Upload post-processed SARIF + uses: actions/upload-artifact@v4 + with: + name: | + post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json + path: ${{ runner.temp }}/post-processed + retention-days: 7 + if-no-files-found: error - name: Check quality query does not appear in security SARIF if: contains(matrix.analysis-kinds, 'code-scanning') uses: actions/github-script@v8 diff --git a/analyze/action.yml b/analyze/action.yml index 7fc118b156..fd6719df47 100644 --- a/analyze/action.yml +++ b/analyze/action.yml @@ -6,7 +6,7 @@ inputs: description: The name of the check run to add text to. required: false output: - description: The path of the directory in which to save the SARIF results + description: The path of the directory in which to save the SARIF results from the CodeQL CLI. required: false default: "../results" upload: @@ -70,6 +70,12 @@ inputs: description: Whether to upload the resulting CodeQL database required: false default: "true" + post-processed-sarif-path: + description: >- + Before uploading the SARIF files produced by the CodeQL CLI, the CodeQL Action may perform some post-processing + on them. Ordinarily, these post-processed SARIF files are not saved to disk. However, if a path is provided as an + argument for this input, they are written to the specified directory. + required: false wait-for-processing: description: If true, the Action will wait for the uploaded SARIF to be processed before completing. 
required: true diff --git a/lib/analyze-action.js b/lib/analyze-action.js index f98a421a6f..7a8bf55fbd 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -95900,26 +95900,11 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo } return payloadObj; } -async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { - const sarifPaths = getSarifFilePaths( - inputSarifPath, - uploadTarget.sarifPredicate - ); - return uploadSpecifiedFiles( - sarifPaths, - checkoutPath, - category, - features, - logger, - uploadTarget - ); -} -async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) { - logger.startGroup(`Uploading ${uploadTarget.name} results`); - logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`); +async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { + logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); let sarif; - category = uploadTarget.fixCategory(logger, category); + category = analysis.fixCategory(logger, category); if (sarifPaths.length > 1) { for (const sarifPath of sarifPaths) { const parsedSarif = readSarifFile(sarifPath); @@ -95947,28 +95932,72 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features analysisKey, environment ); + return { sarif, analysisKey, environment }; +} +async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) { + const outputPath = pathInput || process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */]; + if (outputPath !== void 0 && outputPath.trim() !== "") { + dumpSarifFile( + JSON.stringify(postProcessingResults.sarif), + outputPath, + logger, + uploadTarget + ); + } else { + logger.debug(`Not writing post-processed SARIF files.`); + } +} +async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { + const sarifPaths = getSarifFilePaths( + inputSarifPath, + uploadTarget.sarifPredicate + ); + return uploadSpecifiedFiles( + sarifPaths, + checkoutPath, + category, + features, + logger, + uploadTarget + ); +} +async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) { + const processingResults = await postProcessSarifFiles( + logger, + features, + checkoutPath, + sarifPaths, + category, + uploadTarget + ); + return uploadPostProcessedFiles( + logger, + checkoutPath, + uploadTarget, + processingResults + ); +} +async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) { + logger.startGroup(`Uploading ${uploadTarget.name} results`); + const sarif = postProcessingResults.sarif; const toolNames = getToolNames(sarif); logger.debug(`Validating that each SARIF run has a unique category`); validateUniqueCategory(sarif, uploadTarget.sentinelPrefix); logger.debug(`Serializing SARIF for upload`); const sarifPayload = JSON.stringify(sarif); - const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */]; - if (dumpDir) { - dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget); - } logger.debug(`Compressing serialized SARIF`); const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; const payload = buildPayload( await getCommitOid(checkoutPath), await getRef(), - analysisKey, + 
postProcessingResults.analysisKey, getRequiredEnvParam("GITHUB_WORKFLOW"), zippedSarif, getWorkflowRunID(), getWorkflowRunAttempt(), checkoutURI, - environment, + postProcessingResults.environment, toolNames, await determineBaseBranchHeadCommitOid() ); @@ -95999,14 +96028,14 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { fs18.mkdirSync(outputDir, { recursive: true }); } else if (!fs18.lstatSync(outputDir).isDirectory()) { throw new ConfigurationError( - `The path specified by the ${"CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */} environment variable exists and is not a directory: ${outputDir}` + `The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}` ); } const outputFile = path18.resolve( outputDir, `upload${uploadTarget.sarifExtension}` ); - logger.info(`Dumping processed SARIF file to ${outputFile}`); + logger.info(`Writing processed SARIF file to ${outputFile}`); fs18.writeFileSync(outputFile, sarifPayload); } var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3; @@ -96162,7 +96191,7 @@ function filterAlertsByDiffRange(logger, sarif) { } // src/upload-sarif.ts -async function uploadSarif(logger, features, checkoutPath, sarifPath, category) { +async function postProcessAndUploadSarif(logger, features, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) { const sarifGroups = await getGroupedSarifFilePaths( logger, sarifPath @@ -96172,14 +96201,28 @@ async function uploadSarif(logger, features, checkoutPath, sarifPath, category) sarifGroups )) { const analysisConfig = getAnalysisConfig(analysisKind); - uploadResults[analysisKind] = await uploadSpecifiedFiles( - sarifFiles, + const postProcessingResults = await postProcessSarifFiles( + logger, + features, checkoutPath, + sarifFiles, category, - features, - logger, analysisConfig ); + await writePostProcessedFiles( + logger, + postProcessedOutputPath, + analysisConfig, + postProcessingResults + ); + if (uploadKind === "always") { + uploadResults[analysisKind] = await uploadPostProcessedFiles( + logger, + checkoutPath, + analysisConfig, + postProcessingResults + ); + } } return uploadResults; } @@ -96378,19 +96421,23 @@ async function run() { } core14.setOutput("db-locations", dbLocations); core14.setOutput("sarif-output", import_path4.default.resolve(outputDir)); - const uploadInput = getOptionalInput("upload"); - if (runStats && getUploadValue(uploadInput) === "always") { + const uploadKind = getUploadValue( + getOptionalInput("upload") + ); + if (runStats) { const checkoutPath = getRequiredInput("checkout_path"); const category = getOptionalInput("category"); if (await features.getValue("analyze_use_new_upload" /* AnalyzeUseNewUpload */)) { - uploadResults = await uploadSarif( + uploadResults = await postProcessAndUploadSarif( logger, features, + uploadKind, checkoutPath, outputDir, - category + category, + getOptionalInput("post-processed-sarif-path") ); - } else { + } else if (uploadKind === "always") { uploadResults = {}; if (isCodeScanningEnabled(config)) { uploadResults["code-scanning" /* CodeScanning */] = await uploadFiles( @@ -96412,6 +96459,9 @@ async function run() { CodeQuality ); } + } else { + uploadResults = {}; + logger.info("Not uploading results"); } if (uploadResults["code-scanning" /* CodeScanning */] !== void 0) { core14.setOutput( diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 6fdfc28db8..a7bd4c7601 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -133311,26 +133311,11 @@ 
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo } return payloadObj; } -async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { - const sarifPaths = getSarifFilePaths( - inputSarifPath, - uploadTarget.sarifPredicate - ); - return uploadSpecifiedFiles( - sarifPaths, - checkoutPath, - category, - features, - logger, - uploadTarget - ); -} -async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) { - logger.startGroup(`Uploading ${uploadTarget.name} results`); - logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`); +async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { + logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); let sarif; - category = uploadTarget.fixCategory(logger, category); + category = analysis.fixCategory(logger, category); if (sarifPaths.length > 1) { for (const sarifPath of sarifPaths) { const parsedSarif = readSarifFile(sarifPath); @@ -133358,28 +133343,59 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features analysisKey, environment ); + return { sarif, analysisKey, environment }; +} +async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { + const sarifPaths = getSarifFilePaths( + inputSarifPath, + uploadTarget.sarifPredicate + ); + return uploadSpecifiedFiles( + sarifPaths, + checkoutPath, + category, + features, + logger, + uploadTarget + ); +} +async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) { + const processingResults = await postProcessSarifFiles( + logger, + features, + checkoutPath, + sarifPaths, + category, + uploadTarget + ); + return uploadPostProcessedFiles( + logger, + checkoutPath, + uploadTarget, + processingResults + ); +} +async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) { + logger.startGroup(`Uploading ${uploadTarget.name} results`); + const sarif = postProcessingResults.sarif; const toolNames = getToolNames(sarif); logger.debug(`Validating that each SARIF run has a unique category`); validateUniqueCategory(sarif, uploadTarget.sentinelPrefix); logger.debug(`Serializing SARIF for upload`); const sarifPayload = JSON.stringify(sarif); - const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */]; - if (dumpDir) { - dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget); - } logger.debug(`Compressing serialized SARIF`); const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; const payload = buildPayload( await getCommitOid(checkoutPath), await getRef(), - analysisKey, + postProcessingResults.analysisKey, getRequiredEnvParam("GITHUB_WORKFLOW"), zippedSarif, getWorkflowRunID(), getWorkflowRunAttempt(), checkoutURI, - environment, + postProcessingResults.environment, toolNames, await determineBaseBranchHeadCommitOid() ); @@ -133405,21 +133421,6 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features sarifID }; } -function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { - if (!fs17.existsSync(outputDir)) { - fs17.mkdirSync(outputDir, { recursive: true }); - } else if (!fs17.lstatSync(outputDir).isDirectory()) { - throw new ConfigurationError( - `The path specified by the 
${"CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */} environment variable exists and is not a directory: ${outputDir}` - ); - } - const outputFile = path17.resolve( - outputDir, - `upload${uploadTarget.sarifExtension}` - ); - logger.info(`Dumping processed SARIF file to ${outputFile}`); - fs17.writeFileSync(outputFile, sarifPayload); -} var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3; var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3; async function waitForProcessing(repositoryNwo, sarifID, logger, options = { diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 490fabfa8e..e194674423 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -84847,6 +84847,7 @@ __export(upload_lib_exports, { getGroupedSarifFilePaths: () => getGroupedSarifFilePaths, getSarifFilePaths: () => getSarifFilePaths, populateRunAutomationDetails: () => populateRunAutomationDetails, + postProcessSarifFiles: () => postProcessSarifFiles, readSarifFile: () => readSarifFile, shouldConsiderConfigurationError: () => shouldConsiderConfigurationError, shouldConsiderInvalidRequest: () => shouldConsiderInvalidRequest, @@ -84854,10 +84855,11 @@ __export(upload_lib_exports, { throwIfCombineSarifFilesDisabled: () => throwIfCombineSarifFilesDisabled, uploadFiles: () => uploadFiles, uploadPayload: () => uploadPayload, - uploadSpecifiedFiles: () => uploadSpecifiedFiles, + uploadPostProcessedFiles: () => uploadPostProcessedFiles, validateSarifFileSchema: () => validateSarifFileSchema, validateUniqueCategory: () => validateUniqueCategory, - waitForProcessing: () => waitForProcessing + waitForProcessing: () => waitForProcessing, + writePostProcessedFiles: () => writePostProcessedFiles }); module.exports = __toCommonJS(upload_lib_exports); var fs13 = __toESM(require("fs")); @@ -92712,26 +92714,11 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo } return payloadObj; } -async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { - const sarifPaths = getSarifFilePaths( - inputSarifPath, - uploadTarget.sarifPredicate - ); - return uploadSpecifiedFiles( - sarifPaths, - checkoutPath, - category, - features, - logger, - uploadTarget - ); -} -async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) { - logger.startGroup(`Uploading ${uploadTarget.name} results`); - logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`); +async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { + logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); let sarif; - category = uploadTarget.fixCategory(logger, category); + category = analysis.fixCategory(logger, category); if (sarifPaths.length > 1) { for (const sarifPath of sarifPaths) { const parsedSarif = readSarifFile(sarifPath); @@ -92759,28 +92746,72 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features analysisKey, environment ); + return { sarif, analysisKey, environment }; +} +async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) { + const outputPath = pathInput || process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */]; + if (outputPath !== void 0 && outputPath.trim() !== "") { + dumpSarifFile( + JSON.stringify(postProcessingResults.sarif), + outputPath, + logger, + uploadTarget + ); + } else { + logger.debug(`Not writing post-processed SARIF files.`); + } +} +async 
function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { + const sarifPaths = getSarifFilePaths( + inputSarifPath, + uploadTarget.sarifPredicate + ); + return uploadSpecifiedFiles( + sarifPaths, + checkoutPath, + category, + features, + logger, + uploadTarget + ); +} +async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) { + const processingResults = await postProcessSarifFiles( + logger, + features, + checkoutPath, + sarifPaths, + category, + uploadTarget + ); + return uploadPostProcessedFiles( + logger, + checkoutPath, + uploadTarget, + processingResults + ); +} +async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) { + logger.startGroup(`Uploading ${uploadTarget.name} results`); + const sarif = postProcessingResults.sarif; const toolNames = getToolNames(sarif); logger.debug(`Validating that each SARIF run has a unique category`); validateUniqueCategory(sarif, uploadTarget.sentinelPrefix); logger.debug(`Serializing SARIF for upload`); const sarifPayload = JSON.stringify(sarif); - const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */]; - if (dumpDir) { - dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget); - } logger.debug(`Compressing serialized SARIF`); const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; const payload = buildPayload( await getCommitOid(checkoutPath), await getRef(), - analysisKey, + postProcessingResults.analysisKey, getRequiredEnvParam("GITHUB_WORKFLOW"), zippedSarif, getWorkflowRunID(), getWorkflowRunAttempt(), checkoutURI, - environment, + postProcessingResults.environment, toolNames, await determineBaseBranchHeadCommitOid() ); @@ -92811,14 +92842,14 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { fs13.mkdirSync(outputDir, { recursive: true }); } else if (!fs13.lstatSync(outputDir).isDirectory()) { throw new ConfigurationError( - `The path specified by the ${"CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */} environment variable exists and is not a directory: ${outputDir}` + `The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}` ); } const outputFile = path14.resolve( outputDir, `upload${uploadTarget.sarifExtension}` ); - logger.info(`Dumping processed SARIF file to ${outputFile}`); + logger.info(`Writing processed SARIF file to ${outputFile}`); fs13.writeFileSync(outputFile, sarifPayload); } var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3; @@ -92980,6 +93011,7 @@ function filterAlertsByDiffRange(logger, sarif) { getGroupedSarifFilePaths, getSarifFilePaths, populateRunAutomationDetails, + postProcessSarifFiles, readSarifFile, shouldConsiderConfigurationError, shouldConsiderInvalidRequest, @@ -92987,10 +93019,11 @@ function filterAlertsByDiffRange(logger, sarif) { throwIfCombineSarifFilesDisabled, uploadFiles, uploadPayload, - uploadSpecifiedFiles, + uploadPostProcessedFiles, validateSarifFileSchema, validateUniqueCategory, - waitForProcessing + waitForProcessing, + writePostProcessedFiles }); /*! 
Bundled license information: diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index 79d0f16926..d91b83b426 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -93370,12 +93370,11 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo } return payloadObj; } -async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) { - logger.startGroup(`Uploading ${uploadTarget.name} results`); - logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`); +async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { + logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); let sarif; - category = uploadTarget.fixCategory(logger, category); + category = analysis.fixCategory(logger, category); if (sarifPaths.length > 1) { for (const sarifPath of sarifPaths) { const parsedSarif = readSarifFile(sarifPath); @@ -93403,28 +93402,42 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features analysisKey, environment ); + return { sarif, analysisKey, environment }; +} +async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) { + const outputPath = pathInput || process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */]; + if (outputPath !== void 0 && outputPath.trim() !== "") { + dumpSarifFile( + JSON.stringify(postProcessingResults.sarif), + outputPath, + logger, + uploadTarget + ); + } else { + logger.debug(`Not writing post-processed SARIF files.`); + } +} +async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) { + logger.startGroup(`Uploading ${uploadTarget.name} results`); + const sarif = postProcessingResults.sarif; const toolNames = getToolNames(sarif); logger.debug(`Validating that each SARIF run has a unique category`); validateUniqueCategory(sarif, uploadTarget.sentinelPrefix); logger.debug(`Serializing SARIF for upload`); const sarifPayload = JSON.stringify(sarif); - const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */]; - if (dumpDir) { - dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget); - } logger.debug(`Compressing serialized SARIF`); const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; const payload = buildPayload( await getCommitOid(checkoutPath), await getRef(), - analysisKey, + postProcessingResults.analysisKey, getRequiredEnvParam("GITHUB_WORKFLOW"), zippedSarif, getWorkflowRunID(), getWorkflowRunAttempt(), checkoutURI, - environment, + postProcessingResults.environment, toolNames, await determineBaseBranchHeadCommitOid() ); @@ -93455,14 +93468,14 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { fs14.mkdirSync(outputDir, { recursive: true }); } else if (!fs14.lstatSync(outputDir).isDirectory()) { throw new ConfigurationError( - `The path specified by the ${"CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */} environment variable exists and is not a directory: ${outputDir}` + `The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}` ); } const outputFile = path15.resolve( outputDir, `upload${uploadTarget.sarifExtension}` ); - logger.info(`Dumping processed SARIF file to ${outputFile}`); + logger.info(`Writing processed SARIF file to ${outputFile}`); 
fs14.writeFileSync(outputFile, sarifPayload); } var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3; @@ -93618,7 +93631,7 @@ function filterAlertsByDiffRange(logger, sarif) { } // src/upload-sarif.ts -async function uploadSarif(logger, features, checkoutPath, sarifPath, category) { +async function postProcessAndUploadSarif(logger, features, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) { const sarifGroups = await getGroupedSarifFilePaths( logger, sarifPath @@ -93628,14 +93641,28 @@ async function uploadSarif(logger, features, checkoutPath, sarifPath, category) sarifGroups )) { const analysisConfig = getAnalysisConfig(analysisKind); - uploadResults[analysisKind] = await uploadSpecifiedFiles( - sarifFiles, + const postProcessingResults = await postProcessSarifFiles( + logger, + features, checkoutPath, + sarifFiles, category, - features, - logger, analysisConfig ); + await writePostProcessedFiles( + logger, + postProcessedOutputPath, + analysisConfig, + postProcessingResults + ); + if (uploadKind === "always") { + uploadResults[analysisKind] = await uploadPostProcessedFiles( + logger, + checkoutPath, + analysisConfig, + postProcessingResults + ); + } } return uploadResults; } @@ -93687,9 +93714,10 @@ async function run() { const sarifPath = getRequiredInput("sarif_file"); const checkoutPath = getRequiredInput("checkout_path"); const category = getOptionalInput("category"); - const uploadResults = await uploadSarif( + const uploadResults = await postProcessAndUploadSarif( logger, features, + "always", checkoutPath, sarifPath, category diff --git a/pr-checks/checks/quality-queries.yml b/pr-checks/checks/quality-queries.yml index b8420ad209..ec88e44b30 100644 --- a/pr-checks/checks/quality-queries.yml +++ b/pr-checks/checks/quality-queries.yml @@ -36,6 +36,7 @@ steps: with: output: "${{ runner.temp }}/results" upload-database: false + post-processed-sarif-path: "${{ runner.temp }}/post-processed" - name: Upload security SARIF if: contains(matrix.analysis-kinds, 'code-scanning') uses: actions/upload-artifact@v4 @@ -52,6 +53,14 @@ steps: quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json path: "${{ runner.temp }}/results/javascript.quality.sarif" retention-days: 7 + - name: Upload post-processed SARIF + uses: actions/upload-artifact@v4 + with: + name: | + post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json + path: "${{ runner.temp }}/post-processed" + retention-days: 7 + if-no-files-found: error - name: Check quality query does not appear in security SARIF if: contains(matrix.analysis-kinds, 'code-scanning') uses: actions/github-script@v8 diff --git a/src/analyze-action.ts b/src/analyze-action.ts index b6aa05ebec..9ba010855b 100644 --- a/src/analyze-action.ts +++ b/src/analyze-action.ts @@ -52,7 +52,7 @@ import { } from "./trap-caching"; import * as uploadLib from "./upload-lib"; import { UploadResult } from "./upload-lib"; -import { uploadSarif } from "./upload-sarif"; +import { postProcessAndUploadSarif } from "./upload-sarif"; import * as util from "./util"; interface AnalysisStatusReport @@ -344,20 +344,24 @@ async function run() { } core.setOutput("db-locations", dbLocations); core.setOutput("sarif-output", path.resolve(outputDir)); - const uploadInput = actionsUtil.getOptionalInput("upload"); - if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") { + const uploadKind = actionsUtil.getUploadValue( + actionsUtil.getOptionalInput("upload"), + ); + if (runStats) 
{ const checkoutPath = actionsUtil.getRequiredInput("checkout_path"); const category = actionsUtil.getOptionalInput("category"); if (await features.getValue(Feature.AnalyzeUseNewUpload)) { - uploadResults = await uploadSarif( + uploadResults = await postProcessAndUploadSarif( logger, features, + uploadKind, checkoutPath, outputDir, category, + actionsUtil.getOptionalInput("post-processed-sarif-path"), ); - } else { + } else if (uploadKind === "always") { uploadResults = {}; if (isCodeScanningEnabled(config)) { @@ -383,6 +387,9 @@ async function run() { analyses.CodeQuality, ); } + } else { + uploadResults = {}; + logger.info("Not uploading results"); } // Set the SARIF id outputs only if we have results for them, to avoid diff --git a/src/upload-lib.ts b/src/upload-lib.ts index b765cd3977..573ca670cb 100644 --- a/src/upload-lib.ts +++ b/src/upload-lib.ts @@ -688,51 +688,39 @@ export function buildPayload( return payloadObj; } -/** - * Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers - * to. - */ -export async function uploadFiles( - inputSarifPath: string, - checkoutPath: string, - category: string | undefined, - features: FeatureEnablement, - logger: Logger, - uploadTarget: analyses.AnalysisConfig, -): Promise { - const sarifPaths = getSarifFilePaths( - inputSarifPath, - uploadTarget.sarifPredicate, - ); - - return uploadSpecifiedFiles( - sarifPaths, - checkoutPath, - category, - features, - logger, - uploadTarget, - ); +export interface PostProcessingResults { + sarif: util.SarifFile; + analysisKey: string; + environment: string; } /** - * Uploads the given array of SARIF files. + * Performs post-processing of the SARIF files given by `sarifPaths`. + * + * @param logger The logger to use. + * @param features Information about enabled features. + * @param checkoutPath The path at which the repository was checked out. + * @param sarifPaths The paths of the SARIF files to post-process. + * @param category The analysis category. + * @param analysis The analysis configuration. + * + * @returns The results of post-processing the SARIF files, + * including the resulting SARIF file. */ -export async function uploadSpecifiedFiles( - sarifPaths: string[], +export async function postProcessSarifFiles( + logger: Logger, + features: FeatureEnablement, checkoutPath: string, + sarifPaths: string[], category: string | undefined, - features: FeatureEnablement, - logger: Logger, - uploadTarget: analyses.AnalysisConfig, -): Promise { - logger.startGroup(`Uploading ${uploadTarget.name} results`); - logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`); + analysis: analyses.AnalysisConfig, +): Promise { + logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); let sarif: SarifFile; - category = uploadTarget.fixCategory(logger, category); + category = analysis.fixCategory(logger, category); if (sarifPaths.length > 1) { // Validate that the files we were asked to upload are all valid SARIF files @@ -768,6 +756,113 @@ export async function uploadSpecifiedFiles( environment, ); + return { sarif, analysisKey, environment }; +} + +/** + * Writes the post-processed SARIF file to disk if required, based on `pathInput` or the `SARIF_DUMP_DIR` environment variable. + * + * @param logger The logger to use. + * @param pathInput The input provided for `post-processed-sarif-path`. + * @param uploadTarget The analysis configuration for the upload target. + * @param postProcessingResults The results of post-processing SARIF files. 
+ */ +export async function writePostProcessedFiles( + logger: Logger, + pathInput: string | undefined, + uploadTarget: analyses.AnalysisConfig, + postProcessingResults: PostProcessingResults, +) { + // If there's an explicit input, use that. Otherwise, use the value from the environment variable. + const outputPath = pathInput || process.env[EnvVar.SARIF_DUMP_DIR]; + + // If we have a non-empty output path, write the SARIF file to it. + if (outputPath !== undefined && outputPath.trim() !== "") { + dumpSarifFile( + JSON.stringify(postProcessingResults.sarif), + outputPath, + logger, + uploadTarget, + ); + } else { + logger.debug(`Not writing post-processed SARIF files.`); + } +} + +/** + * Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers + * to. + */ +export async function uploadFiles( + inputSarifPath: string, + checkoutPath: string, + category: string | undefined, + features: FeatureEnablement, + logger: Logger, + uploadTarget: analyses.AnalysisConfig, +): Promise { + const sarifPaths = getSarifFilePaths( + inputSarifPath, + uploadTarget.sarifPredicate, + ); + + return uploadSpecifiedFiles( + sarifPaths, + checkoutPath, + category, + features, + logger, + uploadTarget, + ); +} + +/** + * Uploads the given array of SARIF files. + */ +async function uploadSpecifiedFiles( + sarifPaths: string[], + checkoutPath: string, + category: string | undefined, + features: FeatureEnablement, + logger: Logger, + uploadTarget: analyses.AnalysisConfig, +): Promise { + const processingResults: PostProcessingResults = await postProcessSarifFiles( + logger, + features, + checkoutPath, + sarifPaths, + category, + uploadTarget, + ); + + return uploadPostProcessedFiles( + logger, + checkoutPath, + uploadTarget, + processingResults, + ); +} + +/** + * Uploads the results of post-processing SARIF files to the specified upload target. + * + * @param logger The logger to use. + * @param checkoutPath The path at which the repository was checked out. + * @param uploadTarget The analysis configuration. + * @param postProcessingResults The results of post-processing SARIF files. + * + * @returns The results of uploading the `postProcessingResults` to `uploadTarget`. 
+ */ +export async function uploadPostProcessedFiles( + logger: Logger, + checkoutPath: string, + uploadTarget: analyses.AnalysisConfig, + postProcessingResults: PostProcessingResults, +): Promise { + logger.startGroup(`Uploading ${uploadTarget.name} results`); + + const sarif = postProcessingResults.sarif; const toolNames = util.getToolNames(sarif); logger.debug(`Validating that each SARIF run has a unique category`); @@ -775,11 +870,6 @@ export async function uploadSpecifiedFiles( logger.debug(`Serializing SARIF for upload`); const sarifPayload = JSON.stringify(sarif); - const dumpDir = process.env[EnvVar.SARIF_DUMP_DIR]; - if (dumpDir) { - dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget); - } - logger.debug(`Compressing serialized SARIF`); const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; @@ -787,13 +877,13 @@ export async function uploadSpecifiedFiles( const payload = buildPayload( await gitUtils.getCommitOid(checkoutPath), await gitUtils.getRef(), - analysisKey, + postProcessingResults.analysisKey, util.getRequiredEnvParam("GITHUB_WORKFLOW"), zippedSarif, actionsUtil.getWorkflowRunID(), actionsUtil.getWorkflowRunAttempt(), checkoutURI, - environment, + postProcessingResults.environment, toolNames, await gitUtils.determineBaseBranchHeadCommitOid(), ); @@ -839,14 +929,14 @@ function dumpSarifFile( fs.mkdirSync(outputDir, { recursive: true }); } else if (!fs.lstatSync(outputDir).isDirectory()) { throw new ConfigurationError( - `The path specified by the ${EnvVar.SARIF_DUMP_DIR} environment variable exists and is not a directory: ${outputDir}`, + `The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`, ); } const outputFile = path.resolve( outputDir, `upload${uploadTarget.sarifExtension}`, ); - logger.info(`Dumping processed SARIF file to ${outputFile}`); + logger.info(`Writing processed SARIF file to ${outputFile}`); fs.writeFileSync(outputFile, sarifPayload); } diff --git a/src/upload-sarif-action.ts b/src/upload-sarif-action.ts index a2ef43eb44..338c9b6dc3 100644 --- a/src/upload-sarif-action.ts +++ b/src/upload-sarif-action.ts @@ -16,7 +16,7 @@ import { isThirdPartyAnalysis, } from "./status-report"; import * as upload_lib from "./upload-lib"; -import { uploadSarif } from "./upload-sarif"; +import { postProcessAndUploadSarif } from "./upload-sarif"; import { ConfigurationError, checkActionVersion, @@ -90,9 +90,10 @@ async function run() { const checkoutPath = actionsUtil.getRequiredInput("checkout_path"); const category = actionsUtil.getOptionalInput("category"); - const uploadResults = await uploadSarif( + const uploadResults = await postProcessAndUploadSarif( logger, features, + "always", checkoutPath, sarifPath, category, diff --git a/src/upload-sarif.test.ts b/src/upload-sarif.test.ts index 893330eda6..d32c0c0312 100644 --- a/src/upload-sarif.test.ts +++ b/src/upload-sarif.test.ts @@ -9,7 +9,7 @@ import { getRunnerLogger } from "./logging"; import { createFeatures, setupTests } from "./testing-utils"; import { UploadResult } from "./upload-lib"; import * as uploadLib from "./upload-lib"; -import { uploadSarif } from "./upload-sarif"; +import { postProcessAndUploadSarif } from "./upload-sarif"; import * as util from "./util"; setupTests(test); @@ -19,7 +19,27 @@ interface UploadSarifExpectedResult { expectedFiles?: string[]; } -const uploadSarifMacro = test.macro({ +function mockPostProcessSarifFiles() { + const postProcessSarifFiles = 
sinon.stub(uploadLib, "postProcessSarifFiles"); + + for (const analysisKind of Object.values(AnalysisKind)) { + const analysisConfig = getAnalysisConfig(analysisKind); + postProcessSarifFiles + .withArgs( + sinon.match.any, + sinon.match.any, + sinon.match.any, + sinon.match.any, + sinon.match.any, + analysisConfig, + ) + .resolves({ sarif: { runs: [] }, analysisKey: "", environment: "" }); + } + + return postProcessSarifFiles; +} + +const postProcessAndUploadSarifMacro = test.macro({ exec: async ( t: ExecutionContext, sarifFiles: string[], @@ -33,21 +53,16 @@ const uploadSarifMacro = test.macro({ const toFullPath = (filename: string) => path.join(tempDir, filename); - const uploadSpecifiedFiles = sinon.stub( + const postProcessSarifFiles = mockPostProcessSarifFiles(); + const uploadPostProcessedFiles = sinon.stub( uploadLib, - "uploadSpecifiedFiles", + "uploadPostProcessedFiles", ); for (const analysisKind of Object.values(AnalysisKind)) { - uploadSpecifiedFiles - .withArgs( - sinon.match.any, - sinon.match.any, - sinon.match.any, - features, - logger, - getAnalysisConfig(analysisKind), - ) + const analysisConfig = getAnalysisConfig(analysisKind); + uploadPostProcessedFiles + .withArgs(logger, sinon.match.any, analysisConfig, sinon.match.any) + .resolves(expectedResult[analysisKind as AnalysisKind]?.uploadResult); } @@ -56,53 +71,57 @@ const uploadSarifMacro = test.macro({ fs.writeFileSync(sarifFile, ""); } - const actual = await uploadSarif(logger, features, "", testPath); + const actual = await postProcessAndUploadSarif( + logger, + features, + "always", + "", + testPath, + ); for (const analysisKind of Object.values(AnalysisKind)) { const analysisKindResult = expectedResult[analysisKind]; if (analysisKindResult) { // We are expecting a result for this analysis kind, check that we have it. t.deepEqual(actual[analysisKind], analysisKindResult.uploadResult); - // Additionally, check that the mocked `uploadSpecifiedFiles` was called with only the file paths + // Additionally, check that the mocked `postProcessSarifFiles` was called with only the file paths // that we expected it to be called with. t.assert( - uploadSpecifiedFiles.calledWith( + postProcessSarifFiles.calledWith( + logger, + features, + sinon.match.any, analysisKindResult.expectedFiles?.map(toFullPath) ?? fullSarifPaths, sinon.match.any, - sinon.match.any, - features, - logger, getAnalysisConfig(analysisKind), ), ); } else { // Otherwise, we are not expecting a result for this analysis kind. However, note that `undefined` - // is also returned by our mocked `uploadSpecifiedFiles` when there is no expected result for this + // is also returned by our mocked `uploadPostProcessedFiles` when there is no expected result for this // analysis kind. t.is(actual[analysisKind], undefined); - // Therefore, we also check that the mocked `uploadSpecifiedFiles` was not called for this analysis kind. + // Therefore, we also check that the mocked `uploadPostProcessedFiles` was not called for this analysis kind. 
t.assert( - !uploadSpecifiedFiles.calledWith( - sinon.match.any, - sinon.match.any, - sinon.match.any, - features, + !uploadPostProcessedFiles.calledWith( logger, + sinon.match.any, getAnalysisConfig(analysisKind), + sinon.match.any, ), - `uploadSpecifiedFiles was called for ${analysisKind}, but should not have been.`, + `uploadPostProcessedFiles was called for ${analysisKind}, but should not have been.`, ); } } }); }, - title: (providedTitle = "") => `uploadSarif - ${providedTitle}`, + title: (providedTitle = "") => `postProcessAndUploadSarif - ${providedTitle}`, }); test( "SARIF file", - uploadSarifMacro, + postProcessAndUploadSarifMacro, ["test.sarif"], (tempDir) => path.join(tempDir, "test.sarif"), { @@ -117,7 +136,7 @@ test( test( "JSON file", - uploadSarifMacro, + postProcessAndUploadSarifMacro, ["test.json"], (tempDir) => path.join(tempDir, "test.json"), { @@ -132,7 +151,7 @@ test( test( "Code Scanning files", - uploadSarifMacro, + postProcessAndUploadSarifMacro, ["test.json", "test.sarif"], undefined, { @@ -148,7 +167,7 @@ test( test( "Code Quality file", - uploadSarifMacro, + postProcessAndUploadSarifMacro, ["test.quality.sarif"], (tempDir) => path.join(tempDir, "test.quality.sarif"), { @@ -163,7 +182,7 @@ test( test( "Mixed files", - uploadSarifMacro, + postProcessAndUploadSarifMacro, ["test.sarif", "test.quality.sarif"], undefined, { @@ -183,3 +202,65 @@ test( }, }, ); + +test("postProcessAndUploadSarif doesn't upload if upload is disabled", async (t) => { + await util.withTmpDir(async (tempDir) => { + const logger = getRunnerLogger(true); + const features = createFeatures([]); + + const toFullPath = (filename: string) => path.join(tempDir, filename); + + const postProcessSarifFiles = mockPostProcessSarifFiles(); + const uploadPostProcessedFiles = sinon.stub( + uploadLib, + "uploadPostProcessedFiles", + ); + + fs.writeFileSync(toFullPath("test.sarif"), ""); + fs.writeFileSync(toFullPath("test.quality.sarif"), ""); + + const actual = await postProcessAndUploadSarif( + logger, + features, + "never", + "", + tempDir, + ); + + t.truthy(actual); + t.assert(postProcessSarifFiles.calledTwice); + t.assert(uploadPostProcessedFiles.notCalled); + }); +}); + +test("postProcessAndUploadSarif writes post-processed SARIF files if output directory is provided", async (t) => { + await util.withTmpDir(async (tempDir) => { + const logger = getRunnerLogger(true); + const features = createFeatures([]); + + const toFullPath = (filename: string) => path.join(tempDir, filename); + + const postProcessSarifFiles = mockPostProcessSarifFiles(); + + fs.writeFileSync(toFullPath("test.sarif"), ""); + fs.writeFileSync(toFullPath("test.quality.sarif"), ""); + + const postProcessedOutPath = path.join(tempDir, "post-processed"); + const actual = await postProcessAndUploadSarif( + logger, + features, + "never", + "", + tempDir, + "", + postProcessedOutPath, + ); + + t.truthy(actual); + t.assert(postProcessSarifFiles.calledTwice); + t.assert(fs.existsSync(path.join(postProcessedOutPath, "upload.sarif"))); + t.assert( + fs.existsSync(path.join(postProcessedOutPath, "upload.quality.sarif")), + ); + }); +}); diff --git a/src/upload-sarif.ts b/src/upload-sarif.ts index 34b912489d..bc2c886982 100644 --- a/src/upload-sarif.ts +++ b/src/upload-sarif.ts @@ -1,3 +1,4 @@ +import { UploadKind } from "./actions-util"; import * as analyses from "./analyses"; import { FeatureEnablement } from "./feature-flags"; import { Logger } from "./logging"; @@ -10,22 +11,26 @@ export type UploadSarifResults = Partial< >; /** - * Finds SARIF files 
in `sarifPath` and uploads them to the appropriate services. + * Finds SARIF files in `sarifPath`, post-processes them, and uploads them to the appropriate services. * * @param logger The logger to use. * @param features Information about enabled features. + * @param uploadKind The kind of upload that is requested. * @param checkoutPath The path where the repository was checked out at. * @param sarifPath The path to the file or directory to upload. * @param category The analysis category. + * @param postProcessedOutputPath The path to a directory to which the post-processed SARIF files should be written to. * * @returns A partial mapping from analysis kinds to the upload results. */ -export async function uploadSarif( +export async function postProcessAndUploadSarif( logger: Logger, features: FeatureEnablement, + uploadKind: UploadKind, checkoutPath: string, sarifPath: string, category?: string, + postProcessedOutputPath?: string, ): Promise { const sarifGroups = await upload_lib.getGroupedSarifFilePaths( logger, @@ -37,14 +42,33 @@ export async function uploadSarif( sarifGroups, )) { const analysisConfig = analyses.getAnalysisConfig(analysisKind); - uploadResults[analysisKind] = await upload_lib.uploadSpecifiedFiles( - sarifFiles, + const postProcessingResults = await upload_lib.postProcessSarifFiles( + logger, + features, checkoutPath, + sarifFiles, category, - features, + analysisConfig, + ); + + // Write the post-processed SARIF files to disk. This will only write them if needed based on user inputs + // or environment variables. + await upload_lib.writePostProcessedFiles( logger, + postProcessedOutputPath, analysisConfig, + postProcessingResults, ); + + // Only perform the actual upload of the post-processed files if `uploadKind` is `always`. + if (uploadKind === "always") { + uploadResults[analysisKind] = await upload_lib.uploadPostProcessedFiles( + logger, + checkoutPath, + analysisConfig, + postProcessingResults, + ); + } } return uploadResults;