// upload-lib.js
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.validateUniqueCategory = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
const core = __importStar(require("@actions/core"));
const file_url_1 = __importDefault(require("file-url"));
const jsonschema = __importStar(require("jsonschema"));
const semver = __importStar(require("semver"));
const actionsUtil = __importStar(require("./actions-util"));
const api = __importStar(require("./api-client"));
const fingerprints = __importStar(require("./fingerprints"));
const repository_1 = require("./repository");
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
// Takes a list of paths to SARIF files and combines them,
// returning the contents of the combined SARIF file.
function combineSarifFiles(sarifFiles) {
const combinedSarif = {
version: null,
runs: [],
};
for (const sarifFile of sarifFiles) {
const sarifObject = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
// Check SARIF version
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
}
else if (combinedSarif.version !== sarifObject.version) {
throw new Error(`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`);
}
combinedSarif.runs.push(...sarifObject.runs);
}
return JSON.stringify(combinedSarif);
}
exports.combineSarifFiles = combineSarifFiles;
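// Illustrative usage (a sketch, not part of the module; the file paths are
// hypothetical): combining two SARIF files yields a single document whose
// `runs` array holds the runs from both inputs.
//
//   const { combineSarifFiles } = require("./upload-lib");
//   const combined = JSON.parse(
//     combineSarifFiles(["results/javascript.sarif", "results/python.sarif"])
//   );
//   // combined.runs contains the runs from both files; a mismatch in the
//   // top-level `version` fields would have thrown instead.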
// Populates the run.automationDetails.id field using the category, analysis_key,
// and environment, and returns the updated SARIF contents.
function populateRunAutomationDetails(sarifContents, category, analysis_key, environment) {
if (analysis_key === undefined) {
return sarifContents;
}
const automationID = getAutomationID(category, analysis_key, environment);
const sarif = JSON.parse(sarifContents);
for (const run of sarif.runs || []) {
if (run.automationDetails === undefined) {
run.automationDetails = {
id: automationID,
};
}
}
return JSON.stringify(sarif);
}
exports.populateRunAutomationDetails = populateRunAutomationDetails;
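// Illustrative usage (a sketch with made-up values): a run without an existing
// automationDetails block receives the computed automation ID, while runs that
// already carry automationDetails are left untouched.
//
//   const input = JSON.stringify({ version: "2.1.0", runs: [{ results: [] }] });
//   const output = populateRunAutomationDetails(
//     input,
//     "my-category",                            // category
//     ".github/workflows/codeql.yml:analyze",   // analysis_key (hypothetical)
//     "{}"                                      // environment
//   );
//   // JSON.parse(output).runs[0].automationDetails.id === "my-category/"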
function getAutomationID(category, analysis_key, environment) {
if (category !== undefined) {
let automationID = category;
if (!automationID.endsWith("/")) {
automationID += "/";
}
return automationID;
}
return actionsUtil.computeAutomationID(analysis_key, environment);
}
// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
logger.info("Uploading results");
// If in test mode we don't want to upload the results
const testMode = process.env["TEST_MODE"] === "true" || false;
if (testMode) {
return;
}
const client = api.getApiClient(apiDetails);
const reqURL = util.isActions()
? "PUT /repos/:owner/:repo/code-scanning/analysis"
: "POST /repos/:owner/:repo/code-scanning/sarifs";
const response = await client.request(reqURL, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
});
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
function findSarifFilesInDir(sarifPath) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && entry.name.endsWith(".sarif")) {
sarifFiles.push(path.resolve(dir, entry.name));
}
else if (entry.isDirectory()) {
walkSarifFiles(path.resolve(dir, entry.name));
}
}
};
walkSarifFiles(sarifPath);
return sarifFiles;
}
exports.findSarifFilesInDir = findSarifFilesInDir;
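// Illustrative sketch (the directory layout is hypothetical): given
//
//   results/
//     javascript.sarif
//     nested/python.sarif
//     notes.txt
//
// the walk returns absolute paths to the two ".sarif" files only. Symlinked
// directories are skipped because Dirent.isDirectory() is false for symlinks.
//
//   const files = findSarifFilesInDir("results");
//   // e.g. ["/abs/results/javascript.sarif", "/abs/results/nested/python.sarif"]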
// Uploads a single SARIF file or a directory of SARIF files,
// depending on what the path refers to.
// Returns statistics about the upload (raw and zipped sizes, number of results).
async function uploadFromActions(sarifPath, gitHubVersion, apiDetails, logger) {
return await uploadFiles(
    getSarifFilePaths(sarifPath),
    (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
    await actionsUtil.getCommitOid(),
    await actionsUtil.getRef(),
    await actionsUtil.getAnalysisKey(),
    actionsUtil.getOptionalInput("category"),
    util.getRequiredEnvParam("GITHUB_WORKFLOW"),
    actionsUtil.getWorkflowRunID(),
    actionsUtil.getRequiredInput("checkout_path"),
    actionsUtil.getRequiredInput("matrix"),
    gitHubVersion,
    apiDetails,
    logger);
}
exports.uploadFromActions = uploadFromActions;
// Uploads a single SARIF file or a directory of SARIF files,
// depending on what the path refers to.
// Returns statistics about the upload (raw and zipped sizes, number of results).
async function uploadFromRunner(sarifPath, repositoryNwo, commitOid, ref, category, sourceRoot, gitHubVersion, apiDetails, logger) {
return await uploadFiles(
    getSarifFilePaths(sarifPath),
    repositoryNwo,
    commitOid,
    ref,
    undefined, // analysisKey
    category,
    undefined, // analysisName
    undefined, // workflowRunID
    sourceRoot,
    undefined, // environment
    gitHubVersion,
    apiDetails,
    logger);
}
exports.uploadFromRunner = uploadFromRunner;
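// Illustrative usage (a sketch; every argument value below is hypothetical, and
// `apiDetails`, `logger`, and `util` are assumed to come from the caller's
// existing setup): roughly how a runner entry point might invoke the upload
// from inside an async function.
//
//   const { parseRepositoryNwo } = require("./repository");
//   const stats = await uploadFromRunner(
//     "results/",                                  // SARIF file or directory
//     parseRepositoryNwo("octo-org/octo-repo"),
//     "0123456789abcdef0123456789abcdef01234567",  // commit OID
//     "refs/heads/main",                           // ref
//     "my-category",                               // category
//     "/checkout/root",                            // sourceRoot
//     { type: util.GitHubVariant.DOTCOM },         // gitHubVersion (shape assumed)
//     apiDetails,
//     logger
//   );
//   // stats: { raw_upload_size_bytes, zipped_upload_size_bytes, num_results_in_sarif }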
function getSarifFilePaths(sarifPath) {
if (!fs.existsSync(sarifPath)) {
throw new Error(`Path does not exist: ${sarifPath}`);
}
let sarifFiles;
if (fs.lstatSync(sarifPath).isDirectory()) {
sarifFiles = findSarifFilesInDir(sarifPath);
if (sarifFiles.length === 0) {
throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
}
}
else {
sarifFiles = [sarifPath];
}
return sarifFiles;
}
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) {
let numResults = 0;
let parsedSarif;
try {
parsedSarif = JSON.parse(sarif);
}
catch (e) {
throw new Error(`Invalid SARIF. JSON syntax error: ${e instanceof Error ? e.message : String(e)}`);
}
if (!Array.isArray(parsedSarif.runs)) {
throw new Error("Invalid SARIF. Missing 'runs' array.");
}
for (const run of parsedSarif.runs) {
if (!Array.isArray(run.results)) {
throw new Error("Invalid SARIF. Missing 'results' array in run.");
}
numResults += run.results.length;
}
return numResults;
}
exports.countResultsInSarif = countResultsInSarif;
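// Illustrative usage (a sketch with inline data): the count is the sum of the
// `results` array lengths across all runs; malformed input throws rather than
// returning a partial count.
//
//   countResultsInSarif(JSON.stringify({
//     runs: [{ results: [{}, {}] }, { results: [{}] }],
//   }));                       // => 3
//   countResultsInSarif("{");  // throws "Invalid SARIF. JSON syntax error: ..."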
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
function validateSarifFileSchema(sarifFilePath, logger) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
const schema = require("../src/sarif_v2.1.0_schema.json");
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
logger.startGroup(`Error details: ${error.stack}`);
logger.info(JSON.stringify(error, null, 2));
logger.endGroup();
}
// Set the main error message to the stacks of all the errors.
// This should be of a manageable size and may even give enough information to fix the error.
const sarifErrors = result.errors.map((e) => `- ${e.stack}`);
throw new Error(`Unable to upload "${sarifFilePath}" as it is not valid SARIF:\n${sarifErrors.join("\n")}`);
}
}
exports.validateSarifFileSchema = validateSarifFileSchema;
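// Illustrative usage (a sketch; the file path and `logger` are hypothetical):
// validation either returns silently or throws with a summary line per schema
// violation, so a caller can simply wrap it in try/catch.
//
//   try {
//     validateSarifFileSchema("results/javascript.sarif", logger);
//   } catch (e) {
//     // e.message lists one "- <violation>" line per schema error.
//     logger.error(String(e));
//   }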
// buildPayload constructs the payload object to be uploaded to the API from the given
// parameters, respecting the current mode and target GitHub instance version.
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion) {
if (util.isActions()) {
const payloadObj = {
commit_oid: commitOid,
ref,
analysis_key: analysisKey,
analysis_name: analysisName,
sarif: zippedSarif,
workflow_run_id: workflowRunID,
checkout_uri: checkoutURI,
environment,
started_at: process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
tool_names: toolNames,
base_ref: undefined,
base_sha: undefined,
};
// This behaviour can be made the default when support for GHES 3.0 is discontinued.
if (gitHubVersion.type !== util.GitHubVariant.GHES ||
semver.satisfies(gitHubVersion.version, `>=3.1`)) {
if (process.env.GITHUB_EVENT_NAME === "pull_request" &&
process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
}
}
return payloadObj;
}
else {
return {
commit_sha: commitOid,
ref,
sarif: zippedSarif,
checkout_uri: checkoutURI,
tool_name: toolNames[0],
};
}
}
exports.buildPayload = buildPayload;
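// Illustrative sketch of the two payload shapes produced above (abridged):
//
//   Actions mode:
//     { commit_oid, ref, analysis_key, analysis_name, sarif, workflow_run_id,
//       checkout_uri, environment, started_at, tool_names,
//       base_ref?, base_sha? }       // base_* only set on pull_request events
//
//   Runner mode:
//     { commit_sha, ref, sarif, checkout_uri, tool_name }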
// Uploads the given set of SARIF files.
// Returns statistics about the upload (raw and zipped sizes, number of results).
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) {
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
validateUniqueCategory(category);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
}
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = await fingerprints.addFingerprints(sarifPayload, sourceRoot, logger);
sarifPayload = populateRunAutomationDetails(sarifPayload, category, analysisKey, environment);
const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = (0, file_url_1.default)(sourceRoot);
const toolNames = util.getToolNames(sarifPayload);
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion);
// Log some useful debug info about the upload
const rawUploadSizeBytes = sarifPayload.length;
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
const zippedUploadSizeBytes = zippedSarif.length;
logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
await uploadPayload(payload, repositoryNwo, apiDetails, logger);
logger.endGroup();
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
};
}
function validateUniqueCategory(category) {
if (util.isActions()) {
// This check only works on Actions, as env vars don't persist between separate invocations of the runner
const sentinelEnvVar = `CODEQL_UPLOAD_SARIF${category ? `_${sanitize(category)}` : ""}`;
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per category. " +
"Please specify a unique `category` to call this action multiple times. " +
`Category: ${category ? category : "(none)"}`);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
}
exports.validateUniqueCategory = validateUniqueCategory;
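// Illustrative sketch (only applies when running on Actions; on the runner the
// check is a no-op). The sentinel variable name is derived from the sanitized
// category, so a second upload in the same job with the same category fails fast.
//
//   validateUniqueCategory(undefined);       // exports CODEQL_UPLOAD_SARIF
//   validateUniqueCategory("my/category");   // exports CODEQL_UPLOAD_SARIF_my_category
//   validateUniqueCategory("my/category");   // throws: one upload per category per job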
/**
* Sanitizes a string to be used as an environment variable name.
* This will replace all non-alphanumeric characters with underscores.
* There could still be some false category clashes if two uploads
* occur that differ only in their non-alphanumeric characters. This is
* unlikely.
*
* @param str the initial value to sanitize
*/
function sanitize(str) {
return str.replace(/[^a-zA-Z0-9_]/g, "_");
}
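// Illustrative examples of the sanitized names produced above:
//   sanitize("language:javascript")  // => "language_javascript"
//   sanitize(".github/codeql.yml")   // => "_github_codeql_yml"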
//# sourceMappingURL=upload-lib.js.map