analyze.js
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.CodeQLAnalysisError = void 0;
exports.runExtraction = runExtraction;
exports.dbIsFinalized = dbIsFinalized;
exports.setupDiffInformedQueryRun = setupDiffInformedQueryRun;
exports.runQueries = runQueries;
exports.runFinalize = runFinalize;
exports.warnIfGoInstalledAfterInit = warnIfGoInstalledAfterInit;
exports.runCleanup = runCleanup;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const perf_hooks_1 = require("perf_hooks");
const io = __importStar(require("@actions/io"));
const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml"));
const actionsUtil = __importStar(require("./actions-util"));
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const diagnostics_1 = require("./diagnostics");
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const gitUtils = __importStar(require("./git-utils"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const tools_features_1 = require("./tools-features");
const tracer_config_1 = require("./tracer-config");
const upload_lib_1 = require("./upload-lib");
const util = __importStar(require("./util"));
const util_1 = require("./util");
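/**
 * Error thrown when running the analysis queries fails. Carries the partial
 * queries status report gathered before the failure so callers can still
 * report it.
 */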
class CodeQLAnalysisError extends Error {
constructor(queriesStatusReport, message, error) {
super(message);
this.queriesStatusReport = queriesStatusReport;
this.message = message;
this.error = error;
this.name = "CodeQLAnalysisError";
}
}
exports.CodeQLAnalysisError = CodeQLAnalysisError;
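/**
 * Warn and return if the deprecated CODEQL_PYTHON environment variable is
 * set. No Python extractor configuration is performed any more.
 */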
async function setupPythonExtractor(logger) {
const codeqlPython = process.env["CODEQL_PYTHON"];
if (codeqlPython === undefined || codeqlPython.length === 0) {
// If CODEQL_PYTHON is not set, no dependencies were installed, so we don't need to do anything
return;
}
logger.warning("The CODEQL_PYTHON environment variable is no longer supported. Please remove it from your workflow. This environment variable was originally used to specify a Python executable that included the dependencies of your Python code, however Python analysis no longer uses these dependencies." +
"\nIf you used CODEQL_PYTHON to force the version of Python to analyze as, please use CODEQL_EXTRACTOR_PYTHON_ANALYSIS_VERSION instead, such as 'CODEQL_EXTRACTOR_PYTHON_ANALYSIS_VERSION=2.7' or 'CODEQL_EXTRACTOR_PYTHON_ANALYSIS_VERSION=3.11'.");
return;
}
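/**
 * Run extraction for every configured language that still needs it, skipping
 * languages whose databases have already been finalized.
 */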
async function runExtraction(codeql, config, logger) {
for (const language of config.languages) {
if (dbIsFinalized(config, language, logger)) {
logger.debug(`Database for ${language} has already been finalized, skipping extraction.`);
continue;
}
if (shouldExtractLanguage(config, language)) {
logger.startGroup(`Extracting ${language}`);
if (language === languages_1.Language.python) {
await setupPythonExtractor(logger);
}
if (config.buildMode &&
(await codeql.supportsFeature(tools_features_1.ToolsFeature.TraceCommandUseBuildMode))) {
if (language === languages_1.Language.cpp &&
config.buildMode === util_1.BuildMode.Autobuild) {
await (0, autobuild_1.setupCppAutobuild)(codeql, logger);
}
await codeql.extractUsingBuildMode(config, language);
}
else {
await codeql.extractScannedLanguage(config, language);
}
logger.endGroup();
}
}
}
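/**
 * A language needs an explicit extraction step when the build mode is `none`,
 * when the autobuild Action did not already complete successfully, or when no
 * build mode is set and the language is a scanned (non-compiled) language.
 */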
function shouldExtractLanguage(config, language) {
return (config.buildMode === util_1.BuildMode.None ||
(config.buildMode === util_1.BuildMode.Autobuild &&
process.env[environment_1.EnvVar.AUTOBUILD_DID_COMPLETE_SUCCESSFULLY] !== "true") ||
(!config.buildMode && (0, languages_1.isScannedLanguage)(language)));
}
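/**
 * A database is considered finalized once its codeql-database.yml no longer
 * contains an `inProgress` marker. If the file cannot be read, assume the
 * database is not finalized.
 */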
function dbIsFinalized(config, language, logger) {
const dbPath = util.getCodeQLDatabasePath(config, language);
try {
const dbInfo = yaml.load(fs.readFileSync(path.resolve(dbPath, "codeql-database.yml"), "utf8"));
return !("inProgress" in dbInfo);
}
catch {
logger.warning(`Could not check whether database for ${language} was finalized. Assuming it is not.`);
return false;
}
}
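/**
 * Run extraction and then finalize the database for each language, returning
 * the time spent on extraction and on TRAP import/finalization.
 */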
async function finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag, logger) {
const extractionStart = perf_hooks_1.performance.now();
await runExtraction(codeql, config, logger);
const extractionTime = perf_hooks_1.performance.now() - extractionStart;
const trapImportStart = perf_hooks_1.performance.now();
for (const language of config.languages) {
if (dbIsFinalized(config, language, logger)) {
logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`);
}
else {
logger.startGroup(`Finalizing ${language}`);
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config, language), threadsFlag, memoryFlag, config.debugMode);
logger.endGroup();
}
}
const trapImportTime = perf_hooks_1.performance.now() - trapImportStart;
return {
scanned_language_extraction_duration_ms: Math.round(extractionTime),
trap_import_duration_ms: Math.round(trapImportTime),
};
}
/**
* Set up the diff-informed analysis feature.
*
* @param baseRef The base branch name, used for calculating the diff range.
* @param headRef The head branch name, used for calculating the diff range.
* @param codeql
* @param logger
* @param features
* @returns Absolute path to the directory containing the extension pack for
* the diff range information, or `undefined` if the feature is disabled.
*/
async function setupDiffInformedQueryRun(baseRef, headRef, codeql, logger, features) {
if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
return undefined;
}
return await (0, logging_1.withGroupAsync)("Generating diff range extension pack", async () => {
const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headRef, logger);
return writeDiffRangeDataExtensionPack(logger, diffRanges);
});
}
/**
* Return the file line ranges that were added or modified in the pull request.
*
* @param baseRef The base branch name, used for calculating the diff range.
* @param headRef The head branch name, used for calculating the diff range.
* @param logger
* @returns An array of tuples, where each tuple contains the absolute path of a
* file, the start line and the end line (both 1-based and inclusive) of an
* added or modified range in that file. Returns `undefined` if the action was
* not triggered by a pull request or if there was an error.
*/
async function getPullRequestEditedDiffRanges(baseRef, headRef, logger) {
const checkoutPath = actionsUtil.getOptionalInput("checkout_path");
if (checkoutPath === undefined) {
return undefined;
}
// To compute the merge bases between the base branch and the PR topic branch,
// we need to fetch the commit graph from the branch heads to those merge
// bases. The following 6-step procedure does so while limiting the amount of
// history fetched.
// Step 1: Deepen from the PR merge commit to the base branch head and the PR
// topic branch head, so that the PR merge commit is no longer considered a
// grafted commit.
await gitUtils.deepenGitHistory();
// Step 2: Fetch the base branch shallow history. This step ensures that the
// base branch name is present in the local repository. Normally the base
// branch name would be added by Step 4. However, if the base branch head is
// an ancestor of the PR topic branch head, Step 4 would fail without doing
// anything, so we need to fetch the base branch explicitly.
await gitUtils.gitFetch(baseRef, ["--depth=1"]);
// Step 3: Fetch the PR topic branch history, stopping when we reach commits
// that are reachable from the base branch head.
await gitUtils.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);
// Step 4: Fetch the base branch history, stopping when we reach commits that
// are reachable from the PR topic branch head.
await gitUtils.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
// Step 5: Repack the history to remove the shallow grafts that were added by
// the previous fetches. This step works around a bug that causes subsequent
// deepening fetches to fail with "fatal: error in object: unshallow <SHA>".
// See https://stackoverflow.com/q/63878612
await gitUtils.gitRepack(["-d"]);
// Step 6: Deepen the history so that we have the merge bases between the base
// branch and the PR topic branch.
await gitUtils.deepenGitHistory();
// To compute the exact same diff as GitHub would compute for the PR, we need
// to use the same merge base as GitHub. That is easy to do if there is only
// one merge base, which is by far the most common case. If there are multiple
// merge bases, we stop without producing a diff range.
const mergeBases = await gitUtils.getAllGitMergeBases([baseRef, headRef]);
logger.info(`Merge bases: ${mergeBases.join(", ")}`);
if (mergeBases.length !== 1) {
logger.info("Cannot compute diff range because baseRef and headRef " +
`have ${mergeBases.length} merge bases (instead of exactly 1).`);
return undefined;
}
const diffHunkHeaders = await gitUtils.getGitDiffHunkHeaders(mergeBases[0], headRef);
if (diffHunkHeaders === undefined) {
return undefined;
}
const results = new Array();
let changedFile = "";
for (const line of diffHunkHeaders) {
if (line.startsWith("+++ ")) {
const filePath = gitUtils.decodeGitFilePath(line.substring(4));
if (filePath.startsWith("b/")) {
// The file was edited: track all hunks in the file
changedFile = filePath.substring(2);
}
else if (filePath === "/dev/null") {
// The file was deleted: skip all hunks in the file
changedFile = "";
}
else {
logger.warning(`Failed to parse diff hunk header line: ${line}`);
return undefined;
}
continue;
}
if (line.startsWith("@@ ")) {
if (changedFile === "")
continue;
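// Hunk headers have the form "@@ -12,5 +15,7 @@": the "+15,7" part gives the
// 1-based start line and line count of the range in the new version of the
// file. The count is omitted for single-line hunks, in which case `numLines`
// below is NaN and the fallback of 1 is used.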
const match = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
if (match === null) {
logger.warning(`Failed to parse diff hunk header line: ${line}`);
return undefined;
}
const startLine = parseInt(match[1], 10);
const numLines = parseInt(match[2], 10);
if (numLines === 0) {
// The hunk was a deletion: skip it
continue;
}
const endLine = startLine + (numLines || 1) - 1;
results.push({
path: path.join(checkoutPath, changedFile),
startLine,
endLine,
});
}
}
return results;
}
/**
* Create an extension pack in the temporary directory that contains the file
* line ranges that were added or modified in the pull request.
*
* @param logger
* @param ranges The file line ranges, as returned by
* `getPullRequestEditedDiffRanges`.
* @returns The absolute path of the directory containing the extension pack, or
* `undefined` if no extension pack was created.
*/
function writeDiffRangeDataExtensionPack(logger, ranges) {
if (ranges === undefined) {
return undefined;
}
const diffRangeDir = path.join(actionsUtil.getTemporaryDirectory(), "pr-diff-range");
fs.mkdirSync(diffRangeDir);
fs.writeFileSync(path.join(diffRangeDir, "qlpack.yml"), `
name: codeql-action/pr-diff-range
version: 0.0.0
library: true
extensionTargets:
  codeql/util: '*'
dataExtensions:
  - pr-diff-range.yml
`);
const header = `
extensions:
  - addsTo:
      pack: codeql/util
      extensible: restrictAlertsTo
    data:
`;
let data = ranges
.map((range) => `      - ["${range.path}", ${range.startLine}, ${range.endLine}]\n`)
.join("");
if (!data) {
// Ensure that the data extension is not empty, so that a pull request with
// no edited lines would exclude (instead of accepting) all alerts.
data = '      - ["", 0, 0]\n';
}
const extensionContents = header + data;
const extensionFilePath = path.join(diffRangeDir, "pr-diff-range.yml");
fs.writeFileSync(extensionFilePath, extensionContents);
logger.debug(`Wrote pr-diff-range extension pack to ${extensionFilePath}:\n${extensionContents}`);
return diffRangeDir;
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, diffRangePackDir, automationDetailsId, config, logger, features) {
const statusReport = {};
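// If a diff range extension pack was generated, make it discoverable via
// --additional-packs and apply it with --extension-packs so that alerts are
// restricted to the changed lines, and tag the SARIF run as diff-informed.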
const dataExtensionFlags = diffRangePackDir
? [
`--additional-packs=${diffRangePackDir}`,
"--extension-packs=codeql-action/pr-diff-range",
]
: [];
const sarifRunPropertyFlag = diffRangePackDir
? "--sarif-run-property=incrementalMode=diff-informed"
: undefined;
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const queryFlags = [memoryFlag, threadsFlag, ...dataExtensionFlags];
for (const language of config.languages) {
try {
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
// The work needed to generate the query suites
// is done in the CLI. We just need to make a single
// call to run all the queries for each language and
// another to interpret the results.
logger.startGroup(`Running queries for ${language}`);
const startTimeRunQueries = new Date().getTime();
const databasePath = util.getCodeQLDatabasePath(config, language);
await codeql.databaseRunQueries(databasePath, queryFlags);
logger.debug(`Finished running queries for ${language}.`);
// TODO should not be using `builtin` here. We should be using `all` instead.
// The status report does not support `all` yet.
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
new Date().getTime() - startTimeRunQueries;
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date();
const analysisSummary = await runInterpretResults(language, undefined, sarifFile, config.debugMode);
const endTimeInterpretResults = new Date();
statusReport[`interpret_results_${language}_duration_ms`] =
endTimeInterpretResults.getTime() - startTimeInterpretResults.getTime();
logger.endGroup();
logger.info(analysisSummary);
if (await features.getValue(feature_flags_1.Feature.QaTelemetryEnabled)) {
const perQueryAlertCounts = getPerQueryAlertCounts(sarifFile, logger);
const perQueryAlertCountEventReport = {
event: "codeql database interpret-results",
started_at: startTimeInterpretResults.toISOString(),
completed_at: endTimeInterpretResults.toISOString(),
exit_status: "success",
language,
properties: {
alertCounts: perQueryAlertCounts,
},
};
if (statusReport["event_reports"] === undefined) {
statusReport["event_reports"] = [];
}
statusReport["event_reports"].push(perQueryAlertCountEventReport);
}
if (!(await util.codeQlVersionAtLeast(codeql, codeql_1.CODEQL_VERSION_ANALYSIS_SUMMARY_V2))) {
await runPrintLinesOfCode(language);
}
}
catch (e) {
statusReport.analyze_failure_language = language;
throw new CodeQLAnalysisError(statusReport, `Error running analysis for ${language}: ${util.getErrorMessage(e)}`, util.wrapError(e));
}
}
return statusReport;
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", sarifRunPropertyFlag, automationDetailsId, config, features);
}
/** Get an object with all queries and their counts parsed from a SARIF file path. */
function getPerQueryAlertCounts(sarifPath, log) {
(0, upload_lib_1.validateSarifFileSchema)(sarifPath, log);
const sarifObject = JSON.parse(fs.readFileSync(sarifPath, "utf8"));
// We do not need to compute fingerprints because we are not sending data based off of locations.
// Generate the query: alert count object
const perQueryAlertCounts = {};
// All rules (queries), from all results, from all runs
for (const sarifRun of sarifObject.runs) {
if (sarifRun.results) {
for (const result of sarifRun.results) {
const query = result.rule?.id || result.ruleId;
if (query) {
perQueryAlertCounts[query] = (perQueryAlertCounts[query] || 0) + 1;
}
}
}
}
return perQueryAlertCounts;
}
async function runPrintLinesOfCode(language) {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databasePrintBaseline(databasePath);
}
}
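/**
 * Delete any existing output directory, finalize all databases, and end build
 * tracing if the autobuild Action has not already done so. Returns extraction
 * and finalization timings.
 */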
async function runFinalize(outputDir, threadsFlag, memoryFlag, codeql, config, logger) {
try {
await (0, del_1.default)(outputDir, { force: true });
}
catch (error) {
if (error?.code !== "ENOENT") {
throw error;
}
}
await fs.promises.mkdir(outputDir, { recursive: true });
const timings = await finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag, logger);
// If we didn't already end tracing in the autobuild Action, end it now.
if (process.env[environment_1.EnvVar.AUTOBUILD_DID_COMPLETE_SUCCESSFULLY] !== "true") {
await (0, tracer_config_1.endTracingForCluster)(codeql, config, logger);
}
return timings;
}
async function warnIfGoInstalledAfterInit(config, logger) {
// Check that `which go` still points at the same path it did when the `init` Action ran to ensure that no steps
// in-between performed any setup. We encourage users to perform all setup tasks before initializing CodeQL so that
// the setup tasks do not interfere with our analysis.
// Furthermore, if we installed a wrapper script in the `init` Action, we need to ensure that there isn't a step
// in the workflow after the `init` step which installs a different version of Go and takes precedence in the PATH,
// thus potentially circumventing our workaround that allows tracing to work.
const goInitPath = process.env[environment_1.EnvVar.GO_BINARY_LOCATION];
if (process.env[environment_1.EnvVar.DID_AUTOBUILD_GOLANG] !== "true" &&
goInitPath !== undefined) {
const goBinaryPath = await io.which("go", true);
if (goInitPath !== goBinaryPath) {
logger.warning(`Expected \`which go\` to return ${goInitPath}, but got ${goBinaryPath}: please ensure that the correct version of Go is installed before the \`codeql-action/init\` Action is used.`);
(0, diagnostics_1.addDiagnostic)(config, languages_1.Language.go, (0, diagnostics_1.makeDiagnostic)("go/workflow/go-installed-after-codeql-init", "Go was installed after the `codeql-action/init` Action was run", {
markdownMessage: "To avoid interfering with the CodeQL analysis, perform all installation steps before calling the `github/codeql-action/init` Action.",
visibility: {
statusPage: true,
telemetry: true,
cliSummaryTable: true,
},
severity: "warning",
}));
}
}
}
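/**
 * Run `codeql database cleanup` on each language's database at the requested
 * cleanup level.
 */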
async function runCleanup(config, cleanupLevel, logger) {
logger.startGroup("Cleaning up databases");
for (const language of config.languages) {
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const databasePath = util.getCodeQLDatabasePath(config, language);
await codeql.databaseCleanup(databasePath, cleanupLevel);
}
logger.endGroup();
}
//# sourceMappingURL=analyze.js.map