Merge remote-tracking branch 'upstream/main' into aeisenberg/run-with-all-data-extensions
.vscode/settings.json (vendored): 8 lines changed
@@ -48,6 +48,14 @@
"env": {
  "LANG": "en-US",
  "TZ": "UTC",

  // Uncomment to set a custom path to a CodeQL checkout.
  // "TEST_CODEQL_PATH": "../codeql",

  // Uncomment to set a custom path to a CodeQL CLI executable.
  // This is the CodeQL version that will be used in the tests.
  // "CLI_PATH": "/path/to/customg/codeql",

  // Uncomment to debug integration tests
  // "VSCODE_WAIT_FOR_DEBUGGER": "true",
}
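For context, a minimal sketch of how an integration test could consume these optional overrides; the variable names come from the settings snippet above, while the resolution logic shown here is only an assumption, not the extension's actual test setup.

// Sketch only: reads the optional overrides defined in .vscode/settings.json.
const customCliPath: string | undefined = process.env.CLI_PATH;
const customCodeqlCheckout: string | undefined = process.env.TEST_CODEQL_PATH;
const waitForDebugger: boolean = process.env.VSCODE_WAIT_FOR_DEBUGGER === "true";

if (customCliPath !== undefined) {
  console.log(`Tests will use the CodeQL CLI at ${customCliPath}`);
}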
@@ -377,7 +377,7 @@
},
{
  "command": "codeQLVariantAnalysisRepositories.openConfigFile",
  "title": "Open Database Configuration File",
  "title": "Open database configuration file",
  "icon": "$(json)"
},
{
@@ -476,7 +476,7 @@
},
{
  "command": "codeQLDatabases.setCurrentDatabase",
  "title": "Set Current Database"
  "title": "Select"
},
{
  "command": "codeQLDatabases.removeDatabase",
@@ -254,6 +254,11 @@ export class DbPanel extends DisposableObject {
        "Not a selectable database item. Please select a valid item.",
      );
    }

    // Optimistically update the UI to select the item that the user
    // selected to avoid delay in the UI.
    this.dataProvider.updateSelectedItem(treeViewItem);

    await this.dbManager.setSelectedDbItem(treeViewItem.dbItem);
  }
@@ -48,6 +48,23 @@ export class DbTreeDataProvider
    });
  }

  /**
   * Updates the selected item and re-renders the tree.
   * @param selectedItem The item to select.
   */
  public updateSelectedItem(selectedItem: DbTreeViewItem): void {
    // Unselect all items
    for (const item of this.dbTreeItems) {
      item.setAsUnselected();
    }

    // Select the new item
    selectedItem.setAsSelected();

    // Re-render the tree
    this._onDidChangeTreeData.fire(undefined);
  }

  /**
   * Called when expanding a node (including the root node).
   * @param node The node to expand.
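As a side note, updateSelectedItem relies on the standard VS Code tree-refresh mechanism: firing the change event with undefined makes VS Code re-request the whole tree. A minimal, self-contained sketch of that wiring (not the extension's actual provider) looks like this:

import * as vscode from "vscode";

// Minimal provider showing the refresh mechanism updateSelectedItem uses:
// firing the change event with `undefined` re-requests the entire tree.
class ExampleTreeDataProvider implements vscode.TreeDataProvider<vscode.TreeItem> {
  private readonly _onDidChangeTreeData = new vscode.EventEmitter<
    vscode.TreeItem | undefined
  >();
  public readonly onDidChangeTreeData = this._onDidChangeTreeData.event;

  getTreeItem(element: vscode.TreeItem): vscode.TreeItem {
    return element;
  }

  getChildren(): vscode.TreeItem[] {
    return [new vscode.TreeItem("example item")];
  }

  // Equivalent of the re-render step at the end of updateSelectedItem.
  refreshAll(): void {
    this._onDidChangeTreeData.fire(undefined);
  }
}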
@@ -36,11 +36,19 @@ export class DbTreeViewItem extends vscode.TreeItem {
    if (dbItem) {
      this.contextValue = getContextValue(dbItem);
      if (isSelectableDbItem(dbItem) && dbItem.selected) {
        // Define the resource id to drive the UI to render this item as selected.
        this.resourceUri = vscode.Uri.parse(SELECTED_DB_ITEM_RESOURCE_URI);
        this.setAsSelected();
      }
    }
  }

  public setAsSelected(): void {
    // Define the resource id to drive the UI to render this item as selected.
    this.resourceUri = vscode.Uri.parse(SELECTED_DB_ITEM_RESOURCE_URI);
  }

  public setAsUnselected(): void {
    this.resourceUri = undefined;
  }
}

function getContextValue(dbItem: DbItem): string | undefined {
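The resourceUri sentinel assigned by setAsSelected only has a visual effect if something maps it to a decoration. One plausible way to do that is sketched below with an assumed sentinel value; the real SELECTED_DB_ITEM_RESOURCE_URI constant lives elsewhere in the extension and its actual decoration logic may differ.

import * as vscode from "vscode";

// Assumed sentinel value for illustration only.
const SELECTED_DB_ITEM_RESOURCE_URI = "codeql-db-item://selected";

const selectionDecorationProvider: vscode.FileDecorationProvider = {
  provideFileDecoration(uri: vscode.Uri): vscode.FileDecoration | undefined {
    // Items whose resourceUri matches the sentinel get a visual marker.
    if (uri.toString() === vscode.Uri.parse(SELECTED_DB_ITEM_RESOURCE_URI).toString()) {
      return new vscode.FileDecoration("✓", "Currently selected database");
    }
    return undefined;
  },
};

// Registered once during extension activation (sketch).
vscode.window.registerFileDecorationProvider(selectionDecorationProvider);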
@@ -4,28 +4,28 @@ import {
  CancellationTokenSource,
  commands,
  Disposable,
  env,
  ExtensionContext,
  extensions,
  languages,
  ProgressLocation,
  ProgressOptions,
  Uri,
  window as Window,
  env,
  window,
  ProviderResult,
  QuickPickItem,
  Range,
  workspace,
  ProviderResult,
  Uri,
  version as vscodeVersion,
  window as Window,
  window,
  workspace,
} from "vscode";
import { LanguageClient } from "vscode-languageclient/node";
import { platform, arch } from "os";
import { arch, platform } from "os";
import { ensureDir } from "fs-extra";
import { join, basename } from "path";
import { basename, join } from "path";
import { dirSync } from "tmp-promise";
import { testExplorerExtensionId, TestHub } from "vscode-test-adapter-api";
import { parse, lt } from "semver";
import { lt, parse } from "semver";

import { AstViewer } from "./astViewer";
import {
@@ -38,7 +38,6 @@ import {
|
||||
CliConfigListener,
|
||||
DistributionConfigListener,
|
||||
isCanary,
|
||||
isVariantAnalysisLiveResultsEnabled,
|
||||
joinOrderWarningThreshold,
|
||||
MAX_QUERIES,
|
||||
QueryHistoryConfigListener,
|
||||
@@ -48,10 +47,10 @@ import { install } from "./languageSupport";
|
||||
import { DatabaseItem, DatabaseManager } from "./databases";
|
||||
import { DatabaseUI } from "./databases-ui";
|
||||
import {
|
||||
TemplateQueryDefinitionProvider,
|
||||
TemplateQueryReferenceProvider,
|
||||
TemplatePrintAstProvider,
|
||||
TemplatePrintCfgProvider,
|
||||
TemplateQueryDefinitionProvider,
|
||||
TemplateQueryReferenceProvider,
|
||||
} from "./contextual/templateProvider";
|
||||
import {
|
||||
DEFAULT_DISTRIBUTION_VERSION_RANGE,
|
||||
@@ -65,22 +64,22 @@ import {
|
||||
} from "./distribution";
|
||||
import {
|
||||
findLanguage,
|
||||
tmpDirDisposal,
|
||||
showBinaryChoiceDialog,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogWarningMessage,
|
||||
showAndLogExceptionWithTelemetry,
|
||||
showAndLogInformationMessage,
|
||||
showAndLogWarningMessage,
|
||||
showBinaryChoiceDialog,
|
||||
showInformationMessageWithAction,
|
||||
tmpDir,
|
||||
showAndLogExceptionWithTelemetry,
|
||||
tmpDirDisposal,
|
||||
} from "./helpers";
|
||||
import { asError, assertNever, getErrorMessage } from "./pure/helpers-pure";
|
||||
import { spawnIdeServer } from "./ide-server";
|
||||
import { ResultsView } from "./interface";
|
||||
import { WebviewReveal } from "./interface-utils";
|
||||
import {
|
||||
ideServerLogger,
|
||||
extLogger,
|
||||
ideServerLogger,
|
||||
ProgressReporter,
|
||||
queryServerLogger,
|
||||
} from "./common";
|
||||
@@ -98,13 +97,10 @@ import {
|
||||
commandRunner,
|
||||
commandRunnerWithProgress,
|
||||
ProgressCallback,
|
||||
withProgress,
|
||||
ProgressUpdate,
|
||||
withProgress,
|
||||
} from "./commandRunner";
|
||||
import { CodeQlStatusBarHandler } from "./status-bar";
|
||||
|
||||
import { RemoteQueriesManager } from "./remote-queries/remote-queries-manager";
|
||||
import { RemoteQueryResult } from "./remote-queries/remote-query-result";
|
||||
import { URLSearchParams } from "url";
|
||||
import {
|
||||
handleDownloadPacks,
|
||||
@@ -112,11 +108,9 @@ import {
|
||||
} from "./packaging";
|
||||
import { HistoryItemLabelProvider } from "./query-history/history-item-label-provider";
|
||||
import {
|
||||
exportRemoteQueryResults,
|
||||
exportSelectedRemoteQueryResults,
|
||||
exportVariantAnalysisResults,
|
||||
} from "./remote-queries/export-results";
|
||||
import { RemoteQuery } from "./remote-queries/remote-query";
|
||||
import { EvalLogViewer } from "./eval-log-viewer";
|
||||
import { SummaryLanguageSupport } from "./log-insights/summary-language-support";
|
||||
import { JoinOrderScannerProvider } from "./log-insights/join-order";
|
||||
@@ -656,23 +650,11 @@ async function activateWithInstalledDistribution(
|
||||
),
|
||||
);
|
||||
|
||||
void extLogger.log("Initializing remote queries manager.");
|
||||
const rqm = new RemoteQueriesManager(
|
||||
ctx,
|
||||
app,
|
||||
cliServer,
|
||||
queryStorageDir,
|
||||
extLogger,
|
||||
);
|
||||
ctx.subscriptions.push(rqm);
|
||||
|
||||
void extLogger.log("Initializing query history.");
|
||||
const qhm = new QueryHistoryManager(
|
||||
app,
|
||||
qs,
|
||||
dbm,
|
||||
localQueryResultsView,
|
||||
rqm,
|
||||
variantAnalysisManager,
|
||||
evalLogViewer,
|
||||
queryStorageDir,
|
||||
@@ -1112,19 +1094,11 @@ async function activateWithInstalledDistribution(
|
||||
message: "Getting credentials",
|
||||
});
|
||||
|
||||
if (isVariantAnalysisLiveResultsEnabled()) {
|
||||
await variantAnalysisManager.runVariantAnalysis(
|
||||
uri || window.activeTextEditor?.document.uri,
|
||||
progress,
|
||||
token,
|
||||
);
|
||||
} else {
|
||||
await rqm.runRemoteQuery(
|
||||
uri || window.activeTextEditor?.document.uri,
|
||||
progress,
|
||||
token,
|
||||
);
|
||||
}
|
||||
await variantAnalysisManager.runVariantAnalysis(
|
||||
uri || window.activeTextEditor?.document.uri,
|
||||
progress,
|
||||
token,
|
||||
);
|
||||
} else {
|
||||
throw new Error(
|
||||
"Variant analysis requires the CodeQL Canary version to run.",
|
||||
@@ -1138,21 +1112,6 @@ async function activateWithInstalledDistribution(
|
||||
),
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
"codeQL.monitorRemoteQuery",
|
||||
async (queryId: string, query: RemoteQuery, token: CancellationToken) => {
|
||||
await rqm.monitorRemoteQuery(queryId, query, token);
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner("codeQL.copyRepoList", async (queryId: string) => {
|
||||
await rqm.copyRemoteQueryRepoListToClipboard(queryId);
|
||||
}),
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
"codeQL.openVariantAnalysisLogs",
|
||||
@@ -1215,30 +1174,12 @@ async function activateWithInstalledDistribution(
|
||||
),
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
"codeQL.autoDownloadRemoteQueryResults",
|
||||
async (queryResult: RemoteQueryResult, token: CancellationToken) => {
|
||||
await rqm.autoDownloadRemoteQueryResults(queryResult, token);
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner("codeQL.exportSelectedVariantAnalysisResults", async () => {
|
||||
await exportSelectedRemoteQueryResults(qhm);
|
||||
}),
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
"codeQL.exportRemoteQueryResults",
|
||||
async (queryId: string) => {
|
||||
await exportRemoteQueryResults(qhm, rqm, queryId, app.credentials);
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress(
|
||||
"codeQL.exportVariantAnalysisResults",
|
||||
|
||||
@@ -7,7 +7,6 @@ import {
|
||||
getRawQueryName,
|
||||
QueryHistoryInfo,
|
||||
} from "./query-history-info";
|
||||
import { RemoteQueryHistoryItem } from "../remote-queries/remote-query-history-item";
|
||||
import { VariantAnalysisHistoryItem } from "./variant-analysis-history-item";
|
||||
import { assertNever } from "../pure/helpers-pure";
|
||||
import { pluralize } from "../pure/word";
|
||||
@@ -34,9 +33,6 @@ export class HistoryItemLabelProvider {
|
||||
case "local":
|
||||
replacements = this.getLocalInterpolateReplacements(item);
|
||||
break;
|
||||
case "remote":
|
||||
replacements = this.getRemoteInterpolateReplacements(item);
|
||||
break;
|
||||
case "variant-analysis":
|
||||
replacements = this.getVariantAnalysisInterpolateReplacements(item);
|
||||
break;
|
||||
@@ -92,25 +88,6 @@ export class HistoryItemLabelProvider {
|
||||
};
|
||||
}
|
||||
|
||||
private getRemoteInterpolateReplacements(
|
||||
item: RemoteQueryHistoryItem,
|
||||
): InterpolateReplacements {
|
||||
const resultCount = item.resultCount
|
||||
? `(${pluralize(item.resultCount, "result", "results")})`
|
||||
: "";
|
||||
return {
|
||||
t: new Date(item.remoteQuery.executionStartTime).toLocaleString(
|
||||
env.language,
|
||||
),
|
||||
q: `${item.remoteQuery.queryName} (${item.remoteQuery.language})`,
|
||||
d: buildRepoLabel(item),
|
||||
r: resultCount,
|
||||
s: humanizeQueryStatus(item.status),
|
||||
f: basename(item.remoteQuery.queryFilePath),
|
||||
"%": "%",
|
||||
};
|
||||
}
|
||||
|
||||
private getVariantAnalysisInterpolateReplacements(
|
||||
item: VariantAnalysisHistoryItem,
|
||||
): InterpolateReplacements {
|
||||
|
||||
@@ -236,8 +236,6 @@ export class HistoryTreeDataProvider
|
||||
switch (item.t) {
|
||||
case "local":
|
||||
return item.initialInfo.start.getTime();
|
||||
case "remote":
|
||||
return item.remoteQuery.executionStartTime;
|
||||
case "variant-analysis":
|
||||
return item.variantAnalysis.executionStartTime;
|
||||
default:
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { RemoteQueryHistoryItem } from "../remote-queries/remote-query-history-item";
|
||||
import { VariantAnalysisHistoryItem } from "./variant-analysis-history-item";
|
||||
import { LocalQueryInfo } from "../query-results";
|
||||
import { assertNever } from "../pure/helpers-pure";
|
||||
@@ -8,17 +7,12 @@ import {
|
||||
getActionsWorkflowRunUrl as getVariantAnalysisActionsWorkflowRunUrl,
|
||||
} from "../remote-queries/shared/variant-analysis";
|
||||
|
||||
export type QueryHistoryInfo =
|
||||
| LocalQueryInfo
|
||||
| RemoteQueryHistoryItem
|
||||
| VariantAnalysisHistoryItem;
|
||||
export type QueryHistoryInfo = LocalQueryInfo | VariantAnalysisHistoryItem;
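Narrowing QueryHistoryInfo to two members is what forces the remaining case "remote" branches elsewhere in this commit to be deleted: with assertNever from ../pure/helpers-pure, any branch for a removed variant stops compiling. A small sketch, assuming the types above are in scope:

import { assertNever } from "../pure/helpers-pure";

function describeHistoryItem(item: QueryHistoryInfo): string {
  switch (item.t) {
    case "local":
      return "local query";
    case "variant-analysis":
      return "variant analysis";
    default:
      // If another variant (e.g. "remote") were ever part of the union again,
      // `item` would no longer be `never` here and this call would not compile.
      return assertNever(item);
  }
}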
|
||||
|
||||
export function getRawQueryName(item: QueryHistoryInfo): string {
|
||||
switch (item.t) {
|
||||
case "local":
|
||||
return item.getQueryName();
|
||||
case "remote":
|
||||
return item.remoteQuery.queryName;
|
||||
case "variant-analysis":
|
||||
return item.variantAnalysis.query.name;
|
||||
default:
|
||||
@@ -37,8 +31,6 @@ export function getQueryId(item: QueryHistoryInfo): string {
|
||||
switch (item.t) {
|
||||
case "local":
|
||||
return item.initialInfo.id;
|
||||
case "remote":
|
||||
return item.queryId;
|
||||
case "variant-analysis":
|
||||
return item.variantAnalysis.id.toString();
|
||||
default:
|
||||
@@ -50,8 +42,6 @@ export function getQueryText(item: QueryHistoryInfo): string {
|
||||
switch (item.t) {
|
||||
case "local":
|
||||
return item.initialInfo.queryText;
|
||||
case "remote":
|
||||
return item.remoteQuery.queryText;
|
||||
case "variant-analysis":
|
||||
return item.variantAnalysis.query.text;
|
||||
default:
|
||||
@@ -59,47 +49,23 @@ export function getQueryText(item: QueryHistoryInfo): string {
|
||||
}
|
||||
}
|
||||
|
||||
export function buildRepoLabel(
|
||||
item: RemoteQueryHistoryItem | VariantAnalysisHistoryItem,
|
||||
): string {
|
||||
if (item.t === "remote") {
|
||||
// Return the number of repositories queried if available. Otherwise, use the controller repository name.
|
||||
const repositoryCount = item.remoteQuery.repositoryCount;
|
||||
export function buildRepoLabel(item: VariantAnalysisHistoryItem): string {
|
||||
const totalScannedRepositoryCount =
|
||||
item.variantAnalysis.scannedRepos?.length ?? 0;
|
||||
const completedRepositoryCount =
|
||||
item.variantAnalysis.scannedRepos?.filter((repo) =>
|
||||
hasRepoScanCompleted(repo),
|
||||
).length ?? 0;
|
||||
|
||||
if (repositoryCount) {
|
||||
return pluralize(repositoryCount, "repository", "repositories");
|
||||
}
|
||||
return `${item.remoteQuery.controllerRepository.owner}/${item.remoteQuery.controllerRepository.name}`;
|
||||
} else if (item.t === "variant-analysis") {
|
||||
const totalScannedRepositoryCount =
|
||||
item.variantAnalysis.scannedRepos?.length ?? 0;
|
||||
const completedRepositoryCount =
|
||||
item.variantAnalysis.scannedRepos?.filter((repo) =>
|
||||
hasRepoScanCompleted(repo),
|
||||
).length ?? 0;
|
||||
|
||||
return `${completedRepositoryCount}/${pluralize(
|
||||
totalScannedRepositoryCount,
|
||||
"repository",
|
||||
"repositories",
|
||||
)}`; // e.g. "2/3 repositories"
|
||||
} else {
|
||||
assertNever(item);
|
||||
}
|
||||
return `${completedRepositoryCount}/${pluralize(
|
||||
totalScannedRepositoryCount,
|
||||
"repository",
|
||||
"repositories",
|
||||
)}`; // e.g. "2/3 repositories"
|
||||
}
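As a concrete illustration of the label format, with three scanned repositories of which two have completed, the expression above evaluates as follows (the counts are made up for the example; pluralize comes from ../pure/word):

import { pluralize } from "../pure/word";

const completedRepositoryCount = 2;
const totalScannedRepositoryCount = 3;

const label = `${completedRepositoryCount}/${pluralize(
  totalScannedRepositoryCount,
  "repository",
  "repositories",
)}`;
// label === "2/3 repositories"; with 1/1 it would read "1/1 repository".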
|
||||
|
||||
export function getActionsWorkflowRunUrl(
|
||||
item: RemoteQueryHistoryItem | VariantAnalysisHistoryItem,
|
||||
item: VariantAnalysisHistoryItem,
|
||||
): string {
|
||||
if (item.t === "remote") {
|
||||
const {
|
||||
actionsWorkflowRunId: workflowRunId,
|
||||
controllerRepository: { owner, name },
|
||||
} = item.remoteQuery;
|
||||
return `https://github.com/${owner}/${name}/actions/runs/${workflowRunId}`;
|
||||
} else if (item.t === "variant-analysis") {
|
||||
return getVariantAnalysisActionsWorkflowRunUrl(item.variantAnalysis);
|
||||
} else {
|
||||
assertNever(item);
|
||||
}
|
||||
return getVariantAnalysisActionsWorkflowRunUrl(item.variantAnalysis);
|
||||
}
|
||||
|
||||
@@ -53,9 +53,6 @@ import {
|
||||
import { pathExists } from "fs-extra";
|
||||
import { CliVersionConstraint } from "../cli";
|
||||
import { HistoryItemLabelProvider } from "./history-item-label-provider";
|
||||
import { cancelRemoteQuery } from "../remote-queries/gh-api/gh-actions-api-client";
|
||||
import { RemoteQueriesManager } from "../remote-queries/remote-queries-manager";
|
||||
import { RemoteQueryHistoryItem } from "../remote-queries/remote-query-history-item";
|
||||
import { ResultsView } from "../interface";
|
||||
import { WebviewReveal } from "../interface-utils";
|
||||
import { EvalLogViewer } from "../eval-log-viewer";
|
||||
@@ -66,7 +63,6 @@ import { QueryRunner } from "../queryRunner";
|
||||
import { VariantAnalysisManager } from "../remote-queries/variant-analysis-manager";
|
||||
import { VariantAnalysisHistoryItem } from "./variant-analysis-history-item";
|
||||
import { getTotalResultCount } from "../remote-queries/shared/variant-analysis";
|
||||
import { App } from "../common/app";
|
||||
import { HistoryTreeDataProvider } from "./history-tree-data-provider";
|
||||
import { redactableError } from "../pure/errors";
|
||||
|
||||
@@ -138,11 +134,9 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
readonly onDidCompleteQuery = this._onDidCompleteQuery.event;
|
||||
|
||||
constructor(
|
||||
private readonly app: App,
|
||||
private readonly qs: QueryRunner,
|
||||
private readonly dbm: DatabaseManager,
|
||||
private readonly localQueriesResultsView: ResultsView,
|
||||
private readonly remoteQueriesManager: RemoteQueriesManager,
|
||||
private readonly variantAnalysisManager: VariantAnalysisManager,
|
||||
private readonly evalLogViewer: EvalLogViewer,
|
||||
private readonly queryStorageDir: string,
|
||||
@@ -372,7 +366,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
);
|
||||
|
||||
this.registerQueryHistoryScrubber(queryHistoryConfigListener, this, ctx);
|
||||
this.registerToRemoteQueriesEvents();
|
||||
this.registerToVariantAnalysisEvents();
|
||||
}
|
||||
|
||||
@@ -477,57 +470,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
this.push(variantAnalysisRemovedSubscription);
|
||||
}
|
||||
|
||||
private registerToRemoteQueriesEvents() {
|
||||
const queryAddedSubscription = this.remoteQueriesManager.onRemoteQueryAdded(
|
||||
async (event) => {
|
||||
this.addQuery({
|
||||
t: "remote",
|
||||
status: QueryStatus.InProgress,
|
||||
completed: false,
|
||||
queryId: event.queryId,
|
||||
remoteQuery: event.query,
|
||||
});
|
||||
|
||||
await this.refreshTreeView();
|
||||
},
|
||||
);
|
||||
|
||||
const queryRemovedSubscription =
|
||||
this.remoteQueriesManager.onRemoteQueryRemoved(async (event) => {
|
||||
const item = this.treeDataProvider.allHistory.find(
|
||||
(i) => i.t === "remote" && i.queryId === event.queryId,
|
||||
);
|
||||
if (item) {
|
||||
await this.removeRemoteQuery(item as RemoteQueryHistoryItem);
|
||||
}
|
||||
});
|
||||
|
||||
const queryStatusUpdateSubscription =
|
||||
this.remoteQueriesManager.onRemoteQueryStatusUpdate(async (event) => {
|
||||
const item = this.treeDataProvider.allHistory.find(
|
||||
(i) => i.t === "remote" && i.queryId === event.queryId,
|
||||
);
|
||||
if (item) {
|
||||
const remoteQueryHistoryItem = item as RemoteQueryHistoryItem;
|
||||
remoteQueryHistoryItem.status = event.status;
|
||||
remoteQueryHistoryItem.failureReason = event.failureReason;
|
||||
remoteQueryHistoryItem.resultCount = event.resultCount;
|
||||
if (event.status === QueryStatus.Completed) {
|
||||
remoteQueryHistoryItem.completed = true;
|
||||
}
|
||||
await this.refreshTreeView();
|
||||
} else {
|
||||
void extLogger.log(
|
||||
"Variant analysis status update event received for unknown variant analysis",
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
this.push(queryAddedSubscription);
|
||||
this.push(queryRemovedSubscription);
|
||||
this.push(queryStatusUpdateSubscription);
|
||||
}
|
||||
|
||||
async readQueryHistory(): Promise<void> {
|
||||
void extLogger.log(
|
||||
`Reading cached query history from '${this.queryMetadataStorageLocation}'.`,
|
||||
@@ -538,13 +480,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
this.treeDataProvider.allHistory = history;
|
||||
await Promise.all(
|
||||
this.treeDataProvider.allHistory.map(async (item) => {
|
||||
if (item.t === "remote") {
|
||||
await this.remoteQueriesManager.rehydrateRemoteQuery(
|
||||
item.queryId,
|
||||
item.remoteQuery,
|
||||
item.status,
|
||||
);
|
||||
}
|
||||
if (item.t === "variant-analysis") {
|
||||
await this.variantAnalysisManager.rehydrateVariantAnalysis(
|
||||
item.variantAnalysis,
|
||||
@@ -586,9 +521,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
case "local":
|
||||
queryPath = finalSingleItem.initialInfo.queryPath;
|
||||
break;
|
||||
case "remote":
|
||||
queryPath = finalSingleItem.remoteQuery.queryFilePath;
|
||||
break;
|
||||
default:
|
||||
assertNever(finalSingleItem);
|
||||
}
|
||||
@@ -614,12 +546,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
return this.treeDataProvider.getCurrent();
|
||||
}
|
||||
|
||||
getRemoteQueryById(queryId: string): RemoteQueryHistoryItem | undefined {
|
||||
return this.treeDataProvider.allHistory.find(
|
||||
(i) => i.t === "remote" && i.queryId === queryId,
|
||||
) as RemoteQueryHistoryItem;
|
||||
}
|
||||
|
||||
async removeDeletedQueries() {
|
||||
await Promise.all(
|
||||
this.treeDataProvider.allHistory.map(async (item) => {
|
||||
@@ -656,8 +582,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
// We need to delete it from disk as well.
|
||||
await item.completedQuery?.query.deleteQuery();
|
||||
}
|
||||
} else if (item.t === "remote") {
|
||||
await this.removeRemoteQuery(item);
|
||||
} else if (item.t === "variant-analysis") {
|
||||
await this.removeVariantAnalysis(item);
|
||||
} else {
|
||||
@@ -674,20 +598,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
}
|
||||
|
||||
private async removeRemoteQuery(item: RemoteQueryHistoryItem): Promise<void> {
|
||||
// Remote queries can be removed locally, but not remotely.
|
||||
// The user must cancel the query on GitHub Actions explicitly.
|
||||
this.treeDataProvider.remove(item);
|
||||
void extLogger.log(`Deleted ${this.labelProvider.getLabel(item)}.`);
|
||||
if (item.status === QueryStatus.InProgress) {
|
||||
void extLogger.log(
|
||||
"The variant analysis is still running on GitHub Actions. To cancel there, you must go to the workflow run in your browser.",
|
||||
);
|
||||
}
|
||||
|
||||
await this.remoteQueriesManager.removeRemoteQuery(item.queryId);
|
||||
}
|
||||
|
||||
private async removeVariantAnalysis(
|
||||
item: VariantAnalysisHistoryItem,
|
||||
): Promise<void> {
|
||||
@@ -883,8 +793,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
if (queryHistoryItem.completedQuery) {
|
||||
return queryHistoryItem.completedQuery.query.querySaveDir;
|
||||
}
|
||||
} else if (queryHistoryItem.t === "remote") {
|
||||
return join(this.queryStorageDir, queryHistoryItem.queryId);
|
||||
} else if (queryHistoryItem.t === "variant-analysis") {
|
||||
return this.variantAnalysisManager.getVariantAnalysisStorageLocation(
|
||||
queryHistoryItem.variantAnalysis.id,
|
||||
@@ -915,12 +823,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
"timestamp",
|
||||
);
|
||||
}
|
||||
} else if (finalSingleItem.t === "remote") {
|
||||
externalFilePath = join(
|
||||
this.queryStorageDir,
|
||||
finalSingleItem.queryId,
|
||||
"timestamp",
|
||||
);
|
||||
} else if (finalSingleItem.t === "variant-analysis") {
|
||||
externalFilePath = join(
|
||||
this.variantAnalysisManager.getVariantAnalysisStorageLocation(
|
||||
@@ -1087,11 +989,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
if (item.status === QueryStatus.InProgress) {
|
||||
if (item.t === "local") {
|
||||
item.cancel();
|
||||
} else if (item.t === "remote") {
|
||||
void showAndLogInformationMessage(
|
||||
"Cancelling variant analysis. This may take a while.",
|
||||
);
|
||||
await cancelRemoteQuery(this.app.credentials, item.remoteQuery);
|
||||
} else if (item.t === "variant-analysis") {
|
||||
await commands.executeCommand(
|
||||
"codeQL.cancelVariantAnalysis",
|
||||
@@ -1293,12 +1190,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
return;
|
||||
}
|
||||
|
||||
if (finalSingleItem.t === "remote") {
|
||||
await commands.executeCommand(
|
||||
"codeQL.copyRepoList",
|
||||
finalSingleItem.queryId,
|
||||
);
|
||||
} else if (finalSingleItem.t === "variant-analysis") {
|
||||
if (finalSingleItem.t === "variant-analysis") {
|
||||
await commands.executeCommand(
|
||||
"codeQL.copyVariantAnalysisRepoList",
|
||||
finalSingleItem.variantAnalysis.id,
|
||||
@@ -1319,13 +1211,8 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
return;
|
||||
}
|
||||
|
||||
// Remote queries and variant analysis only
|
||||
if (finalSingleItem.t === "remote") {
|
||||
await commands.executeCommand(
|
||||
"codeQL.exportRemoteQueryResults",
|
||||
finalSingleItem.queryId,
|
||||
);
|
||||
} else if (finalSingleItem.t === "variant-analysis") {
|
||||
// Variant analysis only
|
||||
if (finalSingleItem.t === "variant-analysis") {
|
||||
await commands.executeCommand(
|
||||
"codeQL.exportVariantAnalysisResults",
|
||||
finalSingleItem.variantAnalysis.id,
|
||||
@@ -1558,10 +1445,10 @@ the file in the file explorer and dragging it into the workspace.`,
|
||||
WebviewReveal.Forced,
|
||||
false,
|
||||
);
|
||||
} else if (item.t === "remote") {
|
||||
await this.remoteQueriesManager.openRemoteQueryResults(item.queryId);
|
||||
} else if (item.t === "variant-analysis") {
|
||||
await this.variantAnalysisManager.showView(item.variantAnalysis.id);
|
||||
} else {
|
||||
assertNever(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,7 +10,6 @@ import {
|
||||
} from "./pure/helpers-pure";
|
||||
import { CompletedQueryInfo, LocalQueryInfo } from "./query-results";
|
||||
import { QueryHistoryInfo } from "./query-history/query-history-info";
|
||||
import { QueryStatus } from "./query-status";
|
||||
import { QueryEvaluationInfo } from "./run-queries-shared";
|
||||
import { QueryResultType } from "./pure/legacy-messages";
|
||||
import { redactableError } from "./pure/errors";
|
||||
@@ -33,58 +32,57 @@ export async function deserializeQueryHistory(
|
||||
}
|
||||
|
||||
const queries = obj.queries;
|
||||
const parsedQueries = queries.map((q: QueryHistoryInfo) => {
|
||||
// Need to explicitly set prototype since reading in from JSON will not
|
||||
// do this automatically. Note that we can't call the constructor here since
|
||||
// the constructor invokes extra logic that we don't want to do.
|
||||
if (q.t === "local") {
|
||||
Object.setPrototypeOf(q, LocalQueryInfo.prototype);
|
||||
const parsedQueries = queries
|
||||
// Remove remote queries, which are not supported anymore.
|
||||
.filter((q: QueryHistoryInfo | { t: "remote" }) => q.t !== "remote")
|
||||
.map((q: QueryHistoryInfo) => {
|
||||
// Need to explicitly set prototype since reading in from JSON will not
|
||||
// do this automatically. Note that we can't call the constructor here since
|
||||
// the constructor invokes extra logic that we don't want to do.
|
||||
if (q.t === "local") {
|
||||
Object.setPrototypeOf(q, LocalQueryInfo.prototype);
|
||||
|
||||
// Date instances are serialized as strings. Need to
|
||||
// convert them back to Date instances.
|
||||
(q.initialInfo as any).start = new Date(q.initialInfo.start);
|
||||
if (q.completedQuery) {
|
||||
// Again, need to explicitly set prototypes.
|
||||
Object.setPrototypeOf(q.completedQuery, CompletedQueryInfo.prototype);
|
||||
Object.setPrototypeOf(
|
||||
q.completedQuery.query,
|
||||
QueryEvaluationInfo.prototype,
|
||||
);
|
||||
// deserialized queries do not need to be disposed
|
||||
q.completedQuery.dispose = () => {
|
||||
/**/
|
||||
};
|
||||
// Date instances are serialized as strings. Need to
|
||||
// convert them back to Date instances.
|
||||
(q.initialInfo as any).start = new Date(q.initialInfo.start);
|
||||
if (q.completedQuery) {
|
||||
// Again, need to explicitly set prototypes.
|
||||
Object.setPrototypeOf(
|
||||
q.completedQuery,
|
||||
CompletedQueryInfo.prototype,
|
||||
);
|
||||
Object.setPrototypeOf(
|
||||
q.completedQuery.query,
|
||||
QueryEvaluationInfo.prototype,
|
||||
);
|
||||
// deserialized queries do not need to be disposed
|
||||
q.completedQuery.dispose = () => {
|
||||
/**/
|
||||
};
|
||||
|
||||
// Previously, there was a typo in the completedQuery type. There was a field
|
||||
// `sucessful` and it was renamed to `successful`. We need to handle this case.
|
||||
if ("sucessful" in q.completedQuery) {
|
||||
(q.completedQuery as any).successful = (
|
||||
q.completedQuery as any
|
||||
).sucessful;
|
||||
delete (q.completedQuery as any).sucessful;
|
||||
}
|
||||
// Previously, there was a typo in the completedQuery type. There was a field
|
||||
// `sucessful` and it was renamed to `successful`. We need to handle this case.
|
||||
if ("sucessful" in q.completedQuery) {
|
||||
(q.completedQuery as any).successful = (
|
||||
q.completedQuery as any
|
||||
).sucessful;
|
||||
delete (q.completedQuery as any).sucessful;
|
||||
}
|
||||
|
||||
if (!("successful" in q.completedQuery)) {
|
||||
(q.completedQuery as any).successful =
|
||||
q.completedQuery.result?.resultType === QueryResultType.SUCCESS;
|
||||
if (!("successful" in q.completedQuery)) {
|
||||
(q.completedQuery as any).successful =
|
||||
q.completedQuery.result?.resultType === QueryResultType.SUCCESS;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (q.t === "remote") {
|
||||
// A bug was introduced that didn't set the completed flag in query history
|
||||
// items. The following code makes sure that the flag is set in order to
|
||||
// "patch" older query history items.
|
||||
if (q.status === QueryStatus.Completed) {
|
||||
q.completed = true;
|
||||
}
|
||||
}
|
||||
return q;
|
||||
});
|
||||
return q;
|
||||
});
|
||||
|
||||
// filter out queries that have been deleted on disk
|
||||
// most likely another workspace has deleted them because the
|
||||
// queries aged out.
|
||||
return asyncFilter(parsedQueries, async (q) => {
|
||||
if (q.t === "remote" || q.t === "variant-analysis") {
|
||||
if (q.t === "variant-analysis") {
|
||||
// the deserializer doesn't know where the remote queries are stored
|
||||
// so we need to assume here that they exist. Later, we check to
|
||||
// see if they exist on disk.
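To make the effect of the new .filter(...) step concrete: when an older workspace still has t: "remote" entries persisted in its query history file, they are silently dropped during deserialization. A simplified sketch, with the data shape reduced to just the discriminant for illustration:

type PersistedEntry = { t: "local" | "remote" | "variant-analysis" };

const persisted: PersistedEntry[] = [
  { t: "local" },
  { t: "remote" }, // legacy remote-query entry, no longer supported
  { t: "variant-analysis" },
];

// Mirrors the filter above: legacy remote-query entries never reach the tree view.
const kept = persisted.filter((q) => q.t !== "remote");
// kept => [{ t: "local" }, { t: "variant-analysis" }]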
|
||||
|
||||
@@ -1,259 +0,0 @@
|
||||
import { pathExists } from "fs-extra";
|
||||
import { EOL } from "os";
|
||||
import { extname } from "path";
|
||||
import { CancellationToken } from "vscode";
|
||||
|
||||
import { Logger } from "../common";
|
||||
import { downloadArtifactFromLink } from "./gh-api/gh-actions-api-client";
|
||||
import { AnalysisSummary } from "./shared/remote-query-result";
|
||||
import {
|
||||
AnalysisResults,
|
||||
AnalysisAlert,
|
||||
AnalysisRawResults,
|
||||
} from "./shared/analysis-result";
|
||||
import { UserCancellationException } from "../commandRunner";
|
||||
import { sarifParser } from "../sarif-parser";
|
||||
import { extractAnalysisAlerts } from "./sarif-processing";
|
||||
import { CodeQLCliServer } from "../cli";
|
||||
import { extractRawResults } from "./bqrs-processing";
|
||||
import { asyncFilter, getErrorMessage } from "../pure/helpers-pure";
|
||||
import { createDownloadPath } from "./download-link";
|
||||
import { App } from "../common/app";
|
||||
|
||||
export class AnalysesResultsManager {
|
||||
// Store for the results of various analyses for each remote query.
|
||||
// The key is the queryId and is also the name of the directory where results are stored.
|
||||
private readonly analysesResults: Map<string, AnalysisResults[]>;
|
||||
|
||||
constructor(
|
||||
private readonly app: App,
|
||||
private readonly cliServer: CodeQLCliServer,
|
||||
readonly storagePath: string,
|
||||
private readonly logger: Logger,
|
||||
) {
|
||||
this.analysesResults = new Map();
|
||||
}
|
||||
|
||||
public async downloadAnalysisResults(
|
||||
analysisSummary: AnalysisSummary,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void>,
|
||||
): Promise<void> {
|
||||
if (this.isAnalysisInMemory(analysisSummary)) {
|
||||
// We already have the results for this analysis in memory, don't download again.
|
||||
return;
|
||||
}
|
||||
|
||||
void this.logger.log(
|
||||
`Downloading and processing results for ${analysisSummary.nwo}`,
|
||||
);
|
||||
|
||||
await this.downloadSingleAnalysisResults(analysisSummary, publishResults);
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the array analysis results. For each analysis results, if it is not downloaded yet,
|
||||
* it will be downloaded. If it is already downloaded, it will be loaded into memory.
|
||||
* If it is already in memory, this will be a no-op.
|
||||
*
|
||||
* @param allAnalysesToLoad List of analyses to ensure are downloaded and in memory
|
||||
* @param token Optional cancellation token
|
||||
* @param publishResults Optional function to publish the results after loading
|
||||
*/
|
||||
public async loadAnalysesResults(
|
||||
allAnalysesToLoad: AnalysisSummary[],
|
||||
token?: CancellationToken,
|
||||
publishResults: (
|
||||
analysesResults: AnalysisResults[],
|
||||
) => Promise<void> = () => Promise.resolve(),
|
||||
): Promise<void> {
|
||||
// Filter out analyses that we have already in memory.
|
||||
const analysesToDownload = allAnalysesToLoad.filter(
|
||||
(x) => !this.isAnalysisInMemory(x),
|
||||
);
|
||||
|
||||
void this.logger.log("Downloading and processing analyses results");
|
||||
|
||||
const batchSize = 3;
|
||||
const numOfBatches = Math.ceil(analysesToDownload.length / batchSize);
|
||||
const allFailures = [];
|
||||
|
||||
for (let i = 0; i < analysesToDownload.length; i += batchSize) {
|
||||
if (token?.isCancellationRequested) {
|
||||
throw new UserCancellationException(
|
||||
"Downloading of analyses results has been cancelled",
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
const batch = analysesToDownload.slice(i, i + batchSize);
|
||||
const batchTasks = batch.map((analysis) =>
|
||||
this.downloadSingleAnalysisResults(analysis, publishResults),
|
||||
);
|
||||
|
||||
const nwos = batch.map((a) => a.nwo).join(", ");
|
||||
void this.logger.log(
|
||||
`Downloading batch ${
|
||||
Math.floor(i / batchSize) + 1
|
||||
} of ${numOfBatches} (${nwos})`,
|
||||
);
|
||||
|
||||
const taskResults = await Promise.allSettled(batchTasks);
|
||||
const failedTasks = taskResults.filter(
|
||||
(x) => x.status === "rejected",
|
||||
) as PromiseRejectedResult[];
|
||||
if (failedTasks.length > 0) {
|
||||
const failures = failedTasks.map((t) => t.reason.message);
|
||||
failures.forEach((f) => void this.logger.log(f));
|
||||
allFailures.push(...failures);
|
||||
}
|
||||
}
|
||||
|
||||
if (allFailures.length > 0) {
|
||||
throw Error(allFailures.join(EOL));
|
||||
}
|
||||
}
|
||||
|
||||
public getAnalysesResults(queryId: string): AnalysisResults[] {
|
||||
return [...this.internalGetAnalysesResults(queryId)];
|
||||
}
|
||||
|
||||
private internalGetAnalysesResults(queryId: string): AnalysisResults[] {
|
||||
return this.analysesResults.get(queryId) || [];
|
||||
}
|
||||
|
||||
public removeAnalysesResults(queryId: string) {
|
||||
this.analysesResults.delete(queryId);
|
||||
}
|
||||
|
||||
private async downloadSingleAnalysisResults(
|
||||
analysis: AnalysisSummary,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void>,
|
||||
): Promise<void> {
|
||||
const analysisResults: AnalysisResults = {
|
||||
nwo: analysis.nwo,
|
||||
status: "InProgress",
|
||||
interpretedResults: [],
|
||||
resultCount: analysis.resultCount,
|
||||
starCount: analysis.starCount,
|
||||
lastUpdated: analysis.lastUpdated,
|
||||
};
|
||||
const queryId = analysis.downloadLink.queryId;
|
||||
const resultsForQuery = this.internalGetAnalysesResults(queryId);
|
||||
resultsForQuery.push(analysisResults);
|
||||
this.analysesResults.set(queryId, resultsForQuery);
|
||||
void publishResults([...resultsForQuery]);
|
||||
const pos = resultsForQuery.length - 1;
|
||||
|
||||
let artifactPath;
|
||||
try {
|
||||
artifactPath = await downloadArtifactFromLink(
|
||||
this.app.credentials,
|
||||
this.storagePath,
|
||||
analysis.downloadLink,
|
||||
);
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`Could not download the analysis results for ${
|
||||
analysis.nwo
|
||||
}: ${getErrorMessage(e)}`,
|
||||
);
|
||||
}
|
||||
|
||||
const fileLinkPrefix = this.createGitHubDotcomFileLinkPrefix(
|
||||
analysis.nwo,
|
||||
analysis.databaseSha,
|
||||
);
|
||||
|
||||
let newAnaysisResults: AnalysisResults;
|
||||
const fileExtension = extname(artifactPath);
|
||||
if (fileExtension === ".sarif") {
|
||||
const queryResults = await this.readSarifResults(
|
||||
artifactPath,
|
||||
fileLinkPrefix,
|
||||
);
|
||||
newAnaysisResults = {
|
||||
...analysisResults,
|
||||
interpretedResults: queryResults,
|
||||
status: "Completed",
|
||||
};
|
||||
} else if (fileExtension === ".bqrs") {
|
||||
const queryResults = await this.readBqrsResults(
|
||||
artifactPath,
|
||||
fileLinkPrefix,
|
||||
analysis.sourceLocationPrefix,
|
||||
);
|
||||
newAnaysisResults = {
|
||||
...analysisResults,
|
||||
rawResults: queryResults,
|
||||
status: "Completed",
|
||||
};
|
||||
} else {
|
||||
void this.logger.log(
|
||||
`Cannot download results. File type '${fileExtension}' not supported.`,
|
||||
);
|
||||
newAnaysisResults = {
|
||||
...analysisResults,
|
||||
status: "Failed",
|
||||
};
|
||||
}
|
||||
resultsForQuery[pos] = newAnaysisResults;
|
||||
void publishResults([...resultsForQuery]);
|
||||
}
|
||||
|
||||
public async loadDownloadedAnalyses(allAnalysesToCheck: AnalysisSummary[]) {
|
||||
// Find all analyses that are already downloaded.
|
||||
const allDownloadedAnalyses = await asyncFilter(allAnalysesToCheck, (x) =>
|
||||
this.isAnalysisDownloaded(x),
|
||||
);
|
||||
// Now, ensure that all of these analyses are in memory. Some may already be in memory. These are ignored.
|
||||
await this.loadAnalysesResults(allDownloadedAnalyses);
|
||||
}
|
||||
|
||||
private async isAnalysisDownloaded(
|
||||
analysis: AnalysisSummary,
|
||||
): Promise<boolean> {
|
||||
return await pathExists(
|
||||
createDownloadPath(this.storagePath, analysis.downloadLink),
|
||||
);
|
||||
}
|
||||
|
||||
private async readBqrsResults(
|
||||
filePath: string,
|
||||
fileLinkPrefix: string,
|
||||
sourceLocationPrefix: string,
|
||||
): Promise<AnalysisRawResults> {
|
||||
return await extractRawResults(
|
||||
this.cliServer,
|
||||
this.logger,
|
||||
filePath,
|
||||
fileLinkPrefix,
|
||||
sourceLocationPrefix,
|
||||
);
|
||||
}
|
||||
|
||||
private async readSarifResults(
|
||||
filePath: string,
|
||||
fileLinkPrefix: string,
|
||||
): Promise<AnalysisAlert[]> {
|
||||
const sarifLog = await sarifParser(filePath);
|
||||
|
||||
const processedSarif = extractAnalysisAlerts(sarifLog, fileLinkPrefix);
|
||||
if (processedSarif.errors.length) {
|
||||
void this.logger.log(
|
||||
`Error processing SARIF file: ${EOL}${processedSarif.errors.join(EOL)}`,
|
||||
);
|
||||
}
|
||||
|
||||
return processedSarif.alerts;
|
||||
}
|
||||
|
||||
private isAnalysisInMemory(analysis: AnalysisSummary): boolean {
|
||||
return this.internalGetAnalysesResults(analysis.downloadLink.queryId).some(
|
||||
(x) => x.nwo === analysis.nwo,
|
||||
);
|
||||
}
|
||||
|
||||
private createGitHubDotcomFileLinkPrefix(nwo: string, sha: string): string {
|
||||
return `https://github.com/${nwo}/blob/${sha}`;
|
||||
}
|
||||
}
|
||||
@@ -2,8 +2,8 @@ import { join } from "path";
|
||||
import { ensureDir, writeFile } from "fs-extra";
|
||||
|
||||
import {
|
||||
commands,
|
||||
CancellationToken,
|
||||
commands,
|
||||
Uri,
|
||||
ViewColumn,
|
||||
window,
|
||||
@@ -14,15 +14,11 @@ import { showInformationMessageWithAction } from "../helpers";
|
||||
import { extLogger } from "../common";
|
||||
import { QueryHistoryManager } from "../query-history/query-history-manager";
|
||||
import { createGist } from "./gh-api/gh-api-client";
|
||||
import { RemoteQueriesManager } from "./remote-queries-manager";
|
||||
import {
|
||||
generateMarkdown,
|
||||
generateVariantAnalysisMarkdown,
|
||||
MarkdownFile,
|
||||
RepositorySummary,
|
||||
} from "./remote-queries-markdown-generation";
|
||||
import { RemoteQuery } from "./remote-query";
|
||||
import { AnalysisResults, sumAnalysesResults } from "./shared/analysis-result";
|
||||
import { pluralize } from "../pure/word";
|
||||
import { VariantAnalysisManager } from "./variant-analysis-manager";
|
||||
import { assertNever } from "../pure/helpers-pure";
|
||||
@@ -51,12 +47,7 @@ export async function exportSelectedRemoteQueryResults(
|
||||
);
|
||||
}
|
||||
|
||||
if (queryHistoryItem.t === "remote") {
|
||||
return commands.executeCommand(
|
||||
"codeQL.exportRemoteQueryResults",
|
||||
queryHistoryItem.queryId,
|
||||
);
|
||||
} else if (queryHistoryItem.t === "variant-analysis") {
|
||||
if (queryHistoryItem.t === "variant-analysis") {
|
||||
return commands.executeCommand(
|
||||
"codeQL.exportVariantAnalysisResults",
|
||||
queryHistoryItem.variantAnalysis.id,
|
||||
@@ -66,73 +57,6 @@ export async function exportSelectedRemoteQueryResults(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Exports the results of the given remote query.
|
||||
* The user is prompted to select the export format.
|
||||
*/
|
||||
export async function exportRemoteQueryResults(
|
||||
queryHistoryManager: QueryHistoryManager,
|
||||
remoteQueriesManager: RemoteQueriesManager,
|
||||
queryId: string,
|
||||
credentials: Credentials,
|
||||
): Promise<void> {
|
||||
const queryHistoryItem = queryHistoryManager.getRemoteQueryById(queryId);
|
||||
if (!queryHistoryItem) {
|
||||
void extLogger.log(`Could not find query with id ${queryId}`);
|
||||
throw new Error(
|
||||
"There was an error when trying to retrieve variant analysis information",
|
||||
);
|
||||
}
|
||||
|
||||
if (!queryHistoryItem.completed) {
|
||||
throw new Error("Variant analysis results are not yet available.");
|
||||
}
|
||||
|
||||
void extLogger.log(
|
||||
`Exporting variant analysis results for query: ${queryHistoryItem.queryId}`,
|
||||
);
|
||||
const query = queryHistoryItem.remoteQuery;
|
||||
const analysesResults = remoteQueriesManager.getAnalysesResults(
|
||||
queryHistoryItem.queryId,
|
||||
);
|
||||
|
||||
const exportFormat = await determineExportFormat();
|
||||
if (!exportFormat) {
|
||||
return;
|
||||
}
|
||||
|
||||
const exportDirectory =
|
||||
await queryHistoryManager.getQueryHistoryItemDirectory(queryHistoryItem);
|
||||
const exportedResultsDirectory = join(exportDirectory, "exported-results");
|
||||
|
||||
await exportRemoteQueryAnalysisResults(
|
||||
exportedResultsDirectory,
|
||||
query,
|
||||
analysesResults,
|
||||
exportFormat,
|
||||
credentials,
|
||||
);
|
||||
}
|
||||
|
||||
export async function exportRemoteQueryAnalysisResults(
|
||||
exportedResultsPath: string,
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[],
|
||||
exportFormat: "gist" | "local",
|
||||
credentials: Credentials,
|
||||
) {
|
||||
const description = buildGistDescription(query, analysesResults);
|
||||
const markdownFiles = generateMarkdown(query, analysesResults, exportFormat);
|
||||
|
||||
await exportResults(
|
||||
exportedResultsPath,
|
||||
description,
|
||||
markdownFiles,
|
||||
exportFormat,
|
||||
credentials,
|
||||
);
|
||||
}
|
||||
|
||||
const MAX_VARIANT_ANALYSIS_EXPORT_PROGRESS_STEPS = 2;
|
||||
|
||||
/**
|
||||
@@ -396,22 +320,6 @@ export async function exportToGist(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds Gist description
|
||||
* Ex: Empty Block (Go) x results (y repositories)
|
||||
*/
|
||||
const buildGistDescription = (
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[],
|
||||
) => {
|
||||
const resultCount = sumAnalysesResults(analysesResults);
|
||||
const resultLabel = pluralize(resultCount, "result", "results");
|
||||
const repositoryLabel = query.repositoryCount
|
||||
? `(${pluralize(query.repositoryCount, "repository", "repositories")})`
|
||||
: "";
|
||||
return `${query.queryName} (${query.language}) ${resultLabel} ${repositoryLabel}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* Builds Gist description
|
||||
* Ex: Empty Block (Go) x results (y repositories)
|
||||
|
||||
@@ -1,134 +1,5 @@
|
||||
import { join } from "path";
|
||||
import { pathExists, readFile, writeFile } from "fs-extra";
|
||||
import {
|
||||
showAndLogExceptionWithTelemetry,
|
||||
showAndLogWarningMessage,
|
||||
tmpDir,
|
||||
} from "../../helpers";
|
||||
import { Credentials } from "../../common/authentication";
|
||||
import { extLogger } from "../../common";
|
||||
import { RemoteQueryWorkflowResult } from "../remote-query-workflow-result";
|
||||
import { DownloadLink, createDownloadPath } from "../download-link";
|
||||
import { RemoteQuery } from "../remote-query";
|
||||
import {
|
||||
RemoteQueryFailureIndexItem,
|
||||
RemoteQueryResultIndex,
|
||||
RemoteQuerySuccessIndexItem,
|
||||
} from "../remote-query-result-index";
|
||||
import { asError, getErrorMessage } from "../../pure/helpers-pure";
|
||||
import { unzipFile } from "../../pure/zip";
|
||||
import { VariantAnalysis } from "../shared/variant-analysis";
|
||||
import { redactableError } from "../../pure/errors";
|
||||
|
||||
export const RESULT_INDEX_ARTIFACT_NAME = "result-index";
|
||||
|
||||
interface ApiSuccessIndexItem {
|
||||
nwo: string;
|
||||
id: string;
|
||||
sha?: string;
|
||||
results_count: number;
|
||||
bqrs_file_size: number;
|
||||
sarif_file_size?: number;
|
||||
source_location_prefix: string;
|
||||
}
|
||||
|
||||
interface ApiFailureIndexItem {
|
||||
nwo: string;
|
||||
id: string;
|
||||
error: string;
|
||||
}
|
||||
|
||||
interface ApiResultIndex {
|
||||
successes: ApiSuccessIndexItem[];
|
||||
failures: ApiFailureIndexItem[];
|
||||
}
|
||||
|
||||
export async function getRemoteQueryIndex(
|
||||
credentials: Credentials,
|
||||
remoteQuery: RemoteQuery,
|
||||
): Promise<RemoteQueryResultIndex | undefined> {
|
||||
const controllerRepo = remoteQuery.controllerRepository;
|
||||
const owner = controllerRepo.owner;
|
||||
const repoName = controllerRepo.name;
|
||||
const workflowRunId = remoteQuery.actionsWorkflowRunId;
|
||||
|
||||
const workflowUri = `https://github.com/${owner}/${repoName}/actions/runs/${workflowRunId}`;
|
||||
const artifactsUrlPath = `/repos/${owner}/${repoName}/actions/artifacts`;
|
||||
|
||||
const artifactList = await listWorkflowRunArtifacts(
|
||||
credentials,
|
||||
owner,
|
||||
repoName,
|
||||
workflowRunId,
|
||||
);
|
||||
const resultIndexArtifactId = tryGetArtifactIDfromName(
|
||||
RESULT_INDEX_ARTIFACT_NAME,
|
||||
artifactList,
|
||||
);
|
||||
if (!resultIndexArtifactId) {
|
||||
return undefined;
|
||||
}
|
||||
const resultIndex = await getResultIndex(
|
||||
credentials,
|
||||
owner,
|
||||
repoName,
|
||||
resultIndexArtifactId,
|
||||
);
|
||||
|
||||
const successes = resultIndex?.successes.map((item) => {
|
||||
const artifactId = getArtifactIDfromName(
|
||||
item.id,
|
||||
workflowUri,
|
||||
artifactList,
|
||||
);
|
||||
|
||||
return {
|
||||
id: item.id.toString(),
|
||||
artifactId,
|
||||
nwo: item.nwo,
|
||||
sha: item.sha,
|
||||
resultCount: item.results_count,
|
||||
bqrsFileSize: item.bqrs_file_size,
|
||||
sarifFileSize: item.sarif_file_size,
|
||||
sourceLocationPrefix: item.source_location_prefix,
|
||||
} as RemoteQuerySuccessIndexItem;
|
||||
});
|
||||
|
||||
const failures = resultIndex?.failures.map((item) => {
|
||||
return {
|
||||
id: item.id.toString(),
|
||||
nwo: item.nwo,
|
||||
error: item.error,
|
||||
} as RemoteQueryFailureIndexItem;
|
||||
});
|
||||
|
||||
return {
|
||||
artifactsUrlPath,
|
||||
successes: successes || [],
|
||||
failures: failures || [],
|
||||
};
|
||||
}
|
||||
|
||||
export async function cancelRemoteQuery(
|
||||
credentials: Credentials,
|
||||
remoteQuery: RemoteQuery,
|
||||
): Promise<void> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
const {
|
||||
actionsWorkflowRunId,
|
||||
controllerRepository: { owner, name },
|
||||
} = remoteQuery;
|
||||
const response = await octokit.request(
|
||||
`POST /repos/${owner}/${name}/actions/runs/${actionsWorkflowRunId}/cancel`,
|
||||
);
|
||||
if (response.status >= 300) {
|
||||
throw new Error(
|
||||
`Error cancelling variant analysis: ${response.status} ${
|
||||
response?.data?.message || ""
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function cancelVariantAnalysis(
|
||||
credentials: Credentials,
|
||||
@@ -150,357 +21,3 @@ export async function cancelVariantAnalysis(
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function downloadArtifactFromLink(
|
||||
credentials: Credentials,
|
||||
storagePath: string,
|
||||
downloadLink: DownloadLink,
|
||||
): Promise<string> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const extractedPath = createDownloadPath(storagePath, downloadLink);
|
||||
|
||||
// first check if we already have the artifact
|
||||
if (!(await pathExists(extractedPath))) {
|
||||
// Download the zipped artifact.
|
||||
const response = await octokit.request(
|
||||
`GET ${downloadLink.urlPath}/zip`,
|
||||
{},
|
||||
);
|
||||
|
||||
const zipFilePath = createDownloadPath(storagePath, downloadLink, "zip");
|
||||
|
||||
await unzipBuffer(response.data as ArrayBuffer, zipFilePath, extractedPath);
|
||||
}
|
||||
return join(extractedPath, downloadLink.innerFilePath || "");
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether a specific artifact is present in the list of artifacts of a workflow run.
|
||||
* @param credentials Credentials for authenticating to the GitHub API.
|
||||
* @param owner
|
||||
* @param repo
|
||||
* @param workflowRunId The ID of the workflow run to get the artifact for.
|
||||
* @param artifactName The artifact name, as a string.
|
||||
* @returns A boolean indicating if the artifact is available.
|
||||
*/
|
||||
export async function isArtifactAvailable(
|
||||
credentials: Credentials,
|
||||
owner: string,
|
||||
repo: string,
|
||||
workflowRunId: number,
|
||||
artifactName: string,
|
||||
): Promise<boolean> {
|
||||
const artifactList = await listWorkflowRunArtifacts(
|
||||
credentials,
|
||||
owner,
|
||||
repo,
|
||||
workflowRunId,
|
||||
);
|
||||
|
||||
return tryGetArtifactIDfromName(artifactName, artifactList) !== undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads the result index artifact and extracts the result index items.
|
||||
* @param credentials Credentials for authenticating to the GitHub API.
|
||||
* @param owner
|
||||
* @param repo
|
||||
* @param workflowRunId The ID of the workflow run to get the result index for.
|
||||
* @returns An object containing the result index.
|
||||
*/
|
||||
async function getResultIndex(
|
||||
credentials: Credentials,
|
||||
owner: string,
|
||||
repo: string,
|
||||
artifactId: number,
|
||||
): Promise<ApiResultIndex | undefined> {
|
||||
const artifactPath = await downloadArtifact(
|
||||
credentials,
|
||||
owner,
|
||||
repo,
|
||||
artifactId,
|
||||
);
|
||||
const indexFilePath = join(artifactPath, "index.json");
|
||||
if (!(await pathExists(indexFilePath))) {
|
||||
void showAndLogWarningMessage(
|
||||
"Could not find an `index.json` file in the result artifact.",
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
const resultIndex = await readFile(join(artifactPath, "index.json"), "utf8");
|
||||
|
||||
try {
|
||||
return JSON.parse(resultIndex);
|
||||
} catch (error) {
|
||||
throw new Error(`Invalid result index file: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the status of a workflow run.
|
||||
* @param credentials Credentials for authenticating to the GitHub API.
|
||||
* @param owner
|
||||
* @param repo
|
||||
* @param workflowRunId The ID of the workflow run to get the result index for.
|
||||
* @returns The workflow run status.
|
||||
*/
|
||||
export async function getWorkflowStatus(
|
||||
credentials: Credentials,
|
||||
owner: string,
|
||||
repo: string,
|
||||
workflowRunId: number,
|
||||
): Promise<RemoteQueryWorkflowResult> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const workflowRun = await octokit.rest.actions.getWorkflowRun({
|
||||
owner,
|
||||
repo,
|
||||
run_id: workflowRunId,
|
||||
});
|
||||
|
||||
if (workflowRun.data.status === "completed") {
|
||||
if (workflowRun.data.conclusion === "success") {
|
||||
return { status: "CompletedSuccessfully" };
|
||||
} else {
|
||||
const error = getWorkflowError(workflowRun.data.conclusion);
|
||||
return { status: "CompletedUnsuccessfully", error };
|
||||
}
|
||||
}
|
||||
|
||||
return { status: "InProgress" };
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists the workflow run artifacts for the given workflow run ID.
|
||||
* @param credentials Credentials for authenticating to the GitHub API.
|
||||
* @param owner
|
||||
* @param repo
|
||||
* @param workflowRunId The ID of the workflow run to list artifacts for.
|
||||
* @returns An array of artifact details (including artifact name and ID).
|
||||
*/
|
||||
async function listWorkflowRunArtifacts(
|
||||
credentials: Credentials,
|
||||
owner: string,
|
||||
repo: string,
|
||||
workflowRunId: number,
|
||||
) {
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
// There are limits on the number of artifacts that are returned by the API
|
||||
// so we use paging to make sure we retrieve all of them.
|
||||
let morePages = true;
|
||||
let pageNum = 1;
|
||||
const allArtifacts = [];
|
||||
|
||||
while (morePages) {
|
||||
const response = await octokit.rest.actions.listWorkflowRunArtifacts({
|
||||
owner,
|
||||
repo,
|
||||
run_id: workflowRunId,
|
||||
per_page: 100,
|
||||
page: pageNum,
|
||||
});
|
||||
|
||||
allArtifacts.push(...response.data.artifacts);
|
||||
pageNum++;
|
||||
if (response.data.artifacts.length < 100) {
|
||||
morePages = false;
|
||||
}
|
||||
}
|
||||
|
||||
return allArtifacts;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param artifactName The artifact name, as a string.
|
||||
* @param artifacts An array of artifact details (from the "list workflow run artifacts" API response).
|
||||
* @returns The artifact ID corresponding to the given artifact name.
|
||||
*/
|
||||
function getArtifactIDfromName(
|
||||
artifactName: string,
|
||||
workflowUri: string,
|
||||
artifacts: Array<{ id: number; name: string }>,
|
||||
): number {
|
||||
const artifactId = tryGetArtifactIDfromName(artifactName, artifacts);
|
||||
|
||||
if (!artifactId) {
|
||||
const errorMessage = `Could not find artifact with name ${artifactName} in workflow ${workflowUri}.
|
||||
Please check whether the workflow run has successfully completed.`;
|
||||
throw Error(errorMessage);
|
||||
}
|
||||
|
||||
return artifactId;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param artifactName The artifact name, as a string.
|
||||
* @param artifacts An array of artifact details (from the "list workflow run artifacts" API response).
|
||||
* @returns The artifact ID corresponding to the given artifact name, if it exists.
|
||||
*/
|
||||
function tryGetArtifactIDfromName(
|
||||
artifactName: string,
|
||||
artifacts: Array<{ id: number; name: string }>,
|
||||
): number | undefined {
|
||||
const artifact = artifacts.find((a) => a.name === artifactName);
|
||||
|
||||
return artifact?.id;
|
||||
}
|
||||
|
||||
/**
 * Downloads an artifact from a workflow run.
 * @param credentials Credentials for authenticating to the GitHub API.
 * @param owner The owner of the repository the artifact belongs to.
 * @param repo The name of the repository the artifact belongs to.
 * @param artifactId The ID of the artifact to download.
 * @returns The path to the enclosing directory of the unzipped artifact.
 */
async function downloadArtifact(
  credentials: Credentials,
  owner: string,
  repo: string,
  artifactId: number,
): Promise<string> {
  const octokit = await credentials.getOctokit();
  const response = await octokit.rest.actions.downloadArtifact({
    owner,
    repo,
    artifact_id: artifactId,
    archive_format: "zip",
  });
  const artifactPath = join(tmpDir.name, `${artifactId}`);
  await unzipBuffer(
    response.data as ArrayBuffer,
    `${artifactPath}.zip`,
    artifactPath,
  );
  return artifactPath;
}

async function unzipBuffer(
  data: ArrayBuffer,
  filePath: string,
  destinationPath: string,
): Promise<void> {
  void extLogger.log(`Saving file to ${filePath}`);
  await writeFile(filePath, Buffer.from(data));

  void extLogger.log(`Unzipping file to ${destinationPath}`);
  await unzipFile(filePath, destinationPath);
}

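// Illustrative usage sketch: fetching a downloaded artifact and reading a JSON
// file out of the extracted directory. The "index.json" file name is a
// hypothetical example; downloadArtifact only guarantees that it returns the
// directory the zip archive was extracted into.
async function exampleReadArtifactJson(
  credentials: Credentials,
  owner: string,
  repo: string,
  artifactId: number,
): Promise<unknown> {
  const extractedDir = await downloadArtifact(credentials, owner, repo, artifactId);
  // fs-extra is already a dependency of this extension; loaded lazily here to
  // keep the sketch self-contained.
  const { readFile } = await import("fs-extra");
  const raw = await readFile(join(extractedDir, "index.json"), "utf8");
  return JSON.parse(raw);
}
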
function getWorkflowError(conclusion: string | null): string {
  if (!conclusion) {
    return "Workflow finished without a conclusion";
  }

  if (conclusion === "cancelled") {
    return "Variant analysis execution was cancelled.";
  }

  if (conclusion === "timed_out") {
    return "Variant analysis execution timed out.";
  }

  if (conclusion === "failure") {
    // TODO: Get the actual error from the workflow or potentially
    // from an artifact from the action itself.
    return "Variant analysis execution has failed.";
  }

  return `Unexpected variant analysis execution conclusion: ${conclusion}`;
}

const repositoriesMetadataQuery = `query Stars($repos: String!, $pageSize: Int!, $cursor: String) {
  search(
    query: $repos
    type: REPOSITORY
    first: $pageSize
    after: $cursor
  ) {
    edges {
      node {
        ... on Repository {
          name
          owner {
            login
          }
          stargazerCount
          updatedAt
        }
      }
      cursor
    }
  }
}`;

type RepositoriesMetadataQueryResponse = {
  search: {
    edges: Array<{
      cursor: string;
      node: {
        name: string;
        owner: {
          login: string;
        };
        stargazerCount: number;
        updatedAt: string; // Actually an ISO date string
      };
    }>;
  };
};

export type RepositoriesMetadata = Record<
  string,
  { starCount: number; lastUpdated: number }
>;

export async function getRepositoriesMetadata(
  credentials: Credentials,
  nwos: string[],
  pageSize = 100,
): Promise<RepositoriesMetadata> {
  const octokit = await credentials.getOctokit();
  const repos = `repo:${nwos.join(" repo:")} fork:true`;
  let cursor = null;
  const metadata: RepositoriesMetadata = {};
  try {
    do {
      const response: RepositoriesMetadataQueryResponse = await octokit.graphql(
        {
          query: repositoriesMetadataQuery,
          repos,
          pageSize,
          cursor,
        },
      );
      cursor =
        response.search.edges.length === pageSize
          ? response.search.edges[pageSize - 1].cursor
          : null;

      for (const edge of response.search.edges) {
        const node = edge.node;
        const owner = node.owner.login;
        const name = node.name;
        const starCount = node.stargazerCount;
        // lastUpdated is always negative since it happened in the past.
        const lastUpdated = new Date(node.updatedAt).getTime() - Date.now();
        metadata[`${owner}/${name}`] = {
          starCount,
          lastUpdated,
        };
      }
    } while (cursor);
  } catch (e) {
    void showAndLogExceptionWithTelemetry(
      redactableError(
        asError(e),
      )`Error retrieving repository metadata for variant analysis: ${getErrorMessage(
        e,
      )}`,
    );
  }

  return metadata;
}

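// Illustrative usage sketch: looking up star counts for a handful of
// repositories. The repository names are placeholders; the keys of the
// returned record are "owner/name" strings.
async function exampleLogStarCounts(credentials: Credentials): Promise<void> {
  const metadata = await getRepositoriesMetadata(credentials, [
    "octo-org/repo-a",
    "octo-org/repo-b",
  ]);
  for (const [nwo, { starCount, lastUpdated }] of Object.entries(metadata)) {
    // lastUpdated is a negative millisecond offset relative to Date.now().
    void extLogger.log(`${nwo}: ${starCount} stars, updated ${-lastUpdated}ms ago`);
  }
}
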
@@ -1,6 +1,5 @@
|
||||
import { OctokitResponse } from "@octokit/types/dist-types";
|
||||
import { Credentials } from "../../common/authentication";
|
||||
import { RemoteQueriesSubmission } from "../shared/remote-queries";
|
||||
import { VariantAnalysisSubmission } from "../shared/variant-analysis";
|
||||
import {
|
||||
VariantAnalysis,
|
||||
@@ -8,10 +7,6 @@ import {
|
||||
VariantAnalysisSubmissionRequest,
|
||||
} from "./variant-analysis";
|
||||
import { Repository } from "./repository";
|
||||
import {
|
||||
RemoteQueriesResponse,
|
||||
RemoteQueriesSubmissionRequest,
|
||||
} from "./remote-queries";
|
||||
|
||||
export async function submitVariantAnalysis(
|
||||
credentials: Credentials,
|
||||
@@ -116,40 +111,3 @@ export async function createGist(
|
||||
}
|
||||
return response.data.html_url;
|
||||
}
|
||||
|
||||
export async function submitRemoteQueries(
|
||||
credentials: Credentials,
|
||||
submissionDetails: RemoteQueriesSubmission,
|
||||
): Promise<RemoteQueriesResponse> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const {
|
||||
ref,
|
||||
language,
|
||||
repositories,
|
||||
repositoryLists,
|
||||
repositoryOwners,
|
||||
queryPack,
|
||||
controllerRepoId,
|
||||
} = submissionDetails;
|
||||
|
||||
const data: RemoteQueriesSubmissionRequest = {
|
||||
ref,
|
||||
language,
|
||||
repositories,
|
||||
repository_lists: repositoryLists,
|
||||
repository_owners: repositoryOwners,
|
||||
query_pack: queryPack,
|
||||
};
|
||||
|
||||
const response: OctokitResponse<RemoteQueriesResponse> =
|
||||
await octokit.request(
|
||||
"POST /repositories/:controllerRepoId/code-scanning/codeql/queries",
|
||||
{
|
||||
controllerRepoId,
|
||||
data,
|
||||
},
|
||||
);
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
export interface RemoteQueriesSubmissionRequest {
|
||||
ref: string;
|
||||
language: string;
|
||||
repositories?: string[];
|
||||
repository_lists?: string[];
|
||||
repository_owners?: string[];
|
||||
query_pack: string;
|
||||
}
|
||||
|
||||
export interface RemoteQueriesResponse {
|
||||
workflow_run_id: number;
|
||||
errors?: {
|
||||
invalid_repositories?: string[];
|
||||
repositories_without_database?: string[];
|
||||
private_repositories?: string[];
|
||||
cutoff_repositories?: string[];
|
||||
cutoff_repositories_count?: number;
|
||||
};
|
||||
repositories_queried: string[];
|
||||
}
|
||||
@@ -1,154 +0,0 @@
|
||||
import { EOL } from "os";
|
||||
import { Credentials } from "../common/authentication";
|
||||
import { RepositorySelection } from "./repository-selection";
|
||||
import { Repository } from "./shared/repository";
|
||||
import { RemoteQueriesResponse } from "./gh-api/remote-queries";
|
||||
import { submitRemoteQueries } from "./gh-api/gh-api-client";
|
||||
import {
|
||||
showAndLogErrorMessage,
|
||||
showAndLogExceptionWithTelemetry,
|
||||
showAndLogInformationMessage,
|
||||
} from "../helpers";
|
||||
import { asError, getErrorMessage } from "../pure/helpers-pure";
|
||||
import { pluralize } from "../pure/word";
|
||||
import { redactableError } from "../pure/errors";
|
||||
|
||||
export async function runRemoteQueriesApiRequest(
|
||||
credentials: Credentials,
|
||||
ref: string,
|
||||
language: string,
|
||||
repoSelection: RepositorySelection,
|
||||
controllerRepo: Repository,
|
||||
queryPackBase64: string,
|
||||
): Promise<void | RemoteQueriesResponse> {
|
||||
try {
|
||||
const response = await submitRemoteQueries(credentials, {
|
||||
ref,
|
||||
language,
|
||||
repositories: repoSelection.repositories,
|
||||
repositoryLists: repoSelection.repositoryLists,
|
||||
repositoryOwners: repoSelection.owners,
|
||||
queryPack: queryPackBase64,
|
||||
controllerRepoId: controllerRepo.id,
|
||||
});
|
||||
const { popupMessage, logMessage } = parseResponse(
|
||||
controllerRepo,
|
||||
response,
|
||||
);
|
||||
void showAndLogInformationMessage(popupMessage, {
|
||||
fullMessage: logMessage,
|
||||
});
|
||||
return response;
|
||||
} catch (error: any) {
|
||||
if (error.status === 404) {
|
||||
void showAndLogErrorMessage(
|
||||
`Controller repository was not found. Please make sure it's a valid repo name.${eol}`,
|
||||
);
|
||||
} else {
|
||||
void showAndLogExceptionWithTelemetry(
|
||||
redactableError(
|
||||
asError(error),
|
||||
)`Error submitting remote queries request: ${getErrorMessage(error)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const eol = EOL;
|
||||
const eol2 = EOL + EOL;
|
||||
|
||||
// exported for testing only
|
||||
export function parseResponse(
|
||||
controllerRepo: Repository,
|
||||
response: RemoteQueriesResponse,
|
||||
) {
|
||||
const repositoriesQueried = response.repositories_queried;
|
||||
const repositoryCount = repositoriesQueried.length;
|
||||
|
||||
const popupMessage = `Successfully scheduled runs on ${pluralize(
|
||||
repositoryCount,
|
||||
"repository",
|
||||
"repositories",
|
||||
)}. [Click here to see the progress](https://github.com/${
|
||||
controllerRepo.fullName
|
||||
}/actions/runs/${response.workflow_run_id}).${
|
||||
response.errors
|
||||
? `${eol2}Some repositories could not be scheduled. See extension log for details.`
|
||||
: ""
|
||||
}`;
|
||||
|
||||
let logMessage = `Successfully scheduled runs on ${pluralize(
|
||||
repositoryCount,
|
||||
"repository",
|
||||
"repositories",
|
||||
)}. See https://github.com/${controllerRepo.fullName}/actions/runs/${
|
||||
response.workflow_run_id
|
||||
}.`;
|
||||
logMessage += `${eol2}Repositories queried:${eol}${repositoriesQueried.join(
|
||||
", ",
|
||||
)}`;
|
||||
if (response.errors) {
|
||||
const {
|
||||
invalid_repositories,
|
||||
repositories_without_database,
|
||||
private_repositories,
|
||||
cutoff_repositories,
|
||||
cutoff_repositories_count,
|
||||
} = response.errors;
|
||||
logMessage += `${eol2}Some repositories could not be scheduled.`;
|
||||
if (invalid_repositories?.length) {
|
||||
logMessage += `${eol2}${pluralize(
|
||||
invalid_repositories.length,
|
||||
"repository",
|
||||
"repositories",
|
||||
)} invalid and could not be found:${eol}${invalid_repositories.join(
|
||||
", ",
|
||||
)}`;
|
||||
}
|
||||
if (repositories_without_database?.length) {
|
||||
logMessage += `${eol2}${pluralize(
|
||||
repositories_without_database.length,
|
||||
"repository",
|
||||
"repositories",
|
||||
)} did not have a CodeQL database available:${eol}${repositories_without_database.join(
|
||||
", ",
|
||||
)}`;
|
||||
logMessage += `${eol}For each public repository that has not yet been added to the database service, we will try to create a database next time the store is updated.`;
|
||||
}
|
||||
if (private_repositories?.length) {
|
||||
logMessage += `${eol2}${pluralize(
|
||||
private_repositories.length,
|
||||
"repository",
|
||||
"repositories",
|
||||
)} not public:${eol}${private_repositories.join(", ")}`;
|
||||
logMessage += `${eol}When using a public controller repository, only public repositories can be queried.`;
|
||||
}
|
||||
if (cutoff_repositories_count) {
|
||||
logMessage += `${eol2}${pluralize(
|
||||
cutoff_repositories_count,
|
||||
"repository",
|
||||
"repositories",
|
||||
)} over the limit for a single request`;
|
||||
if (cutoff_repositories) {
|
||||
logMessage += `:${eol}${cutoff_repositories.join(", ")}`;
|
||||
if (cutoff_repositories_count !== cutoff_repositories.length) {
|
||||
const moreRepositories =
|
||||
cutoff_repositories_count - cutoff_repositories.length;
|
||||
logMessage += `${eol}...${eol}And another ${pluralize(
|
||||
moreRepositories,
|
||||
"repository",
|
||||
"repositories",
|
||||
)}.`;
|
||||
}
|
||||
} else {
|
||||
logMessage += ".";
|
||||
}
|
||||
logMessage += `${eol}Repositories were selected based on how recently they had been updated.`;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
popupMessage,
|
||||
logMessage,
|
||||
};
|
||||
}
|
||||
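// Illustrative sketch of exercising the exported parseResponse helper with a
// hand-built response, in the spirit of the "exported for testing only" note
// above. All values are placeholders, and the controller repository literal is
// cast because only its fullName is needed here.
function exampleParseResponseUsage(): void {
  const response: RemoteQueriesResponse = {
    workflow_run_id: 42,
    repositories_queried: ["octo-org/repo-a", "octo-org/repo-b"],
    errors: {
      invalid_repositories: ["octo-org/does-not-exist"],
    },
  };
  const { popupMessage, logMessage } = parseResponse(
    { id: 1, fullName: "octo-org/controller" } as Repository,
    response,
  );
  // popupMessage links to the workflow run; logMessage additionally lists the
  // queried repositories and the ones that could not be scheduled.
  void popupMessage;
  void logMessage;
}
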
@@ -1,519 +0,0 @@
|
||||
import {
|
||||
CancellationToken,
|
||||
commands,
|
||||
EventEmitter,
|
||||
ExtensionContext,
|
||||
Uri,
|
||||
env,
|
||||
} from "vscode";
|
||||
import { nanoid } from "nanoid";
|
||||
import { join } from "path";
|
||||
import { writeFile, readFile, remove, pathExists } from "fs-extra";
|
||||
import { EOL } from "os";
|
||||
|
||||
import { CodeQLCliServer } from "../cli";
|
||||
import { ProgressCallback } from "../commandRunner";
|
||||
import {
|
||||
createTimestampFile,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogExceptionWithTelemetry,
|
||||
showAndLogInformationMessage,
|
||||
showInformationMessageWithAction,
|
||||
} from "../helpers";
|
||||
import { Logger } from "../common";
|
||||
import { prepareRemoteQueryRun } from "./run-remote-query";
|
||||
import { RemoteQueriesView } from "./remote-queries-view";
|
||||
import { buildRemoteQueryEntity, RemoteQuery } from "./remote-query";
|
||||
import { RemoteQueriesMonitor } from "./remote-queries-monitor";
|
||||
import {
|
||||
getRemoteQueryIndex,
|
||||
getRepositoriesMetadata,
|
||||
RepositoriesMetadata,
|
||||
} from "./gh-api/gh-actions-api-client";
|
||||
import { RemoteQueryResultIndex } from "./remote-query-result-index";
|
||||
import {
|
||||
RemoteQueryResult,
|
||||
sumAnalysisSummariesResults,
|
||||
} from "./remote-query-result";
|
||||
import { DownloadLink } from "./download-link";
|
||||
import { AnalysesResultsManager } from "./analyses-results-manager";
|
||||
import { asError, assertNever, getErrorMessage } from "../pure/helpers-pure";
|
||||
import { QueryStatus } from "../query-status";
|
||||
import { DisposableObject } from "../pure/disposable-object";
|
||||
import { AnalysisResults } from "./shared/analysis-result";
|
||||
import { runRemoteQueriesApiRequest } from "./remote-queries-api";
|
||||
import { App } from "../common/app";
|
||||
import { redactableError } from "../pure/errors";
|
||||
|
||||
const autoDownloadMaxSize = 300 * 1024;
|
||||
const autoDownloadMaxCount = 100;
|
||||
|
||||
const noop = () => {
|
||||
/* do nothing */
|
||||
};
|
||||
|
||||
export interface NewQueryEvent {
|
||||
queryId: string;
|
||||
query: RemoteQuery;
|
||||
}
|
||||
|
||||
export interface RemovedQueryEvent {
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
export interface UpdatedQueryStatusEvent {
|
||||
queryId: string;
|
||||
status: QueryStatus;
|
||||
failureReason?: string;
|
||||
repositoryCount?: number;
|
||||
resultCount?: number;
|
||||
}
|
||||
|
||||
export class RemoteQueriesManager extends DisposableObject {
|
||||
public readonly onRemoteQueryAdded;
|
||||
public readonly onRemoteQueryRemoved;
|
||||
public readonly onRemoteQueryStatusUpdate;
|
||||
|
||||
private readonly remoteQueryAddedEventEmitter;
|
||||
private readonly remoteQueryRemovedEventEmitter;
|
||||
private readonly remoteQueryStatusUpdateEventEmitter;
|
||||
|
||||
private readonly remoteQueriesMonitor: RemoteQueriesMonitor;
|
||||
private readonly analysesResultsManager: AnalysesResultsManager;
|
||||
private readonly view: RemoteQueriesView;
|
||||
|
||||
constructor(
|
||||
ctx: ExtensionContext,
|
||||
private readonly app: App,
|
||||
private readonly cliServer: CodeQLCliServer,
|
||||
private readonly storagePath: string,
|
||||
logger: Logger,
|
||||
) {
|
||||
super();
|
||||
this.analysesResultsManager = new AnalysesResultsManager(
|
||||
app,
|
||||
cliServer,
|
||||
storagePath,
|
||||
logger,
|
||||
);
|
||||
this.view = new RemoteQueriesView(ctx, logger, this.analysesResultsManager);
|
||||
this.remoteQueriesMonitor = new RemoteQueriesMonitor(logger);
|
||||
|
||||
this.remoteQueryAddedEventEmitter = this.push(
|
||||
new EventEmitter<NewQueryEvent>(),
|
||||
);
|
||||
this.remoteQueryRemovedEventEmitter = this.push(
|
||||
new EventEmitter<RemovedQueryEvent>(),
|
||||
);
|
||||
this.remoteQueryStatusUpdateEventEmitter = this.push(
|
||||
new EventEmitter<UpdatedQueryStatusEvent>(),
|
||||
);
|
||||
this.onRemoteQueryAdded = this.remoteQueryAddedEventEmitter.event;
|
||||
this.onRemoteQueryRemoved = this.remoteQueryRemovedEventEmitter.event;
|
||||
this.onRemoteQueryStatusUpdate =
|
||||
this.remoteQueryStatusUpdateEventEmitter.event;
|
||||
|
||||
this.push(this.view);
|
||||
}
|
||||
|
||||
public async rehydrateRemoteQuery(
|
||||
queryId: string,
|
||||
query: RemoteQuery,
|
||||
status: QueryStatus,
|
||||
) {
|
||||
if (!(await this.queryRecordExists(queryId))) {
|
||||
// In this case, the query was deleted from disk, most likely because it was purged
|
||||
// by another workspace.
|
||||
this.remoteQueryRemovedEventEmitter.fire({ queryId });
|
||||
} else if (status === QueryStatus.InProgress) {
|
||||
// In this case, last time we checked, the query was still in progress.
|
||||
// We need to set up the monitor to check for completion.
|
||||
void commands.executeCommand("codeQL.monitorRemoteQuery", queryId, query);
|
||||
}
|
||||
}
|
||||
|
||||
public async removeRemoteQuery(queryId: string) {
|
||||
this.analysesResultsManager.removeAnalysesResults(queryId);
|
||||
await this.removeStorageDirectory(queryId);
|
||||
}
|
||||
|
||||
public async openRemoteQueryResults(queryId: string) {
|
||||
try {
|
||||
const remoteQuery = (await this.retrieveJsonFile(
|
||||
queryId,
|
||||
"query.json",
|
||||
)) as RemoteQuery;
|
||||
const remoteQueryResult = (await this.retrieveJsonFile(
|
||||
queryId,
|
||||
"query-result.json",
|
||||
)) as RemoteQueryResult;
|
||||
|
||||
// Open results in the background
|
||||
void this.openResults(remoteQuery, remoteQueryResult).then(
|
||||
noop,
|
||||
(e: unknown) =>
|
||||
void showAndLogExceptionWithTelemetry(
|
||||
redactableError(
|
||||
asError(e),
|
||||
)`Could not open query results. ${getErrorMessage(e)}`,
|
||||
),
|
||||
);
|
||||
} catch (e) {
|
||||
void showAndLogExceptionWithTelemetry(
|
||||
redactableError(
|
||||
asError(e),
|
||||
)`Could not open query results. ${getErrorMessage(e)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public async runRemoteQuery(
|
||||
uri: Uri | undefined,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
): Promise<void> {
|
||||
const {
|
||||
actionBranch,
|
||||
base64Pack,
|
||||
repoSelection,
|
||||
queryFile,
|
||||
queryMetadata,
|
||||
controllerRepo,
|
||||
queryStartTime,
|
||||
language,
|
||||
} = await prepareRemoteQueryRun(
|
||||
this.cliServer,
|
||||
this.app.credentials,
|
||||
uri,
|
||||
progress,
|
||||
token,
|
||||
);
|
||||
|
||||
const apiResponse = await runRemoteQueriesApiRequest(
|
||||
this.app.credentials,
|
||||
actionBranch,
|
||||
language,
|
||||
repoSelection,
|
||||
controllerRepo,
|
||||
base64Pack,
|
||||
);
|
||||
|
||||
if (!apiResponse) {
|
||||
return;
|
||||
}
|
||||
|
||||
const workflowRunId = apiResponse.workflow_run_id;
|
||||
const repositoryCount = apiResponse.repositories_queried.length;
|
||||
const query = await buildRemoteQueryEntity(
|
||||
queryFile,
|
||||
queryMetadata,
|
||||
controllerRepo,
|
||||
queryStartTime,
|
||||
workflowRunId,
|
||||
language,
|
||||
repositoryCount,
|
||||
);
|
||||
|
||||
const queryId = this.createQueryId();
|
||||
|
||||
await this.prepareStorageDirectory(queryId);
|
||||
await this.storeJsonFile(queryId, "query.json", query);
|
||||
|
||||
this.remoteQueryAddedEventEmitter.fire({ queryId, query });
|
||||
void commands.executeCommand("codeQL.monitorRemoteQuery", queryId, query);
|
||||
}
|
||||
|
||||
public async monitorRemoteQuery(
|
||||
queryId: string,
|
||||
remoteQuery: RemoteQuery,
|
||||
cancellationToken: CancellationToken,
|
||||
): Promise<void> {
|
||||
const queryWorkflowResult = await this.remoteQueriesMonitor.monitorQuery(
|
||||
remoteQuery,
|
||||
this.app.credentials,
|
||||
cancellationToken,
|
||||
);
|
||||
|
||||
const executionEndTime = Date.now();
|
||||
|
||||
if (queryWorkflowResult.status === "CompletedSuccessfully") {
|
||||
await this.downloadAvailableResults(
|
||||
queryId,
|
||||
remoteQuery,
|
||||
executionEndTime,
|
||||
);
|
||||
} else if (queryWorkflowResult.status === "CompletedUnsuccessfully") {
|
||||
if (queryWorkflowResult.error?.includes("cancelled")) {
|
||||
// Workflow was cancelled on the server
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({
|
||||
queryId,
|
||||
status: QueryStatus.Failed,
|
||||
failureReason: "Cancelled",
|
||||
});
|
||||
await this.downloadAvailableResults(
|
||||
queryId,
|
||||
remoteQuery,
|
||||
executionEndTime,
|
||||
);
|
||||
void showAndLogInformationMessage("Variant analysis was cancelled");
|
||||
} else {
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({
|
||||
queryId,
|
||||
status: QueryStatus.Failed,
|
||||
failureReason: queryWorkflowResult.error,
|
||||
});
|
||||
void showAndLogErrorMessage(
|
||||
`Variant analysis execution failed. Error: ${queryWorkflowResult.error}`,
|
||||
);
|
||||
}
|
||||
} else if (queryWorkflowResult.status === "Cancelled") {
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({
|
||||
queryId,
|
||||
status: QueryStatus.Failed,
|
||||
failureReason: "Cancelled",
|
||||
});
|
||||
await this.downloadAvailableResults(
|
||||
queryId,
|
||||
remoteQuery,
|
||||
executionEndTime,
|
||||
);
|
||||
void showAndLogInformationMessage("Variant analysis was cancelled");
|
||||
} else if (queryWorkflowResult.status === "InProgress") {
|
||||
// Should not get here. Only including this to ensure `assertNever` uses proper type checking.
|
||||
void showAndLogExceptionWithTelemetry(
|
||||
redactableError`Unexpected status: ${queryWorkflowResult.status}`,
|
||||
);
|
||||
} else {
|
||||
// Ensure all cases are covered
|
||||
assertNever(queryWorkflowResult.status);
|
||||
}
|
||||
}
|
||||
|
||||
public async autoDownloadRemoteQueryResults(
|
||||
queryResult: RemoteQueryResult,
|
||||
token: CancellationToken,
|
||||
): Promise<void> {
|
||||
const analysesToDownload = queryResult.analysisSummaries
|
||||
.filter((a) => a.fileSizeInBytes < autoDownloadMaxSize)
|
||||
.slice(0, autoDownloadMaxCount)
|
||||
.map((a) => ({
|
||||
nwo: a.nwo,
|
||||
databaseSha: a.databaseSha,
|
||||
resultCount: a.resultCount,
|
||||
sourceLocationPrefix: a.sourceLocationPrefix,
|
||||
downloadLink: a.downloadLink,
|
||||
fileSize: String(a.fileSizeInBytes),
|
||||
}));
|
||||
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
analysesToDownload,
|
||||
token,
|
||||
(results) => this.view.setAnalysisResults(results, queryResult.queryId),
|
||||
);
|
||||
}
|
||||
|
||||
public async copyRemoteQueryRepoListToClipboard(queryId: string) {
|
||||
const queryResult = await this.getRemoteQueryResult(queryId);
|
||||
const repos = queryResult.analysisSummaries
|
||||
.filter((a) => a.resultCount > 0)
|
||||
.map((a) => a.nwo);
|
||||
|
||||
if (repos.length > 0) {
|
||||
const text = [
|
||||
'"new-repo-list": [',
|
||||
...repos.slice(0, -1).map((repo) => ` "${repo}",`),
|
||||
` "${repos[repos.length - 1]}"`,
|
||||
"]",
|
||||
];
|
||||
|
||||
await env.clipboard.writeText(text.join(EOL));
|
||||
}
|
||||
}
|
||||
|
||||
private mapQueryResult(
|
||||
executionEndTime: number,
|
||||
resultIndex: RemoteQueryResultIndex,
|
||||
queryId: string,
|
||||
metadata: RepositoriesMetadata,
|
||||
): RemoteQueryResult {
|
||||
const analysisSummaries = resultIndex.successes.map((item) => ({
|
||||
nwo: item.nwo,
|
||||
databaseSha: item.sha || "HEAD",
|
||||
resultCount: item.resultCount,
|
||||
sourceLocationPrefix: item.sourceLocationPrefix,
|
||||
fileSizeInBytes: item.sarifFileSize
|
||||
? item.sarifFileSize
|
||||
: item.bqrsFileSize,
|
||||
starCount: metadata[item.nwo]?.starCount,
|
||||
lastUpdated: metadata[item.nwo]?.lastUpdated,
|
||||
downloadLink: {
|
||||
id: item.artifactId.toString(),
|
||||
urlPath: `${resultIndex.artifactsUrlPath}/${item.artifactId}`,
|
||||
innerFilePath: item.sarifFileSize ? "results.sarif" : "results.bqrs",
|
||||
queryId,
|
||||
} as DownloadLink,
|
||||
}));
|
||||
const analysisFailures = resultIndex.failures.map((item) => ({
|
||||
nwo: item.nwo,
|
||||
error: item.error,
|
||||
}));
|
||||
|
||||
return {
|
||||
executionEndTime,
|
||||
analysisSummaries,
|
||||
analysisFailures,
|
||||
queryId,
|
||||
};
|
||||
}
|
||||
|
||||
public async openResults(query: RemoteQuery, queryResult: RemoteQueryResult) {
|
||||
await this.view.showResults(query, queryResult);
|
||||
}
|
||||
|
||||
private async askToOpenResults(
|
||||
query: RemoteQuery,
|
||||
queryResult: RemoteQueryResult,
|
||||
): Promise<void> {
|
||||
const totalResultCount = sumAnalysisSummariesResults(
|
||||
queryResult.analysisSummaries,
|
||||
);
|
||||
const totalRepoCount = queryResult.analysisSummaries.length;
|
||||
const message = `Query "${query.queryName}" was run on ${totalRepoCount} repositories and returned ${totalResultCount} results`;
|
||||
|
||||
const shouldOpenView = await showInformationMessageWithAction(
|
||||
message,
|
||||
"View",
|
||||
);
|
||||
if (shouldOpenView) {
|
||||
await this.openResults(query, queryResult);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a unique id for this query, suitable for determining the storage location for the downloaded query artifacts.
|
||||
* @returns A unique id for this query.
|
||||
*/
|
||||
private createQueryId(): string {
|
||||
return nanoid();
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares a directory for storing analysis results for a single query run.
|
||||
* This directory contains a timestamp file, which will be
|
||||
* used by the query history manager to determine when the directory
|
||||
* should be deleted.
|
||||
*
|
||||
*/
|
||||
private async prepareStorageDirectory(queryId: string): Promise<void> {
|
||||
await createTimestampFile(join(this.storagePath, queryId));
|
||||
}
|
||||
|
||||
private async getRemoteQueryResult(
|
||||
queryId: string,
|
||||
): Promise<RemoteQueryResult> {
|
||||
return await this.retrieveJsonFile<RemoteQueryResult>(
|
||||
queryId,
|
||||
"query-result.json",
|
||||
);
|
||||
}
|
||||
|
||||
private async storeJsonFile<T>(
|
||||
queryId: string,
|
||||
fileName: string,
|
||||
obj: T,
|
||||
): Promise<void> {
|
||||
const filePath = join(this.storagePath, queryId, fileName);
|
||||
await writeFile(filePath, JSON.stringify(obj, null, 2), "utf8");
|
||||
}
|
||||
|
||||
private async retrieveJsonFile<T>(
|
||||
queryId: string,
|
||||
fileName: string,
|
||||
): Promise<T> {
|
||||
const filePath = join(this.storagePath, queryId, fileName);
|
||||
return JSON.parse(await readFile(filePath, "utf8"));
|
||||
}
|
||||
|
||||
private async removeStorageDirectory(queryId: string): Promise<void> {
|
||||
const filePath = join(this.storagePath, queryId);
|
||||
await remove(filePath);
|
||||
}
|
||||
|
||||
private async queryRecordExists(queryId: string): Promise<boolean> {
|
||||
const filePath = join(this.storagePath, queryId);
|
||||
return await pathExists(filePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether there's a result index artifact available for the given query.
|
||||
* If so, sets the query status to `Completed` and auto-downloads the results.
|
||||
*/
|
||||
private async downloadAvailableResults(
|
||||
queryId: string,
|
||||
remoteQuery: RemoteQuery,
|
||||
executionEndTime: number,
|
||||
): Promise<void> {
|
||||
const resultIndex = await getRemoteQueryIndex(
|
||||
this.app.credentials,
|
||||
remoteQuery,
|
||||
);
|
||||
if (resultIndex) {
|
||||
const metadata = await this.getRepositoriesMetadata(resultIndex);
|
||||
const queryResult = this.mapQueryResult(
|
||||
executionEndTime,
|
||||
resultIndex,
|
||||
queryId,
|
||||
metadata,
|
||||
);
|
||||
const resultCount = sumAnalysisSummariesResults(
|
||||
queryResult.analysisSummaries,
|
||||
);
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({
|
||||
queryId,
|
||||
status: QueryStatus.Completed,
|
||||
repositoryCount: queryResult.analysisSummaries.length,
|
||||
resultCount,
|
||||
});
|
||||
|
||||
await this.storeJsonFile(queryId, "query-result.json", queryResult);
|
||||
|
||||
// Kick off auto-download of results in the background.
|
||||
void commands.executeCommand(
|
||||
"codeQL.autoDownloadRemoteQueryResults",
|
||||
queryResult,
|
||||
);
|
||||
|
||||
// Ask if the user wants to open the results in the background.
|
||||
void this.askToOpenResults(remoteQuery, queryResult).then(
|
||||
noop,
|
||||
(e: unknown) =>
|
||||
void showAndLogExceptionWithTelemetry(
|
||||
redactableError(
|
||||
asError(e),
|
||||
)`Could not open query results. ${getErrorMessage(e)}`,
|
||||
),
|
||||
);
|
||||
} else {
|
||||
const controllerRepo = `${remoteQuery.controllerRepository.owner}/${remoteQuery.controllerRepository.name}`;
|
||||
const workflowRunUrl = `https://github.com/${controllerRepo}/actions/runs/${remoteQuery.actionsWorkflowRunId}`;
|
||||
void showAndLogExceptionWithTelemetry(
|
||||
redactableError`There was an issue retrieving the result for the query [${remoteQuery.queryName}](${workflowRunUrl}).`,
|
||||
);
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({
|
||||
queryId,
|
||||
status: QueryStatus.Failed,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private async getRepositoriesMetadata(resultIndex: RemoteQueryResultIndex) {
|
||||
const nwos = resultIndex.successes.map((s) => s.nwo);
|
||||
return await getRepositoriesMetadata(this.app.credentials, nwos);
|
||||
}
|
||||
|
||||
// Pulled from the analysis results manager, so that we can get access to
|
||||
// analyses results from the "export results" command.
|
||||
public getAnalysesResults(queryId: string): AnalysisResults[] {
|
||||
return [...this.analysesResultsManager.getAnalysesResults(queryId)];
|
||||
}
|
||||
}
|
||||
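// Illustrative sketch of the text that copyRemoteQueryRepoListToClipboard
// (above) places on the clipboard for two repositories with results; the
// repository names are placeholders.
const exampleRepoListClipboardText = [
  '"new-repo-list": [',
  '  "octo-org/repo-a",',
  '  "octo-org/repo-b"',
  "]",
].join(EOL);
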
@@ -3,14 +3,11 @@ import { tryGetRemoteLocation } from "../pure/bqrs-utils";
|
||||
import { createRemoteFileRef } from "../pure/location-link-utils";
|
||||
import { parseHighlightedLine, shouldHighlightLine } from "../pure/sarif-utils";
|
||||
import { convertNonPrintableChars } from "../text-utils";
|
||||
import { RemoteQuery } from "./remote-query";
|
||||
import {
|
||||
AnalysisAlert,
|
||||
AnalysisRawResults,
|
||||
AnalysisResults,
|
||||
CodeSnippet,
|
||||
FileLink,
|
||||
getAnalysisResultCount,
|
||||
HighlightedRegion,
|
||||
} from "./shared/analysis-result";
|
||||
import {
|
||||
@@ -27,54 +24,6 @@ export interface MarkdownFile {
|
||||
content: string[]; // Each array item is a line of the markdown file.
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates markdown files with variant analysis results.
|
||||
*/
|
||||
export function generateMarkdown(
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[],
|
||||
linkType: MarkdownLinkType,
|
||||
): MarkdownFile[] {
|
||||
const resultsFiles: MarkdownFile[] = [];
|
||||
// Generate summary file with links to individual files
|
||||
const summaryFile: MarkdownFile = generateMarkdownSummary(query);
|
||||
for (const analysisResult of analysesResults) {
|
||||
const resultsCount = getAnalysisResultCount(analysisResult);
|
||||
if (resultsCount === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Append nwo and results count to the summary table
|
||||
const nwo = analysisResult.nwo;
|
||||
const fileName = createFileName(nwo);
|
||||
const link = createRelativeLink(fileName, linkType);
|
||||
summaryFile.content.push(
|
||||
`| ${nwo} | [${resultsCount} result(s)](${link}) |`,
|
||||
);
|
||||
|
||||
// Generate individual markdown file for each repository
|
||||
const resultsFileContent = [`### ${analysisResult.nwo}`, ""];
|
||||
for (const interpretedResult of analysisResult.interpretedResults) {
|
||||
const individualResult = generateMarkdownForInterpretedResult(
|
||||
interpretedResult,
|
||||
query.language,
|
||||
);
|
||||
resultsFileContent.push(...individualResult);
|
||||
}
|
||||
if (analysisResult.rawResults) {
|
||||
const rawResultTable = generateMarkdownForRawResults(
|
||||
analysisResult.rawResults,
|
||||
);
|
||||
resultsFileContent.push(...rawResultTable);
|
||||
}
|
||||
resultsFiles.push({
|
||||
fileName,
|
||||
content: resultsFileContent,
|
||||
});
|
||||
}
|
||||
return [summaryFile, ...resultsFiles];
|
||||
}
|
||||
|
||||
export interface RepositorySummary {
|
||||
fileName: string;
|
||||
repository: RepositoryWithMetadata;
|
||||
@@ -153,27 +102,6 @@ export async function generateVariantAnalysisMarkdown(
|
||||
};
|
||||
}
|
||||
|
||||
export function generateMarkdownSummary(query: RemoteQuery): MarkdownFile {
|
||||
const lines: string[] = [];
|
||||
// Title
|
||||
lines.push(`### Results for "${query.queryName}"`, "");
|
||||
|
||||
// Expandable section containing query text
|
||||
const queryCodeBlock = ["```ql", ...query.queryText.split("\n"), "```"];
|
||||
lines.push(...buildExpandableMarkdownSection("Query", queryCodeBlock));
|
||||
|
||||
// Padding between sections
|
||||
lines.push("<br />", "");
|
||||
|
||||
// Summary table
|
||||
lines.push("### Summary", "", "| Repository | Results |", "| --- | --- |");
|
||||
// nwo and result count will be appended to this table
|
||||
return {
|
||||
fileName: "_summary",
|
||||
content: lines,
|
||||
};
|
||||
}
|
||||
|
||||
export function generateVariantAnalysisMarkdownSummary(
|
||||
variantAnalysis: VariantAnalysis,
|
||||
summaries: RepositorySummary[],
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
import * as vscode from "vscode";
|
||||
import { Logger } from "../common";
|
||||
import { Credentials } from "../common/authentication";
|
||||
import { sleep } from "../pure/time";
|
||||
import {
|
||||
getWorkflowStatus,
|
||||
isArtifactAvailable,
|
||||
RESULT_INDEX_ARTIFACT_NAME,
|
||||
} from "./gh-api/gh-actions-api-client";
|
||||
import { RemoteQuery } from "./remote-query";
|
||||
import { RemoteQueryWorkflowResult } from "./remote-query-workflow-result";
|
||||
|
||||
export class RemoteQueriesMonitor {
|
||||
// With a sleep of 5 seconds, the maximum number of attempts takes
|
||||
// us to just over 2 days' worth of monitoring.
|
||||
private static readonly maxAttemptCount = 17280;
|
||||
private static readonly sleepTime = 5000;
|
||||
|
||||
constructor(private readonly logger: Logger) {}
|
||||
|
||||
public async monitorQuery(
|
||||
remoteQuery: RemoteQuery,
|
||||
credentials: Credentials,
|
||||
cancellationToken: vscode.CancellationToken,
|
||||
): Promise<RemoteQueryWorkflowResult> {
|
||||
let attemptCount = 0;
|
||||
|
||||
while (attemptCount <= RemoteQueriesMonitor.maxAttemptCount) {
|
||||
await sleep(RemoteQueriesMonitor.sleepTime);
|
||||
|
||||
if (cancellationToken && cancellationToken.isCancellationRequested) {
|
||||
return { status: "Cancelled" };
|
||||
}
|
||||
|
||||
const workflowStatus = await getWorkflowStatus(
|
||||
credentials,
|
||||
remoteQuery.controllerRepository.owner,
|
||||
remoteQuery.controllerRepository.name,
|
||||
remoteQuery.actionsWorkflowRunId,
|
||||
);
|
||||
|
||||
// Even if the workflow indicates it has completed, artifacts
|
||||
// might still take a while to become available. So we need to
|
||||
// check for the artifact before we can declare the workflow
|
||||
// as having completed.
|
||||
if (workflowStatus.status === "CompletedSuccessfully") {
|
||||
const resultIndexAvailable = await isArtifactAvailable(
|
||||
credentials,
|
||||
remoteQuery.controllerRepository.owner,
|
||||
remoteQuery.controllerRepository.name,
|
||||
remoteQuery.actionsWorkflowRunId,
|
||||
RESULT_INDEX_ARTIFACT_NAME,
|
||||
);
|
||||
|
||||
if (resultIndexAvailable) {
|
||||
return workflowStatus;
|
||||
}
|
||||
|
||||
// We don't have a result-index yet, so we'll keep monitoring.
|
||||
} else if (workflowStatus.status !== "InProgress") {
|
||||
return workflowStatus;
|
||||
}
|
||||
|
||||
attemptCount++;
|
||||
}
|
||||
|
||||
void this.logger.log("Variant analysis monitoring timed out after 2 days");
|
||||
return { status: "Cancelled" };
|
||||
}
|
||||
}
|
||||
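// Illustrative usage sketch: driving the monitor until the workflow settles.
// The logger, credentials and remoteQuery values are assumed to be supplied by
// the caller; cancellation comes from a token source the caller owns.
async function exampleMonitor(
  logger: Logger,
  credentials: Credentials,
  remoteQuery: RemoteQuery,
): Promise<void> {
  const tokenSource = new vscode.CancellationTokenSource();
  const monitor = new RemoteQueriesMonitor(logger);
  const result = await monitor.monitorQuery(
    remoteQuery,
    credentials,
    tokenSource.token,
  );
  void logger.log(`Monitoring finished with status ${result.status}`);
}
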
@@ -1,299 +0,0 @@
|
||||
import {
|
||||
ExtensionContext,
|
||||
window as Window,
|
||||
ViewColumn,
|
||||
Uri,
|
||||
workspace,
|
||||
commands,
|
||||
} from "vscode";
|
||||
import { basename } from "path";
|
||||
|
||||
import {
|
||||
ToRemoteQueriesMessage,
|
||||
FromRemoteQueriesMessage,
|
||||
RemoteQueryDownloadAnalysisResultsMessage,
|
||||
RemoteQueryDownloadAllAnalysesResultsMessage,
|
||||
} from "../pure/interface-types";
|
||||
import { Logger } from "../common";
|
||||
import { assertNever } from "../pure/helpers-pure";
|
||||
import {
|
||||
AnalysisSummary,
|
||||
RemoteQueryResult,
|
||||
sumAnalysisSummariesResults,
|
||||
} from "./remote-query-result";
|
||||
import { RemoteQuery } from "./remote-query";
|
||||
import {
|
||||
AnalysisSummary as AnalysisResultViewModel,
|
||||
RemoteQueryResult as RemoteQueryResultViewModel,
|
||||
} from "./shared/remote-query-result";
|
||||
import { showAndLogWarningMessage } from "../helpers";
|
||||
import { URLSearchParams } from "url";
|
||||
import { SHOW_QUERY_TEXT_MSG } from "../query-history/query-history-manager";
|
||||
import { AnalysesResultsManager } from "./analyses-results-manager";
|
||||
import { AnalysisResults } from "./shared/analysis-result";
|
||||
import { humanizeUnit } from "../pure/time";
|
||||
import { AbstractWebview, WebviewPanelConfig } from "../abstract-webview";
|
||||
import { telemetryListener } from "../telemetry";
|
||||
|
||||
export class RemoteQueriesView extends AbstractWebview<
|
||||
ToRemoteQueriesMessage,
|
||||
FromRemoteQueriesMessage
|
||||
> {
|
||||
private currentQueryId: string | undefined;
|
||||
|
||||
constructor(
|
||||
ctx: ExtensionContext,
|
||||
private readonly logger: Logger,
|
||||
private readonly analysesResultsManager: AnalysesResultsManager,
|
||||
) {
|
||||
super(ctx);
|
||||
this.panelLoadedCallBacks.push(() => {
|
||||
void logger.log("Variant analysis results view loaded");
|
||||
});
|
||||
}
|
||||
|
||||
async showResults(query: RemoteQuery, queryResult: RemoteQueryResult) {
|
||||
const panel = await this.getPanel();
|
||||
panel.reveal(undefined, true);
|
||||
|
||||
await this.waitForPanelLoaded();
|
||||
const model = this.buildViewModel(query, queryResult);
|
||||
this.currentQueryId = queryResult.queryId;
|
||||
|
||||
await this.postMessage({
|
||||
t: "setRemoteQueryResult",
|
||||
queryResult: model,
|
||||
});
|
||||
|
||||
// Ensure all pre-downloaded artifacts are loaded into memory
|
||||
await this.analysesResultsManager.loadDownloadedAnalyses(
|
||||
model.analysisSummaries,
|
||||
);
|
||||
|
||||
await this.setAnalysisResults(
|
||||
this.analysesResultsManager.getAnalysesResults(queryResult.queryId),
|
||||
queryResult.queryId,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds up a model tailored to the view based on the query and result domain entities.
|
||||
* The data is cleaned up, sorted where necessary, and transformed to a format that
|
||||
* the view model can use.
|
||||
* @param query Information about the query that was run.
|
||||
* @param queryResult The result of the query.
|
||||
* @returns A fully created view model.
|
||||
*/
|
||||
private buildViewModel(
|
||||
query: RemoteQuery,
|
||||
queryResult: RemoteQueryResult,
|
||||
): RemoteQueryResultViewModel {
|
||||
const queryFileName = basename(query.queryFilePath);
|
||||
const totalResultCount = sumAnalysisSummariesResults(
|
||||
queryResult.analysisSummaries,
|
||||
);
|
||||
const executionDuration = this.getDuration(
|
||||
queryResult.executionEndTime,
|
||||
query.executionStartTime,
|
||||
);
|
||||
const analysisSummaries = this.buildAnalysisSummaries(
|
||||
queryResult.analysisSummaries,
|
||||
);
|
||||
const totalRepositoryCount = queryResult.analysisSummaries.length;
|
||||
const affectedRepositories = queryResult.analysisSummaries.filter(
|
||||
(r) => r.resultCount > 0,
|
||||
);
|
||||
|
||||
return {
|
||||
queryId: queryResult.queryId,
|
||||
queryTitle: query.queryName,
|
||||
queryFileName,
|
||||
queryFilePath: query.queryFilePath,
|
||||
queryText: query.queryText,
|
||||
language: query.language,
|
||||
workflowRunUrl: `https://github.com/${query.controllerRepository.owner}/${query.controllerRepository.name}/actions/runs/${query.actionsWorkflowRunId}`,
|
||||
totalRepositoryCount,
|
||||
affectedRepositoryCount: affectedRepositories.length,
|
||||
totalResultCount,
|
||||
executionTimestamp: this.formatDate(query.executionStartTime),
|
||||
executionDuration,
|
||||
analysisSummaries,
|
||||
analysisFailures: queryResult.analysisFailures,
|
||||
};
|
||||
}
|
||||
|
||||
protected getPanelConfig(): WebviewPanelConfig {
|
||||
return {
|
||||
viewId: "remoteQueriesView",
|
||||
title: "CodeQL Query Results",
|
||||
viewColumn: ViewColumn.Active,
|
||||
preserveFocus: true,
|
||||
view: "remote-queries",
|
||||
additionalOptions: {
|
||||
localResourceRoots: [Uri.file(this.analysesResultsManager.storagePath)],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
protected onPanelDispose(): void {
|
||||
this.currentQueryId = undefined;
|
||||
}
|
||||
|
||||
protected async onMessage(msg: FromRemoteQueriesMessage): Promise<void> {
|
||||
switch (msg.t) {
|
||||
case "viewLoaded":
|
||||
this.onWebViewLoaded();
|
||||
break;
|
||||
case "remoteQueryError":
|
||||
void this.logger.log(`Variant analysis error: ${msg.error}`);
|
||||
break;
|
||||
case "openFile":
|
||||
await this.openFile(msg.filePath);
|
||||
break;
|
||||
case "openVirtualFile":
|
||||
await this.openVirtualFile(msg.queryText);
|
||||
break;
|
||||
case "copyRepoList":
|
||||
await commands.executeCommand("codeQL.copyRepoList", msg.queryId);
|
||||
break;
|
||||
case "remoteQueryDownloadAnalysisResults":
|
||||
await this.downloadAnalysisResults(msg);
|
||||
break;
|
||||
case "remoteQueryDownloadAllAnalysesResults":
|
||||
await this.downloadAllAnalysesResults(msg);
|
||||
break;
|
||||
case "remoteQueryExportResults":
|
||||
await commands.executeCommand(
|
||||
"codeQL.exportRemoteQueryResults",
|
||||
msg.queryId,
|
||||
);
|
||||
break;
|
||||
case "telemetry":
|
||||
telemetryListener?.sendUIInteraction(msg.action);
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
}
|
||||
}
|
||||
|
||||
private async openFile(filePath: string) {
|
||||
try {
|
||||
const textDocument = await workspace.openTextDocument(filePath);
|
||||
await Window.showTextDocument(textDocument, ViewColumn.One);
|
||||
} catch (error) {
|
||||
void showAndLogWarningMessage(`Could not open file: ${filePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
private async openVirtualFile(text: string) {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
queryText: encodeURIComponent(SHOW_QUERY_TEXT_MSG + text),
|
||||
});
|
||||
const uri = Uri.parse(
|
||||
`remote-query:query-text.ql?${params.toString()}`,
|
||||
true,
|
||||
);
|
||||
const doc = await workspace.openTextDocument(uri);
|
||||
await Window.showTextDocument(doc, { preview: false });
|
||||
} catch (error) {
|
||||
void showAndLogWarningMessage("Could not open query text");
|
||||
}
|
||||
}
|
||||
|
||||
private async downloadAnalysisResults(
|
||||
msg: RemoteQueryDownloadAnalysisResultsMessage,
|
||||
): Promise<void> {
|
||||
const queryId = this.currentQueryId;
|
||||
await this.analysesResultsManager.downloadAnalysisResults(
|
||||
msg.analysisSummary,
|
||||
(results) => this.setAnalysisResults(results, queryId),
|
||||
);
|
||||
}
|
||||
|
||||
private async downloadAllAnalysesResults(
|
||||
msg: RemoteQueryDownloadAllAnalysesResultsMessage,
|
||||
): Promise<void> {
|
||||
const queryId = this.currentQueryId;
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
msg.analysisSummaries,
|
||||
undefined,
|
||||
(results) => this.setAnalysisResults(results, queryId),
|
||||
);
|
||||
}
|
||||
|
||||
public async setAnalysisResults(
|
||||
analysesResults: AnalysisResults[],
|
||||
queryId: string | undefined,
|
||||
): Promise<void> {
|
||||
if (this.panel?.active && this.currentQueryId === queryId) {
|
||||
await this.postMessage({
|
||||
t: "setAnalysesResults",
|
||||
analysesResults,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private getDuration(endTime: number, startTime: number): string {
  const diffInMs = endTime - startTime;
  return humanizeUnit(diffInMs);
}
|
||||
|
||||
private formatDate = (millis: number): string => {
|
||||
const d = new Date(millis);
|
||||
const datePart = d.toLocaleDateString(undefined, {
|
||||
day: "numeric",
|
||||
month: "short",
|
||||
});
|
||||
const timePart = d.toLocaleTimeString(undefined, {
|
||||
hour: "numeric",
|
||||
minute: "numeric",
|
||||
hour12: true,
|
||||
});
|
||||
return `${datePart} at ${timePart}`;
|
||||
};
|
||||
|
||||
private formatFileSize(bytes: number): string {
|
||||
const kb = bytes / 1024;
|
||||
const mb = kb / 1024;
|
||||
const gb = mb / 1024;
|
||||
|
||||
if (bytes < 1024) {
|
||||
return `${bytes} bytes`;
|
||||
} else if (kb < 1024) {
|
||||
return `${kb.toFixed(2)} KB`;
|
||||
} else if (mb < 1024) {
|
||||
return `${mb.toFixed(2)} MB`;
|
||||
} else {
|
||||
return `${gb.toFixed(2)} GB`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds up a list of analysis summaries, in a data structure tailored to the view.
|
||||
* @param analysisSummaries The summaries of specific analyses.
|
||||
* @returns A fully created view model.
|
||||
*/
|
||||
private buildAnalysisSummaries(
|
||||
analysisSummaries: AnalysisSummary[],
|
||||
): AnalysisResultViewModel[] {
|
||||
const filteredAnalysisSummaries = analysisSummaries.filter(
|
||||
(r) => r.resultCount > 0,
|
||||
);
|
||||
|
||||
const sortedAnalysisSummaries = filteredAnalysisSummaries.sort(
|
||||
(a, b) => b.resultCount - a.resultCount,
|
||||
);
|
||||
|
||||
return sortedAnalysisSummaries.map((analysisResult) => ({
|
||||
nwo: analysisResult.nwo,
|
||||
databaseSha: analysisResult.databaseSha || "HEAD",
|
||||
resultCount: analysisResult.resultCount,
|
||||
downloadLink: analysisResult.downloadLink,
|
||||
sourceLocationPrefix: analysisResult.sourceLocationPrefix,
|
||||
fileSize: this.formatFileSize(analysisResult.fileSizeInBytes),
|
||||
starCount: analysisResult.starCount,
|
||||
lastUpdated: analysisResult.lastUpdated,
|
||||
}));
|
||||
}
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
import { QueryStatus } from "../query-status";
|
||||
import { RemoteQuery } from "./remote-query";
|
||||
|
||||
/**
|
||||
* Information about a remote query.
|
||||
*/
|
||||
export interface RemoteQueryHistoryItem {
|
||||
readonly t: "remote";
|
||||
failureReason?: string;
|
||||
resultCount?: number;
|
||||
status: QueryStatus;
|
||||
completed: boolean;
|
||||
readonly queryId: string;
|
||||
remoteQuery: RemoteQuery;
|
||||
userSpecifiedLabel?: string;
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
export interface RemoteQueryResultIndex {
|
||||
artifactsUrlPath: string;
|
||||
successes: RemoteQuerySuccessIndexItem[];
|
||||
failures: RemoteQueryFailureIndexItem[];
|
||||
}
|
||||
|
||||
export interface RemoteQuerySuccessIndexItem {
|
||||
id: string;
|
||||
artifactId: number;
|
||||
nwo: string;
|
||||
sha?: string;
|
||||
resultCount: number;
|
||||
bqrsFileSize: number;
|
||||
sarifFileSize?: number;
|
||||
sourceLocationPrefix: string;
|
||||
}
|
||||
|
||||
export interface RemoteQueryFailureIndexItem {
|
||||
id: string;
|
||||
artifactId: number;
|
||||
nwo: string;
|
||||
error: string;
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
import { DownloadLink } from "./download-link";
|
||||
import { AnalysisFailure } from "./shared/analysis-failure";
|
||||
|
||||
export interface RemoteQueryResult {
|
||||
executionEndTime: number; // Can't use a Date here since it needs to be serialized and deserialized.
|
||||
analysisSummaries: AnalysisSummary[];
|
||||
analysisFailures: AnalysisFailure[];
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
export interface AnalysisSummary {
|
||||
nwo: string;
|
||||
databaseSha: string;
|
||||
resultCount: number;
|
||||
sourceLocationPrefix: string;
|
||||
downloadLink: DownloadLink;
|
||||
fileSizeInBytes: number;
|
||||
starCount?: number;
|
||||
lastUpdated?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sums up the number of results for all repos queried via a remote query.
|
||||
*/
|
||||
export const sumAnalysisSummariesResults = (
|
||||
analysisSummaries: AnalysisSummary[],
|
||||
): number => {
|
||||
return analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
|
||||
};
|
||||
@@ -1,8 +0,0 @@
|
||||
import { RemoteQuery } from "./remote-query";
|
||||
import { VariantAnalysis } from "./shared/variant-analysis";
|
||||
|
||||
export interface RemoteQuerySubmissionResult {
|
||||
queryDirPath?: string;
|
||||
query?: RemoteQuery;
|
||||
variantAnalysis?: VariantAnalysis;
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
export type RemoteQueryWorkflowStatus =
|
||||
| "InProgress"
|
||||
| "CompletedSuccessfully"
|
||||
| "CompletedUnsuccessfully"
|
||||
| "Cancelled";
|
||||
|
||||
export interface RemoteQueryWorkflowResult {
|
||||
status: RemoteQueryWorkflowStatus;
|
||||
error?: string;
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
import { readFile } from "fs-extra";
|
||||
import { Repository as RemoteRepository } from "./repository";
|
||||
import { QueryMetadata } from "../pure/interface-types";
|
||||
import { getQueryName } from "./run-remote-query";
|
||||
import { Repository } from "./shared/repository";
|
||||
|
||||
export interface RemoteQuery {
|
||||
queryName: string;
|
||||
queryFilePath: string;
|
||||
queryText: string;
|
||||
language: string;
|
||||
controllerRepository: RemoteRepository;
|
||||
executionStartTime: number; // Use number here since it needs to be serialized and deserialized.
|
||||
actionsWorkflowRunId: number;
|
||||
repositoryCount: number;
|
||||
}
|
||||
|
||||
export async function buildRemoteQueryEntity(
|
||||
queryFilePath: string,
|
||||
queryMetadata: QueryMetadata | undefined,
|
||||
controllerRepo: Repository,
|
||||
queryStartTime: number,
|
||||
workflowRunId: number,
|
||||
language: string,
|
||||
repositoryCount: number,
|
||||
): Promise<RemoteQuery> {
|
||||
const queryName = getQueryName(queryMetadata, queryFilePath);
|
||||
const queryText = await readFile(queryFilePath, "utf8");
|
||||
const [owner, name] = controllerRepo.fullName.split("/");
|
||||
|
||||
return {
|
||||
queryName,
|
||||
queryFilePath,
|
||||
queryText,
|
||||
language,
|
||||
controllerRepository: {
|
||||
owner,
|
||||
name,
|
||||
},
|
||||
executionStartTime: queryStartTime,
|
||||
actionsWorkflowRunId: workflowRunId,
|
||||
repositoryCount,
|
||||
};
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
export interface RemoteQueriesSubmission {
|
||||
ref: string;
|
||||
language: string;
|
||||
repositories?: string[];
|
||||
repositoryLists?: string[];
|
||||
repositoryOwners?: string[];
|
||||
queryPack: string;
|
||||
|
||||
controllerRepoId: number;
|
||||
}
|
||||
@@ -124,7 +124,7 @@ InterpretedResults.args = {
|
||||
resultCount: 198,
|
||||
interpretedResults: analysesResults.analysesResults.find(
|
||||
(v) => v.nwo === "facebook/create-react-app",
|
||||
)?.interpretedResults as unknown as AnalysisAlert[],
|
||||
)?.interpretedResults as AnalysisAlert[],
|
||||
};
|
||||
|
||||
export const RawResults = Template.bind({});
|
||||
@@ -132,7 +132,7 @@ RawResults.args = {
|
||||
...InterpretedResults.args,
|
||||
interpretedResults: undefined,
|
||||
resultCount: 1,
|
||||
rawResults: rawResults as unknown as AnalysisRawResults,
|
||||
rawResults: rawResults as AnalysisRawResults,
|
||||
};
|
||||
|
||||
export const SkippedOnlyFullName = Template.bind({});
|
||||
|
||||
@@ -38,7 +38,7 @@ const interpretedResultsForRepo = (
|
||||
nwo: string,
|
||||
): AnalysisAlert[] | undefined => {
|
||||
return analysesResults.analysesResults.find((v) => v.nwo === nwo)
|
||||
?.interpretedResults as unknown as AnalysisAlert[];
|
||||
?.interpretedResults as AnalysisAlert[];
|
||||
};
|
||||
|
||||
export const Example = Template.bind({});
|
||||
|
||||
@@ -31,7 +31,7 @@ export const CodeFlowsDropdown = ({
|
||||
return (
|
||||
<VSCodeDropdown
|
||||
onChange={
|
||||
handleChange as unknown as ((e: Event) => unknown) &
|
||||
handleChange as ((e: Event) => unknown) &
|
||||
React.FormEventHandler<HTMLElement>
|
||||
}
|
||||
>
|
||||
|
||||
@@ -1,45 +0,0 @@
|
||||
import { nanoid } from "nanoid";
|
||||
import { RemoteQueryHistoryItem } from "../../../src/remote-queries/remote-query-history-item";
|
||||
import { QueryStatus } from "../../../src/query-status";
|
||||
|
||||
export function createMockRemoteQueryHistoryItem({
|
||||
date = new Date("2022-01-01T00:00:00.000Z"),
|
||||
status = QueryStatus.InProgress,
|
||||
failureReason = undefined,
|
||||
resultCount = undefined,
|
||||
repositoryCount = 0,
|
||||
executionStartTime = date.getTime(),
|
||||
userSpecifiedLabel = undefined,
|
||||
}: {
|
||||
date?: Date;
|
||||
status?: QueryStatus;
|
||||
failureReason?: string;
|
||||
resultCount?: number;
|
||||
repositoryCount?: number;
|
||||
repositories?: string[];
|
||||
executionStartTime?: number;
|
||||
userSpecifiedLabel?: string;
|
||||
}): RemoteQueryHistoryItem {
|
||||
return {
|
||||
t: "remote",
|
||||
failureReason,
|
||||
resultCount,
|
||||
status,
|
||||
completed: false,
|
||||
queryId: nanoid(),
|
||||
remoteQuery: {
|
||||
queryName: "query-name",
|
||||
queryFilePath: "query-file.ql",
|
||||
queryText: "select 1",
|
||||
language: "javascript",
|
||||
controllerRepository: {
|
||||
owner: "github",
|
||||
name: "vscode-codeql-integration-tests",
|
||||
},
|
||||
executionStartTime,
|
||||
actionsWorkflowRunId: 1,
|
||||
repositoryCount,
|
||||
},
|
||||
userSpecifiedLabel,
|
||||
};
|
||||
}
|
||||
@@ -1,732 +0,0 @@
|
||||
[
|
||||
{
|
||||
"nwo": "github/codeql",
|
||||
"status": "Completed",
|
||||
"interpretedResults": [
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This shell command depends on an uncontrolled "
|
||||
},
|
||||
{
|
||||
"t": "location",
|
||||
"text": "absolute path",
|
||||
"location": {
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 4,
|
||||
"startColumn": 35,
|
||||
"endLine": 4,
|
||||
"endColumn": 44
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "t": "text", "text": "." }
|
||||
]
|
||||
},
|
||||
"shortDescription": "This shell command depends on an uncontrolled ,absolute path,.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 3,
|
||||
"endLine": 6,
|
||||
"text": "function cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 5,
|
||||
"startColumn": 15,
|
||||
"endLine": 5,
|
||||
"endColumn": 18
|
||||
},
|
||||
"codeFlows": [
|
||||
{
|
||||
"threadFlows": [
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 2,
|
||||
"endLine": 6,
|
||||
"text": " path = require(\"path\");\nfunction cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 4,
|
||||
"startColumn": 35,
|
||||
"endLine": 4,
|
||||
"endColumn": 44
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 2,
|
||||
"endLine": 6,
|
||||
"text": " path = require(\"path\");\nfunction cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 4,
|
||||
"startColumn": 25,
|
||||
"endLine": 4,
|
||||
"endColumn": 53
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 2,
|
||||
"endLine": 6,
|
||||
"text": " path = require(\"path\");\nfunction cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 4,
|
||||
"startColumn": 13,
|
||||
"endLine": 4,
|
||||
"endColumn": 53
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 2,
|
||||
"endLine": 6,
|
||||
"text": " path = require(\"path\");\nfunction cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 4,
|
||||
"startColumn": 7,
|
||||
"endLine": 4,
|
||||
"endColumn": 53
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 3,
|
||||
"endLine": 6,
|
||||
"text": "function cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 5,
|
||||
"startColumn": 15,
|
||||
"endLine": 5,
|
||||
"endColumn": 18
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This shell command depends on an uncontrolled "
|
||||
},
|
||||
{
|
||||
"t": "location",
|
||||
"text": "absolute path",
|
||||
"location": {
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 6,
|
||||
"startColumn": 36,
|
||||
"endLine": 6,
|
||||
"endColumn": 45
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "t": "text", "text": "." }
|
||||
]
|
||||
},
|
||||
"shortDescription": "This shell command depends on an uncontrolled ,absolute path,.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 4,
|
||||
"endLine": 8,
|
||||
"text": "(function() {\n\tcp.execFileSync('rm', ['-rf', path.join(__dirname, \"temp\")]); // GOOD\n\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 6,
|
||||
"startColumn": 14,
|
||||
"endLine": 6,
|
||||
"endColumn": 54
|
||||
},
|
||||
"codeFlows": [
|
||||
{
|
||||
"threadFlows": [
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 4,
|
||||
"endLine": 8,
|
||||
"text": "(function() {\n\tcp.execFileSync('rm', ['-rf', path.join(__dirname, \"temp\")]); // GOOD\n\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 6,
|
||||
"startColumn": 36,
|
||||
"endLine": 6,
|
||||
"endColumn": 45
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 4,
|
||||
"endLine": 8,
|
||||
"text": "(function() {\n\tcp.execFileSync('rm', ['-rf', path.join(__dirname, \"temp\")]); // GOOD\n\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 6,
|
||||
"startColumn": 26,
|
||||
"endLine": 6,
|
||||
"endColumn": 54
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 4,
|
||||
"endLine": 8,
|
||||
"text": "(function() {\n\tcp.execFileSync('rm', ['-rf', path.join(__dirname, \"temp\")]); // GOOD\n\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 6,
|
||||
"startColumn": 14,
|
||||
"endLine": 6,
|
||||
"endColumn": 54
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This shell command depends on an uncontrolled "
|
||||
},
|
||||
{
|
||||
"t": "location",
|
||||
"text": "absolute path",
|
||||
"location": {
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 8,
|
||||
"startColumn": 36,
|
||||
"endLine": 8,
|
||||
"endColumn": 45
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "t": "text", "text": "." }
|
||||
]
|
||||
},
|
||||
"shortDescription": "This shell command depends on an uncontrolled ,absolute path,.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 6,
|
||||
"endLine": 10,
|
||||
"text": "\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\texeca.shellSync('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 8,
|
||||
"startColumn": 14,
|
||||
"endLine": 8,
|
||||
"endColumn": 54
|
||||
},
|
||||
"codeFlows": [
|
||||
{
|
||||
"threadFlows": [
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 6,
|
||||
"endLine": 10,
|
||||
"text": "\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\texeca.shellSync('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 8,
|
||||
"startColumn": 36,
|
||||
"endLine": 8,
|
||||
"endColumn": 45
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 6,
|
||||
"endLine": 10,
|
||||
"text": "\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\texeca.shellSync('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 8,
|
||||
"startColumn": 26,
|
||||
"endLine": 8,
|
||||
"endColumn": 54
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 6,
|
||||
"endLine": 10,
|
||||
"text": "\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\texeca.shellSync('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 8,
|
||||
"startColumn": 14,
|
||||
"endLine": 8,
|
||||
"endColumn": 54
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This shell command depends on an uncontrolled "
|
||||
},
|
||||
{
|
||||
"t": "location",
|
||||
"text": "absolute path",
|
||||
"location": {
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 9,
|
||||
"startColumn": 40,
|
||||
"endLine": 9,
|
||||
"endColumn": 49
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "t": "text", "text": "." }
|
||||
]
|
||||
},
|
||||
"shortDescription": "This shell command depends on an uncontrolled ,absolute path,.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 7,
|
||||
"endLine": 11,
|
||||
"text": "\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\texeca.shellSync('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\n\tconst safe = \"\\\"\" + path.join(__dirname, \"temp\") + \"\\\"\";\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 9,
|
||||
"startColumn": 18,
|
||||
"endLine": 9,
|
||||
"endColumn": 58
|
||||
},
|
||||
"codeFlows": [
|
||||
{
|
||||
"threadFlows": [
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 7,
|
||||
"endLine": 11,
|
||||
"text": "\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\texeca.shellSync('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\n\tconst safe = \"\\\"\" + path.join(__dirname, \"temp\") + \"\\\"\";\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 9,
|
||||
"startColumn": 40,
|
||||
"endLine": 9,
|
||||
"endColumn": 49
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 7,
|
||||
"endLine": 11,
|
||||
"text": "\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\texeca.shellSync('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\n\tconst safe = \"\\\"\" + path.join(__dirname, \"temp\") + \"\\\"\";\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 9,
|
||||
"startColumn": 30,
|
||||
"endLine": 9,
|
||||
"endColumn": 58
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 7,
|
||||
"endLine": 11,
|
||||
"text": "\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\texeca.shellSync('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n\n\tconst safe = \"\\\"\" + path.join(__dirname, \"temp\") + \"\\\"\";\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 9,
|
||||
"startColumn": 18,
|
||||
"endLine": 9,
|
||||
"endColumn": 58
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"nwo": "test/no-results",
|
||||
"status": "Completed",
|
||||
"interpretedResults": []
|
||||
},
|
||||
{
|
||||
"nwo": "meteor/meteor",
|
||||
"status": "Completed",
|
||||
"interpretedResults": [
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This shell command depends on an uncontrolled "
|
||||
},
|
||||
{
|
||||
"t": "location",
|
||||
"text": "absolute path",
|
||||
"location": {
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/config.js"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 39,
|
||||
"startColumn": 20,
|
||||
"endLine": 39,
|
||||
"endColumn": 61
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "t": "text", "text": "." }
|
||||
]
|
||||
},
|
||||
"shortDescription": "This shell command depends on an uncontrolled ,absolute path,.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 28,
|
||||
"endLine": 259,
|
||||
"endColumn": 62
|
||||
},
|
||||
"codeFlows": [
|
||||
{
|
||||
"threadFlows": [
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/config.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 37,
|
||||
"endLine": 41,
|
||||
"text": "\nconst meteorLocalFolder = '.meteor';\nconst meteorPath = path.resolve(rootPath, meteorLocalFolder);\n\nmodule.exports = {\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 39,
|
||||
"startColumn": 20,
|
||||
"endLine": 39,
|
||||
"endColumn": 61
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/config.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 37,
|
||||
"endLine": 41,
|
||||
"text": "\nconst meteorLocalFolder = '.meteor';\nconst meteorPath = path.resolve(rootPath, meteorLocalFolder);\n\nmodule.exports = {\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 39,
|
||||
"startColumn": 7,
|
||||
"endLine": 39,
|
||||
"endColumn": 61
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/config.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 42,
|
||||
"endLine": 46,
|
||||
"text": " METEOR_LATEST_VERSION,\n extractPath: rootPath,\n meteorPath,\n release: process.env.INSTALL_METEOR_VERSION || METEOR_LATEST_VERSION,\n rootPath,\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 44,
|
||||
"startColumn": 3,
|
||||
"endLine": 44,
|
||||
"endColumn": 13
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 10,
|
||||
"endLine": 14,
|
||||
"text": "const os = require('os');\nconst {\n meteorPath,\n release,\n startedPath,\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 12,
|
||||
"startColumn": 3,
|
||||
"endLine": 12,
|
||||
"endColumn": 13
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 9,
|
||||
"endLine": 25,
|
||||
"text": "const tmp = require('tmp');\nconst os = require('os');\nconst {\n meteorPath,\n release,\n startedPath,\n extractPath,\n isWindows,\n rootPath,\n sudoUser,\n isSudo,\n isMac,\n METEOR_LATEST_VERSION,\n shouldSetupExecPath,\n} = require('./config.js');\nconst { uninstall } = require('./uninstall');\nconst {\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 11,
|
||||
"startColumn": 7,
|
||||
"endLine": 23,
|
||||
"endColumn": 27
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 42,
|
||||
"endLine": 259,
|
||||
"endColumn": 52
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 42,
|
||||
"endLine": 259,
|
||||
"endColumn": 52
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 42,
|
||||
"endLine": 259,
|
||||
"endColumn": 52
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 42,
|
||||
"endLine": 259,
|
||||
"endColumn": 52
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 42,
|
||||
"endLine": 259,
|
||||
"endColumn": 52
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 28,
|
||||
"endLine": 259,
|
||||
"endColumn": 62
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"threadFlows": [
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/config.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 37,
|
||||
"endLine": 41,
|
||||
"text": "\nconst meteorLocalFolder = '.meteor';\nconst meteorPath = path.resolve(rootPath, meteorLocalFolder);\n\nmodule.exports = {\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 39,
|
||||
"startColumn": 20,
|
||||
"endLine": 39,
|
||||
"endColumn": 61
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 28,
|
||||
"endLine": 259,
|
||||
"endColumn": 62
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
@@ -1,49 +0,0 @@
### Results for "Shell command built from environment values"

<details>
<summary>Query</summary>

```ql
/**
 * @name Shell command built from environment values
 * @description Building a shell command string with values from the enclosing
 *              environment may cause subtle bugs or vulnerabilities.
 * @kind path-problem
 * @problem.severity warning
 * @security-severity 6.3
 * @precision high
 * @id js/shell-command-injection-from-environment
 * @tags correctness
 *       security
 *       external/cwe/cwe-078
 *       external/cwe/cwe-088
 */

import javascript
import DataFlow::PathGraph
import semmle.javascript.security.dataflow.ShellCommandInjectionFromEnvironmentQuery

from
  Configuration cfg, DataFlow::PathNode source, DataFlow::PathNode sink, DataFlow::Node highlight,
  Source sourceNode
where
  sourceNode = source.getNode() and
  cfg.hasFlowPath(source, sink) and
  if cfg.isSinkWithHighlight(sink.getNode(), _)
  then cfg.isSinkWithHighlight(sink.getNode(), highlight)
  else highlight = sink.getNode()
select highlight, source, sink, "This shell command depends on an uncontrolled $@.", sourceNode,
  sourceNode.getSourceType()

```

</details>

<br />

### Summary

| Repository | Results |
| --- | --- |
| github/codeql | [4 result(s)](#file-github-codeql-md) |
| meteor/meteor | [1 result(s)](#file-meteor-meteor-md) |
@@ -1,195 +0,0 @@
|
||||
### github/codeql
|
||||
|
||||
[javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js#L5-L5)
|
||||
|
||||
<pre><code class="javascript">function cleanupTemp() {
|
||||
let cmd = "rm -rf " + path.join(__dirname, "temp");
|
||||
cp.execSync(<strong>cmd</strong>); // BAD
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
*This shell command depends on an uncontrolled [absolute path](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js#L4-L4).*
|
||||
|
||||
#### Paths
|
||||
|
||||
<details>
|
||||
<summary>Path with 5 steps</summary>
|
||||
|
||||
1. [javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js#L4-L4)
|
||||
<pre><code class="javascript"> path = require("path");
|
||||
function cleanupTemp() {
|
||||
let cmd = "rm -rf " + path.join(<strong>__dirname</strong>, "temp");
|
||||
cp.execSync(cmd); // BAD
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
2. [javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js#L4-L4)
|
||||
<pre><code class="javascript"> path = require("path");
|
||||
function cleanupTemp() {
|
||||
let cmd = "rm -rf " + <strong>path.join(__dirname, "temp")</strong>;
|
||||
cp.execSync(cmd); // BAD
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
3. [javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js#L4-L4)
|
||||
<pre><code class="javascript"> path = require("path");
|
||||
function cleanupTemp() {
|
||||
let cmd = <strong>"rm -rf " + path.join(__dirname, "temp")</strong>;
|
||||
cp.execSync(cmd); // BAD
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
4. [javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js#L4-L4)
|
||||
<pre><code class="javascript"> path = require("path");
|
||||
function cleanupTemp() {
|
||||
let <strong>cmd = "rm -rf " + path.join(__dirname, "temp")</strong>;
|
||||
cp.execSync(cmd); // BAD
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
5. [javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js#L5-L5)
|
||||
<pre><code class="javascript">function cleanupTemp() {
|
||||
let cmd = "rm -rf " + path.join(__dirname, "temp");
|
||||
cp.execSync(<strong>cmd</strong>); // BAD
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
|
||||
</details>
|
||||
|
||||
----------------------------------------
|
||||
|
||||
[javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L6-L6)
|
||||
|
||||
<pre><code class="javascript">(function() {
|
||||
cp.execFileSync('rm', ['-rf', path.join(__dirname, "temp")]); // GOOD
|
||||
cp.execSync(<strong>'rm -rf ' + path.join(__dirname, "temp")</strong>); // BAD
|
||||
|
||||
execa.shell('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
</code></pre>
|
||||
|
||||
*This shell command depends on an uncontrolled [absolute path](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L6-L6).*
|
||||
|
||||
#### Paths
|
||||
|
||||
<details>
|
||||
<summary>Path with 3 steps</summary>
|
||||
|
||||
1. [javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L6-L6)
|
||||
<pre><code class="javascript">(function() {
|
||||
cp.execFileSync('rm', ['-rf', path.join(__dirname, "temp")]); // GOOD
|
||||
cp.execSync('rm -rf ' + path.join(<strong>__dirname</strong>, "temp")); // BAD
|
||||
|
||||
execa.shell('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
</code></pre>
|
||||
|
||||
2. [javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L6-L6)
|
||||
<pre><code class="javascript">(function() {
|
||||
cp.execFileSync('rm', ['-rf', path.join(__dirname, "temp")]); // GOOD
|
||||
cp.execSync('rm -rf ' + <strong>path.join(__dirname, "temp")</strong>); // BAD
|
||||
|
||||
execa.shell('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
</code></pre>
|
||||
|
||||
3. [javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L6-L6)
|
||||
<pre><code class="javascript">(function() {
|
||||
cp.execFileSync('rm', ['-rf', path.join(__dirname, "temp")]); // GOOD
|
||||
cp.execSync(<strong>'rm -rf ' + path.join(__dirname, "temp")</strong>); // BAD
|
||||
|
||||
execa.shell('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
</code></pre>
|
||||
|
||||
|
||||
</details>
|
||||
|
||||
----------------------------------------
|
||||
|
||||
[javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L8-L8)
|
||||
|
||||
<pre><code class="javascript"> cp.execSync('rm -rf ' + path.join(__dirname, "temp")); // BAD
|
||||
|
||||
execa.shell(<strong>'rm -rf ' + path.join(__dirname, "temp")</strong>); // NOT OK
|
||||
execa.shellSync('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
|
||||
</code></pre>
|
||||
|
||||
*This shell command depends on an uncontrolled [absolute path](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L8-L8).*
|
||||
|
||||
#### Paths
|
||||
|
||||
<details>
|
||||
<summary>Path with 3 steps</summary>
|
||||
|
||||
1. [javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L8-L8)
|
||||
<pre><code class="javascript"> cp.execSync('rm -rf ' + path.join(__dirname, "temp")); // BAD
|
||||
|
||||
execa.shell('rm -rf ' + path.join(<strong>__dirname</strong>, "temp")); // NOT OK
|
||||
execa.shellSync('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
|
||||
</code></pre>
|
||||
|
||||
2. [javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L8-L8)
|
||||
<pre><code class="javascript"> cp.execSync('rm -rf ' + path.join(__dirname, "temp")); // BAD
|
||||
|
||||
execa.shell('rm -rf ' + <strong>path.join(__dirname, "temp")</strong>); // NOT OK
|
||||
execa.shellSync('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
|
||||
</code></pre>
|
||||
|
||||
3. [javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L8-L8)
|
||||
<pre><code class="javascript"> cp.execSync('rm -rf ' + path.join(__dirname, "temp")); // BAD
|
||||
|
||||
execa.shell(<strong>'rm -rf ' + path.join(__dirname, "temp")</strong>); // NOT OK
|
||||
execa.shellSync('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
|
||||
</code></pre>
|
||||
|
||||
|
||||
</details>
|
||||
|
||||
----------------------------------------
|
||||
|
||||
[javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L9-L9)
|
||||
|
||||
<pre><code class="javascript">
|
||||
execa.shell('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
execa.shellSync(<strong>'rm -rf ' + path.join(__dirname, "temp")</strong>); // NOT OK
|
||||
|
||||
const safe = "\"" + path.join(__dirname, "temp") + "\"";
|
||||
</code></pre>
|
||||
|
||||
*This shell command depends on an uncontrolled [absolute path](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L9-L9).*
|
||||
|
||||
#### Paths
|
||||
|
||||
<details>
|
||||
<summary>Path with 3 steps</summary>
|
||||
|
||||
1. [javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L9-L9)
|
||||
<pre><code class="javascript">
|
||||
execa.shell('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
execa.shellSync('rm -rf ' + path.join(<strong>__dirname</strong>, "temp")); // NOT OK
|
||||
|
||||
const safe = "\"" + path.join(__dirname, "temp") + "\"";
|
||||
</code></pre>
|
||||
|
||||
2. [javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L9-L9)
|
||||
<pre><code class="javascript">
|
||||
execa.shell('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
execa.shellSync('rm -rf ' + <strong>path.join(__dirname, "temp")</strong>); // NOT OK
|
||||
|
||||
const safe = "\"" + path.join(__dirname, "temp") + "\"";
|
||||
</code></pre>
|
||||
|
||||
3. [javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js](https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b/javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js#L9-L9)
|
||||
<pre><code class="javascript">
|
||||
execa.shell('rm -rf ' + path.join(__dirname, "temp")); // NOT OK
|
||||
execa.shellSync(<strong>'rm -rf ' + path.join(__dirname, "temp")</strong>); // NOT OK
|
||||
|
||||
const safe = "\"" + path.join(__dirname, "temp") + "\"";
|
||||
</code></pre>
|
||||
|
||||
|
||||
</details>
|
||||
|
||||
----------------------------------------
|
||||
@@ -1,144 +0,0 @@
|
||||
### meteor/meteor
|
||||
|
||||
[npm-packages/meteor-installer/install.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/install.js#L259-L259)
|
||||
|
||||
<pre><code class="javascript"> if (isWindows()) {
|
||||
//set for the current session and beyond
|
||||
child_process.execSync(<strong>`setx path "${meteorPath}/;%path%`</strong>);
|
||||
return;
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
*This shell command depends on an uncontrolled [absolute path](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/config.js#L39-L39).*
|
||||
|
||||
#### Paths
|
||||
|
||||
<details>
|
||||
<summary>Path with 11 steps</summary>
|
||||
|
||||
1. [npm-packages/meteor-installer/config.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/config.js#L39-L39)
|
||||
<pre><code class="javascript">
|
||||
const meteorLocalFolder = '.meteor';
|
||||
const meteorPath = <strong>path.resolve(rootPath, meteorLocalFolder)</strong>;
|
||||
|
||||
module.exports = {
|
||||
</code></pre>
|
||||
|
||||
2. [npm-packages/meteor-installer/config.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/config.js#L39-L39)
|
||||
<pre><code class="javascript">
|
||||
const meteorLocalFolder = '.meteor';
|
||||
const <strong>meteorPath = path.resolve(rootPath, meteorLocalFolder)</strong>;
|
||||
|
||||
module.exports = {
|
||||
</code></pre>
|
||||
|
||||
3. [npm-packages/meteor-installer/config.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/config.js#L44-L44)
|
||||
<pre><code class="javascript"> METEOR_LATEST_VERSION,
|
||||
extractPath: rootPath,
|
||||
<strong>meteorPath</strong>,
|
||||
release: process.env.INSTALL_METEOR_VERSION || METEOR_LATEST_VERSION,
|
||||
rootPath,
|
||||
</code></pre>
|
||||
|
||||
4. [npm-packages/meteor-installer/install.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/install.js#L12-L12)
|
||||
<pre><code class="javascript">const os = require('os');
|
||||
const {
|
||||
<strong>meteorPath</strong>,
|
||||
release,
|
||||
startedPath,
|
||||
</code></pre>
|
||||
|
||||
5. [npm-packages/meteor-installer/install.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/install.js#L11-L23)
|
||||
<pre><code class="javascript">const tmp = require('tmp');
|
||||
const os = require('os');
|
||||
const <strong>{</strong>
|
||||
<strong> meteorPath,</strong>
|
||||
<strong> release,</strong>
|
||||
<strong> startedPath,</strong>
|
||||
<strong> extractPath,</strong>
|
||||
<strong> isWindows,</strong>
|
||||
<strong> rootPath,</strong>
|
||||
<strong> sudoUser,</strong>
|
||||
<strong> isSudo,</strong>
|
||||
<strong> isMac,</strong>
|
||||
<strong> METEOR_LATEST_VERSION,</strong>
|
||||
<strong> shouldSetupExecPath,</strong>
|
||||
<strong>} = require('./config.js')</strong>;
|
||||
const { uninstall } = require('./uninstall');
|
||||
const {
|
||||
</code></pre>
|
||||
|
||||
6. [npm-packages/meteor-installer/install.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/install.js#L259-L259)
|
||||
<pre><code class="javascript"> if (isWindows()) {
|
||||
//set for the current session and beyond
|
||||
child_process.execSync(`setx path "${<strong>meteorPath</strong>}/;%path%`);
|
||||
return;
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
7. [npm-packages/meteor-installer/install.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/install.js#L259-L259)
|
||||
<pre><code class="javascript"> if (isWindows()) {
|
||||
//set for the current session and beyond
|
||||
child_process.execSync(`setx path "${<strong>meteorPath</strong>}/;%path%`);
|
||||
return;
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
8. [npm-packages/meteor-installer/install.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/install.js#L259-L259)
|
||||
<pre><code class="javascript"> if (isWindows()) {
|
||||
//set for the current session and beyond
|
||||
child_process.execSync(`setx path "${<strong>meteorPath</strong>}/;%path%`);
|
||||
return;
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
9. [npm-packages/meteor-installer/install.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/install.js#L259-L259)
|
||||
<pre><code class="javascript"> if (isWindows()) {
|
||||
//set for the current session and beyond
|
||||
child_process.execSync(`setx path "${<strong>meteorPath</strong>}/;%path%`);
|
||||
return;
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
10. [npm-packages/meteor-installer/install.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/install.js#L259-L259)
|
||||
<pre><code class="javascript"> if (isWindows()) {
|
||||
//set for the current session and beyond
|
||||
child_process.execSync(`setx path "${<strong>meteorPath</strong>}/;%path%`);
|
||||
return;
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
11. [npm-packages/meteor-installer/install.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/install.js#L259-L259)
|
||||
<pre><code class="javascript"> if (isWindows()) {
|
||||
//set for the current session and beyond
|
||||
child_process.execSync(<strong>`setx path "${meteorPath}/;%path%`</strong>);
|
||||
return;
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Path with 2 steps</summary>
|
||||
|
||||
1. [npm-packages/meteor-installer/config.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/config.js#L39-L39)
|
||||
<pre><code class="javascript">
|
||||
const meteorLocalFolder = '.meteor';
|
||||
const meteorPath = <strong>path.resolve(rootPath, meteorLocalFolder)</strong>;
|
||||
|
||||
module.exports = {
|
||||
</code></pre>
|
||||
|
||||
2. [npm-packages/meteor-installer/install.js](https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec/npm-packages/meteor-installer/install.js#L259-L259)
|
||||
<pre><code class="javascript"> if (isWindows()) {
|
||||
//set for the current session and beyond
|
||||
child_process.execSync(<strong>`setx path "${meteorPath}/;%path%`</strong>);
|
||||
return;
|
||||
}
|
||||
</code></pre>
|
||||
|
||||
|
||||
</details>
|
||||
|
||||
----------------------------------------
|
||||
@@ -1,9 +0,0 @@
{
  "queryName": "Shell command built from environment values",
  "queryFilePath": "c:\\git-repo\\vscode-codeql-starter\\ql\\javascript\\ql\\src\\Security\\CWE-078\\ShellCommandInjectionFromEnvironment.ql",
  "queryText": "/**\n * @name Shell command built from environment values\n * @description Building a shell command string with values from the enclosing\n *              environment may cause subtle bugs or vulnerabilities.\n * @kind path-problem\n * @problem.severity warning\n * @security-severity 6.3\n * @precision high\n * @id js/shell-command-injection-from-environment\n * @tags correctness\n *       security\n *       external/cwe/cwe-078\n *       external/cwe/cwe-088\n */\n\nimport javascript\nimport DataFlow::PathGraph\nimport semmle.javascript.security.dataflow.ShellCommandInjectionFromEnvironmentQuery\n\nfrom\n  Configuration cfg, DataFlow::PathNode source, DataFlow::PathNode sink, DataFlow::Node highlight,\n  Source sourceNode\nwhere\n  sourceNode = source.getNode() and\n  cfg.hasFlowPath(source, sink) and\n  if cfg.isSinkWithHighlight(sink.getNode(), _)\n  then cfg.isSinkWithHighlight(sink.getNode(), highlight)\n  else highlight = sink.getNode()\nselect highlight, source, sink, \"This shell command depends on an uncontrolled $@.\", sourceNode,\n  sourceNode.getSourceType()\n",
  "language": "javascript",
  "controllerRepository": { "owner": "dsp-testing", "name": "qc-controller" },
  "executionStartTime": 1649419081990,
  "actionsWorkflowRunId": 2115000864
}
@@ -1,182 +0,0 @@
|
||||
[
|
||||
{
|
||||
"nwo": "github/codeql",
|
||||
"status": "Completed",
|
||||
"interpretedResults": [
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This part of the regular expression may cause exponential backtracking on strings containing many repetitions of 'aa'."
|
||||
}
|
||||
]
|
||||
},
|
||||
"shortDescription": "This part of the regular expression may cause exponential backtracking on strings containing many repetitions of 'aa'.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/d094bbc06d063d0da8d0303676943c345e61de53",
|
||||
"filePath": "javascript/extractor/tests/regexp/input/multipart.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 15,
|
||||
"endLine": 22,
|
||||
"text": "\nvar bad95 = new RegExp(\n \"(a\" + \n \"|\" + \n \"aa)*\" + \n \"b$\"\n);\n\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 17,
|
||||
"startColumn": 6,
|
||||
"endLine": 20,
|
||||
"endColumn": 6
|
||||
},
|
||||
"codeFlows": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"nwo": "meteor/meteor",
|
||||
"status": "Completed",
|
||||
"interpretedResults": [
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This part of the regular expression may cause exponential backtracking on strings containing many repetitions of '----'."
|
||||
}
|
||||
]
|
||||
},
|
||||
"shortDescription": "This part of the regular expression may cause exponential backtracking on strings containing many repetitions of '----'.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12",
|
||||
"filePath": "packages/deprecated/markdown/showdown.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 413,
|
||||
"endLine": 417,
|
||||
"text": "\t\t/g,hashElement);\n\t*/\n\ttext = text.replace(/(\\n\\n[ ]{0,3}<!(--[^\\r]*?--\\s*)+>[ \\t]*(?=\\n{2,}))/g,hashElement);\n\n\t// PHP and ASP-style processor instructions (<?...?> and <%...%>)\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 415,
|
||||
"startColumn": 41,
|
||||
"endLine": 415,
|
||||
"endColumn": 48
|
||||
},
|
||||
"codeFlows": []
|
||||
},
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This part of the regular expression may cause exponential backtracking on strings starting with '<!--' and containing many repetitions of '----'."
|
||||
}
|
||||
]
|
||||
},
|
||||
"shortDescription": "This part of the regular expression may cause exponential backtracking on strings starting with '<!--' and containing many repetitions of '----'.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12",
|
||||
"filePath": "packages/deprecated/markdown/showdown.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 521,
|
||||
"endLine": 525,
|
||||
"text": "\t// Build a regex to find HTML tags and comments. See Friedl's\n\t// \"Mastering Regular Expressions\", 2nd Ed., pp. 200-201.\n\tvar regex = /(<[a-z\\/!$](\"[^\"]*\"|'[^']*'|[^'\">])*>|<!(--.*?--\\s*)+>)/gi;\n\n\ttext = text.replace(regex, function(wholeMatch) {\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 523,
|
||||
"startColumn": 58,
|
||||
"endLine": 523,
|
||||
"endColumn": 61
|
||||
},
|
||||
"codeFlows": []
|
||||
},
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This part of the regular expression may cause exponential backtracking on strings starting with ''' and containing many repetitions of '\\&'."
|
||||
}
|
||||
]
|
||||
},
|
||||
"shortDescription": "This part of the regular expression may cause exponential backtracking on strings starting with ''' and containing many repetitions of '\\&'.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12",
|
||||
"filePath": "tools/tests/apps/modules/imports/links/acorn/src/parseutil.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 7,
|
||||
"endLine": 11,
|
||||
"text": "// ## Parser utilities\n\nconst literal = /^(?:'((?:\\\\.|[^'])*?)'|\"((?:\\\\.|[^\"])*?)\")/\npp.strictDirective = function(start) {\n for (;;) {\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 9,
|
||||
"startColumn": 24,
|
||||
"endLine": 9,
|
||||
"endColumn": 38
|
||||
},
|
||||
"codeFlows": []
|
||||
},
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This part of the regular expression may cause exponential backtracking on strings starting with '\"' and containing many repetitions of '\\!'."
|
||||
}
|
||||
]
|
||||
},
|
||||
"shortDescription": "This part of the regular expression may cause exponential backtracking on strings starting with '\"' and containing many repetitions of '\\!'.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12",
|
||||
"filePath": "tools/tests/apps/modules/imports/links/acorn/src/parseutil.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 9,
|
||||
"endLine": 9,
|
||||
"text": "const literal = /^(?:'((?:\\\\.|[^'])*?)'|\"((?:\\\\.|[^\"])*?)\")/"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 9,
|
||||
"startColumn": 43,
|
||||
"endLine": 9,
|
||||
"endColumn": 57
|
||||
},
|
||||
"codeFlows": []
|
||||
},
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This component is implicitly exported."
|
||||
}
|
||||
]
|
||||
},
|
||||
"shortDescription": "This component is implicitly exported.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/AlexRogalskiy/android-nrf-toolbox/blob/034cf3aa7d2a3a4145177de32546ca518a462a66",
|
||||
"filePath": "app/src/main/AndroidManifest.xml"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 237,
|
||||
"endLine": 251,
|
||||
"text": "\t\t</service>\n\n\t\t<activity\n\t\t\tandroid:name=\"no.nordicsemi.android.nrftoolbox.dfu.DfuInitiatorActivity\"\n\t\t\tandroid:label=\"@string/dfu_service_title\"\n\t\t\tandroid:noHistory=\"true\"\n\t\t\tandroid:theme=\"@style/AppTheme.Translucent\" >\n\t\t\t<intent-filter>\n\t\t\t\t<action android:name=\"no.nordicsemi.android.action.DFU_UPLOAD\" />\n\n\t\t\t\t<category android:name=\"android.intent.category.DEFAULT\" />\n\t\t\t</intent-filter>\n\t\t</activity>\n\n\t\t<service\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 239,
|
||||
"startColumn": 3,
|
||||
"endLine": 249,
|
||||
"endColumn": 15
|
||||
},
|
||||
"codeFlows": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
@@ -1,44 +0,0 @@
### Results for "Inefficient regular expression"

<details>
<summary>Query</summary>

```ql
/**
 * @name Inefficient regular expression
 * @description A regular expression that requires exponential time to match certain inputs
 *              can be a performance bottleneck, and may be vulnerable to denial-of-service
 *              attacks.
 * @kind problem
 * @problem.severity error
 * @security-severity 7.5
 * @precision high
 * @id js/redos
 * @tags security
 *       external/cwe/cwe-1333
 *       external/cwe/cwe-730
 *       external/cwe/cwe-400
 */

import javascript
import semmle.javascript.security.performance.ReDoSUtil
import semmle.javascript.security.performance.ExponentialBackTracking

from RegExpTerm t, string pump, State s, string prefixMsg
where hasReDoSResult(t, pump, s, prefixMsg)
select t,
  "This part of the regular expression may cause exponential backtracking on strings " + prefixMsg +
    "containing many repetitions of '" + pump + "'."

```

</details>

<br />

### Summary

| Repository | Results |
| --- | --- |
| github/codeql | [1 result(s)](#file-github-codeql-md) |
| meteor/meteor | [5 result(s)](#file-meteor-meteor-md) |
@@ -1,17 +0,0 @@
### github/codeql

[javascript/extractor/tests/regexp/input/multipart.js](https://github.com/github/codeql/blob/d094bbc06d063d0da8d0303676943c345e61de53/javascript/extractor/tests/regexp/input/multipart.js#L17-L20)

<pre><code class="javascript">
var bad95 = new RegExp(
  "<strong>(a" + </strong>
<strong>  "|" + </strong>
<strong>  "aa)*" + </strong>
<strong>  "</strong>b$"
);

</code></pre>

*This part of the regular expression may cause exponential backtracking on strings containing many repetitions of 'aa'.*

----------------------------------------
@@ -1,71 +0,0 @@
### meteor/meteor

[packages/deprecated/markdown/showdown.js](https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12/packages/deprecated/markdown/showdown.js#L415-L415)

<pre><code class="javascript"> /g,hashElement);
*/
text = text.replace(/(\n\n[ ]{0,3}<!(--<strong>[^\r]*?</strong>--\s*)+>[ \t]*(?=\n{2,}))/g,hashElement);

// PHP and ASP-style processor instructions (<?...?> and <%...%>)
</code></pre>

*This part of the regular expression may cause exponential backtracking on strings containing many repetitions of '----'.*

----------------------------------------

[packages/deprecated/markdown/showdown.js](https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12/packages/deprecated/markdown/showdown.js#L523-L523)

<pre><code class="javascript"> // Build a regex to find HTML tags and comments. See Friedl's
// "Mastering Regular Expressions", 2nd Ed., pp. 200-201.
var regex = /(<[a-z\/!$]("[^"]*"|'[^']*'|[^'">])*>|<!(--<strong>.*?</strong>--\s*)+>)/gi;

text = text.replace(regex, function(wholeMatch) {
</code></pre>

*This part of the regular expression may cause exponential backtracking on strings starting with '<!--' and containing many repetitions of '----'.*

----------------------------------------

[tools/tests/apps/modules/imports/links/acorn/src/parseutil.js](https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12/tools/tests/apps/modules/imports/links/acorn/src/parseutil.js#L9-L9)

<pre><code class="javascript">// ## Parser utilities

const literal = /^(?:'(<strong>(?:\\.|[^'])*?</strong>)'|"((?:\\.|[^"])*?)")/
pp.strictDirective = function(start) {
for (;;) {
</code></pre>

*This part of the regular expression may cause exponential backtracking on strings starting with ''' and containing many repetitions of '\&'.*

----------------------------------------

[tools/tests/apps/modules/imports/links/acorn/src/parseutil.js](https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12/tools/tests/apps/modules/imports/links/acorn/src/parseutil.js#L9-L9)

<pre><code class="javascript">const literal = /^(?:'((?:\\.|[^'])*?)'|"(<strong>(?:\\.|[^"])*?</strong>)")/</code></pre>

*This part of the regular expression may cause exponential backtracking on strings starting with '"' and containing many repetitions of '\!'.*

----------------------------------------

[app/src/main/AndroidManifest.xml](https://github.com/AlexRogalskiy/android-nrf-toolbox/blob/034cf3aa7d2a3a4145177de32546ca518a462a66/app/src/main/AndroidManifest.xml#L239-L249)

<pre><code class="javascript"> </service>

<strong><activity</strong>
<strong> android:name="no.nordicsemi.android.nrftoolbox.dfu.DfuInitiatorActivity"</strong>
<strong> android:label="@string/dfu_service_title"</strong>
<strong> android:noHistory="true"</strong>
<strong> android:theme="@style/AppTheme.Translucent" ></strong>
<strong> <intent-filter></strong>
<strong> <action android:name="no.nordicsemi.android.action.DFU_UPLOAD" /></strong>
<strong></strong>
<strong> <category android:name="android.intent.category.DEFAULT" /></strong>
<strong> </intent-filter></strong>
<strong> </activity></strong>

<service
</code></pre>

*This component is implicitly exported.*

----------------------------------------
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"queryName": "Inefficient regular expression",
|
||||
"queryFilePath": "c:\\git-repo\\vscode-codeql-starter\\ql\\javascript\\ql\\src\\Performance\\ReDoS.ql",
|
||||
"queryText": "/**\n * @name Inefficient regular expression\n * @description A regular expression that requires exponential time to match certain inputs\n * can be a performance bottleneck, and may be vulnerable to denial-of-service\n * attacks.\n * @kind problem\n * @problem.severity error\n * @security-severity 7.5\n * @precision high\n * @id js/redos\n * @tags security\n * external/cwe/cwe-1333\n * external/cwe/cwe-730\n * external/cwe/cwe-400\n */\n\nimport javascript\nimport semmle.javascript.security.performance.ReDoSUtil\nimport semmle.javascript.security.performance.ExponentialBackTracking\n\nfrom RegExpTerm t, string pump, State s, string prefixMsg\nwhere hasReDoSResult(t, pump, s, prefixMsg)\nselect t,\n \"This part of the regular expression may cause exponential backtracking on strings \" + prefixMsg +\n \"containing many repetitions of '\" + pump + \"'.\"\n",
|
||||
"language": "javascript",
|
||||
"controllerRepository": {
|
||||
"owner": "dsp-testing",
|
||||
"name": "qc-controller"
|
||||
},
|
||||
"executionStartTime": 1650464389790,
|
||||
"actionsWorkflowRunId": 2196289254
|
||||
}
|
||||
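The two showdown findings above point at the nested quantifier `(--.*?--\s*)+` inside the HTML-comment alternative. A minimal sketch of why that backtracks exponentially, reusing the regex from the flagged showdown line and the reported pump string `'----'` (the timing loop and its bounds are illustrative only):

```typescript
// Regex copied from the flagged showdown.js line; everything else is an
// illustrative sketch of the exponential-backtracking behaviour reported above.
const htmlTagOrCommentRegex =
  /(<[a-z\/!$]("[^"]*"|'[^']*'|[^'">])*>|<!(--.*?--\s*)+>)/gi;

for (let pumps = 8; pumps <= 18; pumps += 2) {
  // '<!--' steers the matcher into the vulnerable comment alternative; the
  // missing closing '>' forces it to backtrack through every way of splitting
  // the run of dashes between '--', '.*?' and the next group iteration.
  const input = "<!--" + "----".repeat(pumps);
  const start = Date.now();
  htmlTagOrCommentRegex.test(input); // never matches, so lastIndex resets
  console.log(`${pumps} pumps: ${Date.now() - start} ms`);
}
```

Each additional pump multiplies the number of ways the dashes can be partitioned among the group's iterations, so the match time grows roughly geometrically until the absent `>` finally fails the whole alternative.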
@@ -1,393 +0,0 @@
|
||||
[
|
||||
{
|
||||
"nwo": "github/codeql",
|
||||
"status": "Completed",
|
||||
"interpretedResults": [],
|
||||
"rawResults": {
|
||||
"schema": {
|
||||
"name": "#select",
|
||||
"rows": 22,
|
||||
"columns": [
|
||||
{
|
||||
"name": "c",
|
||||
"kind": "e"
|
||||
},
|
||||
{
|
||||
"kind": "i"
|
||||
}
|
||||
]
|
||||
},
|
||||
"resultSet": {
|
||||
"schema": {
|
||||
"name": "#select",
|
||||
"rows": 22,
|
||||
"columns": [
|
||||
{
|
||||
"name": "c",
|
||||
"kind": "e"
|
||||
},
|
||||
{
|
||||
"kind": "i"
|
||||
}
|
||||
]
|
||||
},
|
||||
"rows": [
|
||||
[
|
||||
{
|
||||
"label": "functio ... ght);\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/src/Expressions/examples/CompareIdenticalValues.js",
|
||||
"startLine": 8,
|
||||
"startColumn": 32,
|
||||
"endLine": 13,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
6
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... i-1);\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/src/LanguageFeatures/examples/ArgumentsCallerCallee.js",
|
||||
"startLine": 1,
|
||||
"startColumn": 2,
|
||||
"endLine": 5,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
5
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... i-1);\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/src/LanguageFeatures/examples/ArgumentsCallerCalleeGood.js",
|
||||
"startLine": 1,
|
||||
"startColumn": 2,
|
||||
"endLine": 5,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
5
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... n -1;\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/src/Statements/examples/UselessComparisonTest.js",
|
||||
"startLine": 1,
|
||||
"startColumn": 1,
|
||||
"endLine": 12,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
12
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... false\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/constants.js",
|
||||
"startLine": 1,
|
||||
"startColumn": 1,
|
||||
"endLine": 8,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
8
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... \\n }\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/loop.js",
|
||||
"startLine": 1,
|
||||
"startColumn": 1,
|
||||
"endLine": 12,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
12
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... e\\n }\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/loop.js",
|
||||
"startLine": 14,
|
||||
"startColumn": 1,
|
||||
"endLine": 22,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
9
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... K\\n }\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/loop.js",
|
||||
"startLine": 24,
|
||||
"startColumn": 1,
|
||||
"endLine": 40,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
17
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... e\\n }\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/plus.js",
|
||||
"startLine": 1,
|
||||
"startColumn": 1,
|
||||
"endLine": 17,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
17
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... alse \\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/plus.js",
|
||||
"startLine": 19,
|
||||
"startColumn": 1,
|
||||
"endLine": 28,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
10
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... true\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/plus.js",
|
||||
"startLine": 30,
|
||||
"startColumn": 1,
|
||||
"endLine": 33,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
4
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... K\\n }\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/tst.js",
|
||||
"startLine": 1,
|
||||
"startColumn": 1,
|
||||
"endLine": 15,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
15
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... e\\n }\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/tst.js",
|
||||
"startLine": 17,
|
||||
"startColumn": 1,
|
||||
"endLine": 31,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
15
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... false\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/tst.js",
|
||||
"startLine": 33,
|
||||
"startColumn": 1,
|
||||
"endLine": 41,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
9
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... e\\n }\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/library-tests/RangeAnalysis/tst.js",
|
||||
"startLine": 43,
|
||||
"startColumn": 1,
|
||||
"endLine": 52,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
10
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... ght);\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/query-tests/Expressions/CompareIdenticalValues/tst.js",
|
||||
"startLine": 8,
|
||||
"startColumn": 32,
|
||||
"endLine": 13,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
6
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... i-1);\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/query-tests/LanguageFeatures/ArgumentsCallerCallee/tst.js",
|
||||
"startLine": 1,
|
||||
"startColumn": 2,
|
||||
"endLine": 5,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
5
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... }\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/query-tests/Security/CWE-834/LoopBoundInjectionExitBad.js",
|
||||
"startLine": 17,
|
||||
"startColumn": 1,
|
||||
"endLine": 29,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
13
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... true\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/query-tests/Statements/UselessComparisonTest/constant.js",
|
||||
"startLine": 1,
|
||||
"startColumn": 1,
|
||||
"endLine": 4,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
4
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... n -1;\\n}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/query-tests/Statements/UselessComparisonTest/example.js",
|
||||
"startLine": 1,
|
||||
"startColumn": 1,
|
||||
"endLine": 12,
|
||||
"endColumn": 1
|
||||
}
|
||||
},
|
||||
12
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... turn; }",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/query-tests/Statements/UselessComparisonTest/tst.js",
|
||||
"startLine": 8,
|
||||
"startColumn": 3,
|
||||
"endLine": 8,
|
||||
"endColumn": 43
|
||||
}
|
||||
},
|
||||
1
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "| functio ... i+1); |}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/javascript/ql/test/query-tests/Statements/UselessComparisonTest/tst.js",
|
||||
"startLine": 9,
|
||||
"startColumn": 3,
|
||||
"endLine": 9,
|
||||
"endColumn": 52
|
||||
}
|
||||
},
|
||||
1
|
||||
]
|
||||
]
|
||||
},
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b",
|
||||
"capped": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"nwo": "meteor/meteor",
|
||||
"status": "Completed",
|
||||
"interpretedResults": [],
|
||||
"rawResults": {
|
||||
"schema": {
|
||||
"name": "#select",
|
||||
"rows": 2,
|
||||
"columns": [
|
||||
{
|
||||
"name": "c",
|
||||
"kind": "e"
|
||||
},
|
||||
{
|
||||
"kind": "i"
|
||||
}
|
||||
]
|
||||
},
|
||||
"resultSet": {
|
||||
"schema": {
|
||||
"name": "#select",
|
||||
"rows": 2,
|
||||
"columns": [
|
||||
{
|
||||
"name": "c",
|
||||
"kind": "e"
|
||||
},
|
||||
{
|
||||
"kind": "i"
|
||||
}
|
||||
]
|
||||
},
|
||||
"rows": [
|
||||
[
|
||||
{
|
||||
"label": "functio ... rn H|0}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/packages/logic-solver/minisat.js",
|
||||
"startLine": 7,
|
||||
"startColumn": 91430,
|
||||
"endLine": 7,
|
||||
"endColumn": 105027
|
||||
}
|
||||
},
|
||||
1
|
||||
],
|
||||
[
|
||||
{
|
||||
"label": "functio ... ext;\\n\\t}",
|
||||
"url": {
|
||||
"uri": "file:/home/runner/work/bulk-builder/bulk-builder/packages/sha/sha256.js",
|
||||
"startLine": 94,
|
||||
"startColumn": 2,
|
||||
"endLine": 124,
|
||||
"endColumn": 2
|
||||
}
|
||||
},
|
||||
31
|
||||
]
|
||||
]
|
||||
},
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12",
|
||||
"sourceLocationPrefix": "/home/runner/work/bulk-builder/bulk-builder",
|
||||
"capped": false
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -1,41 +0,0 @@
|
||||
### Results for "Contradictory guard nodes"
|
||||
|
||||
<details>
|
||||
<summary>Query</summary>
|
||||
|
||||
```ql
|
||||
/**
|
||||
* @name Contradictory guard nodes
|
||||
*
|
||||
* @description Snippet from "UselessComparisonTest.ql"
|
||||
*/
|
||||
|
||||
import javascript
|
||||
|
||||
/**
|
||||
* Holds if there are any contradictory guard nodes in `container`.
|
||||
*
|
||||
* We use this to restrict reachability analysis to a small set of containers.
|
||||
*/
|
||||
predicate hasContradictoryGuardNodes(StmtContainer container) {
|
||||
exists(ConditionGuardNode guard |
|
||||
RangeAnalysis::isContradictoryGuardNode(guard) and
|
||||
container = guard.getContainer()
|
||||
)
|
||||
}
|
||||
|
||||
from StmtContainer c
|
||||
where hasContradictoryGuardNodes(c)
|
||||
select c, c.getNumLines()
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<br />
|
||||
|
||||
### Summary
|
||||
|
||||
| Repository | Results |
|
||||
| --- | --- |
|
||||
| github/codeql | [22 result(s)](#file-github-codeql-md) |
|
||||
| meteor/meteor | [2 result(s)](#file-meteor-meteor-md) |
|
||||
@@ -1,26 +0,0 @@
|
||||
### github/codeql
|
||||
|
||||
| c | |
|
||||
| --- | --- |
|
||||
| [`functio ... ght);\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/src/Expressions/examples/CompareIdenticalValues.js#L8-L13) | `6` |
|
||||
| [`functio ... i-1);\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/src/LanguageFeatures/examples/ArgumentsCallerCallee.js#L1-L5) | `5` |
|
||||
| [`functio ... i-1);\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/src/LanguageFeatures/examples/ArgumentsCallerCalleeGood.js#L1-L5) | `5` |
|
||||
| [`functio ... n -1;\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/src/Statements/examples/UselessComparisonTest.js#L1-L12) | `12` |
|
||||
| [`functio ... false\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/constants.js#L1-L8) | `8` |
|
||||
| [`functio ... \n }\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/loop.js#L1-L12) | `12` |
|
||||
| [`functio ... e\n }\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/loop.js#L14-L22) | `9` |
|
||||
| [`functio ... K\n }\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/loop.js#L24-L40) | `17` |
|
||||
| [`functio ... e\n }\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/plus.js#L1-L17) | `17` |
|
||||
| [`functio ... alse \n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/plus.js#L19-L28) | `10` |
|
||||
| [`functio ... true\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/plus.js#L30-L33) | `4` |
|
||||
| [`functio ... K\n }\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/tst.js#L1-L15) | `15` |
|
||||
| [`functio ... e\n }\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/tst.js#L17-L31) | `15` |
|
||||
| [`functio ... false\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/tst.js#L33-L41) | `9` |
|
||||
| [`functio ... e\n }\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/library-tests/RangeAnalysis/tst.js#L43-L52) | `10` |
|
||||
| [`functio ... ght);\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/query-tests/Expressions/CompareIdenticalValues/tst.js#L8-L13) | `6` |
|
||||
| [`functio ... i-1);\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/query-tests/LanguageFeatures/ArgumentsCallerCallee/tst.js#L1-L5) | `5` |
|
||||
| [`functio ... }\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/query-tests/Security/CWE-834/LoopBoundInjectionExitBad.js#L17-L29) | `13` |
|
||||
| [`functio ... true\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/query-tests/Statements/UselessComparisonTest/constant.js#L1-L4) | `4` |
|
||||
| [`functio ... n -1;\n}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/query-tests/Statements/UselessComparisonTest/example.js#L1-L12) | `12` |
|
||||
| [`functio ... turn; }`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/query-tests/Statements/UselessComparisonTest/tst.js#L8-L8) | `1` |
|
||||
| [`\| functio ... i+1); \|}`](https://github.com/github/codeql/blob/cbdd4927cee593b715d8469240ce1d31edaaef9b/javascript/ql/test/query-tests/Statements/UselessComparisonTest/tst.js#L9-L9) | `1` |
|
||||
@@ -1,6 +0,0 @@
|
||||
### meteor/meteor
|
||||
|
||||
| c | |
|
||||
| --- | --- |
|
||||
| [`functio ... rn H\|0}`](https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12/packages/logic-solver/minisat.js#L7-L7) | `1` |
|
||||
| [`functio ... ext;\n\t}`](https://github.com/meteor/meteor/blob/53f3c4442d3542d3d2a012a854472a0d1bef9d12/packages/sha/sha256.js#L94-L124) | `31` |
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"queryName": "Contradictory guard nodes",
|
||||
"queryFilePath": "c:\\Users\\foo\\bar\\quick-query.ql",
|
||||
"queryText": "/**\n * @name Contradictory guard nodes\n * \n * @description Snippet from \"UselessComparisonTest.ql\"\n */\n\nimport javascript\n\n/**\n * Holds if there are any contradictory guard nodes in `container`.\n *\n * We use this to restrict reachability analysis to a small set of containers.\n */\npredicate hasContradictoryGuardNodes(StmtContainer container) {\n exists(ConditionGuardNode guard |\n RangeAnalysis::isContradictoryGuardNode(guard) and\n container = guard.getContainer()\n )\n}\n\nfrom StmtContainer c\nwhere hasContradictoryGuardNodes(c)\nselect c, c.getNumLines()",
|
||||
"language": "javascript",
|
||||
"controllerRepository": {
|
||||
"owner": "dsp-testing",
|
||||
"name": "qc-controller"
|
||||
},
|
||||
"executionStartTime": 1650979054479,
|
||||
"actionsWorkflowRunId": 2226920623
|
||||
}
|
||||
@@ -1,129 +0,0 @@
|
||||
import { join } from "path";
|
||||
import { readFile, readdir } from "fs-extra";
|
||||
import {
|
||||
generateMarkdown,
|
||||
MarkdownFile,
|
||||
} from "../../../../src/remote-queries/remote-queries-markdown-generation";
|
||||
|
||||
describe("markdown generation", () => {
|
||||
describe("for path-problem query", () => {
|
||||
it("should generate markdown file for each repo with results", async () => {
|
||||
const pathProblemQuery = JSON.parse(
|
||||
await readFile(
|
||||
join(
|
||||
__dirname,
|
||||
"data/interpreted-results/path-problem/path-problem-query.json",
|
||||
),
|
||||
"utf8",
|
||||
),
|
||||
);
|
||||
|
||||
const analysesResults = JSON.parse(
|
||||
await readFile(
|
||||
join(
|
||||
__dirname,
|
||||
"data/interpreted-results/path-problem/analyses-results.json",
|
||||
),
|
||||
"utf8",
|
||||
),
|
||||
);
|
||||
|
||||
const actualFiles = generateMarkdown(
|
||||
pathProblemQuery,
|
||||
analysesResults,
|
||||
"gist",
|
||||
);
|
||||
|
||||
await checkGeneratedMarkdown(
|
||||
actualFiles,
|
||||
"data/interpreted-results/path-problem/expected",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("for problem query", () => {
|
||||
it("should generate markdown file for each repo with results", async () => {
|
||||
const problemQuery = JSON.parse(
|
||||
await readFile(
|
||||
join(
|
||||
__dirname,
|
||||
"data/interpreted-results/problem/problem-query.json",
|
||||
),
|
||||
"utf8",
|
||||
),
|
||||
);
|
||||
|
||||
const analysesResults = JSON.parse(
|
||||
await readFile(
|
||||
join(
|
||||
__dirname,
|
||||
"data/interpreted-results/problem/analyses-results.json",
|
||||
),
|
||||
"utf8",
|
||||
),
|
||||
);
|
||||
const actualFiles = generateMarkdown(
|
||||
problemQuery,
|
||||
analysesResults,
|
||||
"gist",
|
||||
);
|
||||
|
||||
await checkGeneratedMarkdown(
|
||||
actualFiles,
|
||||
"data/interpreted-results/problem/expected",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("for non-alert query", () => {
|
||||
it("should generate markdown file for each repo with results", async () => {
|
||||
const query = JSON.parse(
|
||||
await readFile(join(__dirname, "data/raw-results/query.json"), "utf8"),
|
||||
);
|
||||
const analysesResults = JSON.parse(
|
||||
await readFile(
|
||||
join(__dirname, "data/raw-results/analyses-results.json"),
|
||||
"utf8",
|
||||
),
|
||||
);
|
||||
|
||||
const actualFiles = generateMarkdown(query, analysesResults, "gist");
|
||||
|
||||
await checkGeneratedMarkdown(actualFiles, "data/raw-results/expected");
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* Reads a test output file and returns it as a string.
|
||||
* Replaces line endings with '\n' for consistency across operating systems.
|
||||
*/
|
||||
async function readTestOutputFile(relativePath: string): Promise<string> {
|
||||
const file = await readFile(join(__dirname, relativePath), "utf8");
|
||||
return file.replace(/\r?\n/g, "\n");
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares the generated (actual) markdown files to the expected markdown files and
|
||||
* checks whether the names and contents are the same.
|
||||
*/
|
||||
async function checkGeneratedMarkdown(
|
||||
actualFiles: MarkdownFile[],
|
||||
testDataBasePath: string,
|
||||
) {
|
||||
const expectedDir = join(__dirname, testDataBasePath);
|
||||
const expectedFiles = await readdir(expectedDir);
|
||||
|
||||
expect(actualFiles.length).toBe(expectedFiles.length);
|
||||
|
||||
for (const expectedFile of expectedFiles) {
|
||||
const actualFile = actualFiles.find(
|
||||
(f) => `${f.fileName}.md` === expectedFile,
|
||||
);
|
||||
expect(actualFile).toBeDefined();
|
||||
const expectedContent = await readTestOutputFile(
|
||||
join(testDataBasePath, expectedFile),
|
||||
);
|
||||
expect(actualFile!.content.join("\n")).toBe(expectedContent);
|
||||
}
|
||||
}
|
||||
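The test above drives `generateMarkdown` with fixture JSON and compares the returned `MarkdownFile` objects (a `fileName` plus `content` lines) against checked-in expectations. A hedged sketch of the same call shape used to write the generated files to disk; the `dumpMarkdown` helper and its paths are hypothetical, only the import, the `(query, analysesResults, "gist")` signature, and the `MarkdownFile` fields come from the test:

```typescript
import { join } from "path";
import { ensureDir, readFile, writeFile } from "fs-extra";
import {
  generateMarkdown,
  MarkdownFile,
} from "../../../../src/remote-queries/remote-queries-markdown-generation"; // path as used by the test

// Hypothetical helper: regenerate markdown from the same kind of fixture JSON
// the test reads, and write one .md file per entry instead of asserting on it.
async function dumpMarkdown(dataDir: string, outDir: string): Promise<void> {
  const query = JSON.parse(
    await readFile(join(dataDir, "problem-query.json"), "utf8"),
  );
  const analysesResults = JSON.parse(
    await readFile(join(dataDir, "analyses-results.json"), "utf8"),
  );

  // Same call shape as the test: (query, results, link format).
  const files: MarkdownFile[] = generateMarkdown(query, analysesResults, "gist");

  await ensureDir(outDir);
  for (const file of files) {
    // MarkdownFile exposes fileName (without extension) and content as lines.
    await writeFile(join(outDir, `${file.fileName}.md`), file.content.join("\n"));
  }
}
```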
@@ -78,9 +78,6 @@ describe("Variant Analysis Manager", () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
jest.spyOn(extLogger, "log").mockResolvedValue(undefined);
|
||||
jest
|
||||
.spyOn(config, "isVariantAnalysisLiveResultsEnabled")
|
||||
.mockReturnValue(false);
|
||||
|
||||
cancellationTokenSource = new CancellationTokenSource();
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { CancellationTokenSource, commands, extensions } from "vscode";
|
||||
import { CodeQLExtensionInterface } from "../../../../src/extension";
|
||||
import * as config from "../../../../src/config";
|
||||
|
||||
import * as ghApiClient from "../../../../src/remote-queries/gh-api/gh-api-client";
|
||||
import { VariantAnalysisMonitor } from "../../../../src/remote-queries/variant-analysis-monitor";
|
||||
@@ -46,10 +45,6 @@ describe("Variant Analysis Monitor", () => {
|
||||
const onVariantAnalysisChangeSpy = jest.fn();
|
||||
|
||||
beforeEach(async () => {
|
||||
jest
|
||||
.spyOn(config, "isVariantAnalysisLiveResultsEnabled")
|
||||
.mockReturnValue(false);
|
||||
|
||||
cancellationTokenSource = new CancellationTokenSource();
|
||||
|
||||
variantAnalysis = createMockVariantAnalysis({});
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
/**
|
||||
* @name MRVA Integration test 1
|
||||
* @kind problem
|
||||
* @problem.severity warning
|
||||
* @id javascript/integration-test-1
|
||||
*/
|
||||
import javascript
|
||||
|
||||
from MemberDeclaration md
|
||||
where md.getName() = "dispose"
|
||||
select md, "Dispose method"
|
||||
@@ -1,11 +0,0 @@
|
||||
/**
|
||||
* @name MRVA Integration test 2
|
||||
* @kind problem
|
||||
* @problem.severity warning
|
||||
* @id javascript/integration-test-2
|
||||
*/
|
||||
import javascript
|
||||
|
||||
from MemberDeclaration md
|
||||
where md.getName() = "refresh"
|
||||
select md, "Refresh method"
|
||||
@@ -1 +0,0 @@
|
||||
other/hucairz
|
||||
@@ -1 +0,0 @@
|
||||
15
|
||||
Binary file not shown.
@@ -1,16 +0,0 @@
|
||||
"md","col1"
|
||||
"dispose ... ();\n }","Dispose method"
|
||||
"readonl ... > void;","Dispose method"
|
||||
"async d ... }\n }","Dispose method"
|
||||
"dispose(): any;","Dispose method"
|
||||
"public ... }\n }","Dispose method"
|
||||
"dispose: () => void;","Dispose method"
|
||||
"dispose ... ');\n }","Dispose method"
|
||||
"dispose ... ();\n }","Dispose method"
|
||||
"public ... ();\n }","Dispose method"
|
||||
"readonl ... > void;","Dispose method"
|
||||
"dispose(): unknown","Dispose method"
|
||||
"dispose ... inonSpy","Dispose method"
|
||||
"dispose ... inonSpy","Dispose method"
|
||||
"dispose ... inonSpy","Dispose method"
|
||||
"dispose ... inonSpy","Dispose method"
|
||||
|
@@ -1,19 +0,0 @@
|
||||
## github/vscode-codeql
|
||||
|
||||
| - | Message |
|
||||
| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------- |
|
||||
| [dispose ... ();\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/cli.ts#L211) | Dispose method |
|
||||
| [readonl ... > void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/extension.ts#L166) | Dispose method |
|
||||
| [async d ... }\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/logging.ts#L151) | Dispose method |
|
||||
| [dispose(): any;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/pure/disposable-object.ts#L5) | Dispose method |
|
||||
| [public ... }\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/pure/disposable-object.ts#L65) | Dispose method |
|
||||
| [dispose: () => void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/query-results.ts#L54) | Dispose method |
|
||||
| [dispose ... ');\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/queryserver-client.ts#L32) | Dispose method |
|
||||
| [dispose ... ();\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/telemetry.ts#L129) | Dispose method |
|
||||
| [public ... ();\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/test-ui.ts#L54) | Dispose method |
|
||||
| [readonl ... > void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/run-queries.ts#L327) | Dispose method |
|
||||
| [dispose(): unknown](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/vscode-tests/no-workspace/helpers.test.ts#L150) | Dispose method |
|
||||
| [dispose ... inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L12) | Dispose method |
|
||||
| [dispose ... inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L13) | Dispose method |
|
||||
| [dispose ... inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L14) | Dispose method |
|
||||
| [dispose ... inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L15) | Dispose method |
|
||||
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
github/vscode-codeql
|
||||
@@ -1 +0,0 @@
|
||||
15
|
||||
Binary file not shown.
@@ -1,16 +0,0 @@
|
||||
"md","col1"
|
||||
"dispose ... ();\n }","Dispose method"
|
||||
"readonl ... > void;","Dispose method"
|
||||
"async d ... }\n }","Dispose method"
|
||||
"dispose(): any;","Dispose method"
|
||||
"public ... }\n }","Dispose method"
|
||||
"dispose: () => void;","Dispose method"
|
||||
"dispose ... ');\n }","Dispose method"
|
||||
"dispose ... ();\n }","Dispose method"
|
||||
"public ... ();\n }","Dispose method"
|
||||
"readonl ... > void;","Dispose method"
|
||||
"dispose(): unknown","Dispose method"
|
||||
"dispose ... inonSpy","Dispose method"
|
||||
"dispose ... inonSpy","Dispose method"
|
||||
"dispose ... inonSpy","Dispose method"
|
||||
"dispose ... inonSpy","Dispose method"
|
||||
|
@@ -1,19 +0,0 @@
|
||||
## github/vscode-codeql
|
||||
|
||||
| - | Message |
|
||||
| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------- |
|
||||
| [dispose ... ();\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/cli.ts#L211) | Dispose method |
|
||||
| [readonl ... > void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/extension.ts#L166) | Dispose method |
|
||||
| [async d ... }\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/logging.ts#L151) | Dispose method |
|
||||
| [dispose(): any;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/pure/disposable-object.ts#L5) | Dispose method |
|
||||
| [public ... }\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/pure/disposable-object.ts#L65) | Dispose method |
|
||||
| [dispose: () => void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/query-results.ts#L54) | Dispose method |
|
||||
| [dispose ... ');\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/queryserver-client.ts#L32) | Dispose method |
|
||||
| [dispose ... ();\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/telemetry.ts#L129) | Dispose method |
|
||||
| [public ... ();\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/test-ui.ts#L54) | Dispose method |
|
||||
| [readonl ... > void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/run-queries.ts#L327) | Dispose method |
|
||||
| [dispose(): unknown](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/vscode-tests/no-workspace/helpers.test.ts#L150) | Dispose method |
|
||||
| [dispose ... inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L12) | Dispose method |
|
||||
| [dispose ... inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L13) | Dispose method |
|
||||
| [dispose ... inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L14) | Dispose method |
|
||||
| [dispose ... inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L15) | Dispose method |
|
||||
File diff suppressed because one or more lines are too long
@@ -1,45 +0,0 @@
|
||||
{
|
||||
"executionEndTime": 1645645080281,
|
||||
"analysisSummaries": [
|
||||
{
|
||||
"nwo": "github/vscode-codeql",
|
||||
"resultCount": 15,
|
||||
"starCount": 1,
|
||||
"lastUpdated": 1653447088649,
|
||||
"fileSizeInBytes": 191025,
|
||||
"downloadLink": {
|
||||
"id": "171543249",
|
||||
"urlPath": "/repos/avocado-corp/hucairz/actions/artifacts/171543249",
|
||||
"innerFilePath": "results.sarif",
|
||||
"queryId": "MRVA Integration test 1-6sBi6oaky_fxqXW2NA4bx"
|
||||
}
|
||||
},
|
||||
{
|
||||
"nwo": "other/hucairz",
|
||||
"resultCount": 15,
|
||||
"starCount": 1,
|
||||
"lastUpdated": 1653447088649,
|
||||
"fileSizeInBytes": 191025,
|
||||
"downloadLink": {
|
||||
"id": "11111111",
|
||||
"urlPath": "/repos/avocado-corp/hucairz/actions/artifacts/11111111",
|
||||
"innerFilePath": "results.sarif",
|
||||
"queryId": "MRVA Integration test 1-6sBi6oaky_fxqXW2NA4bx"
|
||||
}
|
||||
},
|
||||
{
|
||||
"nwo": "hucairz/i-dont-exist",
|
||||
"resultCount": 5,
|
||||
"starCount": 1,
|
||||
"fileSizeInBytes": 81237,
|
||||
"downloadLink": {
|
||||
"id": "999999",
|
||||
"urlPath": "/these/results/will/never/be/downloaded/999999",
|
||||
"innerFilePath": "results.sarif",
|
||||
"queryId": "MRVA Integration test 2-UL-vbKAjP8ffObxjsp7hN"
|
||||
}
|
||||
}
|
||||
],
|
||||
"analysisFailures": [],
|
||||
"queryId": "MRVA Integration test 1-6sBi6oaky_fxqXW2NA4bx"
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"queryName": "MRVA Integration test 1",
|
||||
"queryFilePath": "PLACEHOLDER/q0.ql",
|
||||
"queryText": "/**\n * @name MRVA Integration test 1\n * @kind problem\n * @problem.severity warning\n * @id javascript/integration-test-1\n */\nimport javascript\n\nfrom MemberDeclaration md\nwhere md.getName() = \"dispose\"\nselect md, \"Dispose method\"\n",
|
||||
"controllerRepository": {
|
||||
"owner": "dsp-testing",
|
||||
"name": "qc-run2"
|
||||
},
|
||||
"repositories": [
|
||||
{
|
||||
"owner": "github",
|
||||
"name": "vscode-codeql"
|
||||
}
|
||||
],
|
||||
"executionStartTime": 1645644967533,
|
||||
"actionsWorkflowRunId": 1889315769
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
1645644971870
|
||||
@@ -1 +0,0 @@
|
||||
github/vscode-codeql
|
||||
@@ -1 +0,0 @@
|
||||
5
|
||||
Binary file not shown.
@@ -1,6 +0,0 @@
|
||||
"md","col1"
|
||||
"refresh ... d);\n }","Refresh method"
|
||||
"refresh ... <void>;","Refresh method"
|
||||
"public ... }\n }","Refresh method"
|
||||
"public ... }\n }","Refresh method"
|
||||
"refresh ... d);\n }","Refresh method"
|
||||
|
@@ -1,9 +0,0 @@
|
||||
## github/vscode-codeql
|
||||
|
||||
| - | Message |
|
||||
| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -------------- |
|
||||
| [refresh ... d);\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/astViewer.ts#L58) | Refresh method |
|
||||
| [refresh ... <void>;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/databases.ts#L234) | Refresh method |
|
||||
| [public ... }\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/databases.ts#L354) | Refresh method |
|
||||
| [public ... }\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/discovery.ts#L21) | Refresh method |
|
||||
| [refresh ... d);\n }](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/query-history.ts#L268) | Refresh method |
|
||||
File diff suppressed because one or more lines are too long
@@ -1,20 +0,0 @@
|
||||
{
|
||||
"executionEndTime": 1645645150738,
|
||||
"analysisSummaries": [
|
||||
{
|
||||
"nwo": "github/vscode-codeql",
|
||||
"resultCount": 5,
|
||||
"starCount": 1,
|
||||
"lastUpdated": 1653447088649,
|
||||
"fileSizeInBytes": 81237,
|
||||
"downloadLink": {
|
||||
"id": "171544171",
|
||||
"urlPath": "/repos/avocado-corp/hucairz/actions/artifacts/171544171",
|
||||
"innerFilePath": "results.sarif",
|
||||
"queryId": "MRVA Integration test 2-UL-vbKAjP8ffObxjsp7hN"
|
||||
}
|
||||
}
|
||||
],
|
||||
"analysisFailures": [],
|
||||
"queryId": "MRVA Integration test 2-UL-vbKAjP8ffObxjsp7hN"
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"queryName": "MRVA Integration test 2",
|
||||
"queryFilePath": "PLACEHOLDER/q1.ql",
|
||||
"queryText": "/**\n * @name MRVA Integration test 2\n * @kind problem\n * @problem.severity warning\n * @id javascript/integration-test-2\n */\nimport javascript\n\nfrom MemberDeclaration md\nwhere md.getName() = \"refresh\"\nselect md, \"Refresh method\"\n",
|
||||
"controllerRepository": {
|
||||
"owner": "dsp-testing",
|
||||
"name": "qc-run2"
|
||||
},
|
||||
"repositories": [
|
||||
{
|
||||
"owner": "github",
|
||||
"name": "vscode-codeql"
|
||||
}
|
||||
],
|
||||
"executionStartTime": 1645644973911,
|
||||
"actionsWorkflowRunId": 1889316048
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
1645644974055
|
||||
@@ -1,462 +0,0 @@
|
||||
[
|
||||
{
|
||||
"nwo": "github/codeql",
|
||||
"status": "Completed",
|
||||
"interpretedResults": [
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This shell command depends on an uncontrolled "
|
||||
},
|
||||
{
|
||||
"t": "location",
|
||||
"text": "absolute path",
|
||||
"location": {
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 4,
|
||||
"startColumn": 35,
|
||||
"endLine": 4,
|
||||
"endColumn": 44
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "t": "text", "text": "." }
|
||||
]
|
||||
},
|
||||
"shortDescription": "This shell command depends on an uncontrolled ,absolute path,.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 3,
|
||||
"endLine": 6,
|
||||
"text": "function cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 5,
|
||||
"startColumn": 15,
|
||||
"endLine": 5,
|
||||
"endColumn": 18
|
||||
},
|
||||
"codeFlows": [
|
||||
{
|
||||
"threadFlows": [
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 2,
|
||||
"endLine": 6,
|
||||
"text": " path = require(\"path\");\nfunction cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 4,
|
||||
"startColumn": 35,
|
||||
"endLine": 4,
|
||||
"endColumn": 44
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 2,
|
||||
"endLine": 6,
|
||||
"text": " path = require(\"path\");\nfunction cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 4,
|
||||
"startColumn": 25,
|
||||
"endLine": 4,
|
||||
"endColumn": 53
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 2,
|
||||
"endLine": 6,
|
||||
"text": " path = require(\"path\");\nfunction cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 4,
|
||||
"startColumn": 13,
|
||||
"endLine": 4,
|
||||
"endColumn": 53
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 2,
|
||||
"endLine": 6,
|
||||
"text": " path = require(\"path\");\nfunction cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 4,
|
||||
"startColumn": 7,
|
||||
"endLine": 4,
|
||||
"endColumn": 53
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 3,
|
||||
"endLine": 6,
|
||||
"text": "function cleanupTemp() {\n let cmd = \"rm -rf \" + path.join(__dirname, \"temp\");\n cp.execSync(cmd); // BAD\n}\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 5,
|
||||
"startColumn": 15,
|
||||
"endLine": 5,
|
||||
"endColumn": 18
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This shell command depends on an uncontrolled "
|
||||
},
|
||||
{
|
||||
"t": "location",
|
||||
"text": "absolute path",
|
||||
"location": {
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 6,
|
||||
"startColumn": 36,
|
||||
"endLine": 6,
|
||||
"endColumn": 45
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "t": "text", "text": "." }
|
||||
]
|
||||
},
|
||||
"shortDescription": "This shell command depends on an uncontrolled ,absolute path,.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 4,
|
||||
"endLine": 8,
|
||||
"text": "(function() {\n\tcp.execFileSync('rm', ['-rf', path.join(__dirname, \"temp\")]); // GOOD\n\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 6,
|
||||
"startColumn": 14,
|
||||
"endLine": 6,
|
||||
"endColumn": 54
|
||||
},
|
||||
"codeFlows": [
|
||||
{
|
||||
"threadFlows": [
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 4,
|
||||
"endLine": 8,
|
||||
"text": "(function() {\n\tcp.execFileSync('rm', ['-rf', path.join(__dirname, \"temp\")]); // GOOD\n\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 6,
|
||||
"startColumn": 36,
|
||||
"endLine": 6,
|
||||
"endColumn": 45
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 4,
|
||||
"endLine": 8,
|
||||
"text": "(function() {\n\tcp.execFileSync('rm', ['-rf', path.join(__dirname, \"temp\")]); // GOOD\n\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 6,
|
||||
"startColumn": 26,
|
||||
"endLine": 6,
|
||||
"endColumn": 54
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
|
||||
"filePath": "javascript/ql/test/query-tests/Security/CWE-078/tst_shell-command-injection-from-environment.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 4,
|
||||
"endLine": 8,
|
||||
"text": "(function() {\n\tcp.execFileSync('rm', ['-rf', path.join(__dirname, \"temp\")]); // GOOD\n\tcp.execSync('rm -rf ' + path.join(__dirname, \"temp\")); // BAD\n\n\texeca.shell('rm -rf ' + path.join(__dirname, \"temp\")); // NOT OK\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 6,
|
||||
"startColumn": 14,
|
||||
"endLine": 6,
|
||||
"endColumn": 54
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"nwo": "test/no-results",
|
||||
"status": "Completed",
|
||||
"interpretedResults": []
|
||||
},
|
||||
{
|
||||
"nwo": "meteor/meteor",
|
||||
"status": "Completed",
|
||||
"interpretedResults": [
|
||||
{
|
||||
"message": {
|
||||
"tokens": [
|
||||
{
|
||||
"t": "text",
|
||||
"text": "This shell command depends on an uncontrolled "
|
||||
},
|
||||
{
|
||||
"t": "location",
|
||||
"text": "absolute path",
|
||||
"location": {
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/config.js"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 39,
|
||||
"startColumn": 20,
|
||||
"endLine": 39,
|
||||
"endColumn": 61
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "t": "text", "text": "." }
|
||||
]
|
||||
},
|
||||
"shortDescription": "This shell command depends on an uncontrolled ,absolute path,.",
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"severity": "Warning",
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 28,
|
||||
"endLine": 259,
|
||||
"endColumn": 62
|
||||
},
|
||||
"codeFlows": [
|
||||
{
|
||||
"threadFlows": [
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/config.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 37,
|
||||
"endLine": 41,
|
||||
"text": "\nconst meteorLocalFolder = '.meteor';\nconst meteorPath = path.resolve(rootPath, meteorLocalFolder);\n\nmodule.exports = {\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 39,
|
||||
"startColumn": 20,
|
||||
"endLine": 39,
|
||||
"endColumn": 61
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/config.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 37,
|
||||
"endLine": 41,
|
||||
"text": "\nconst meteorLocalFolder = '.meteor';\nconst meteorPath = path.resolve(rootPath, meteorLocalFolder);\n\nmodule.exports = {\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 39,
|
||||
"startColumn": 7,
|
||||
"endLine": 39,
|
||||
"endColumn": 61
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/config.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 42,
|
||||
"endLine": 46,
|
||||
"text": " METEOR_LATEST_VERSION,\n extractPath: rootPath,\n meteorPath,\n release: process.env.INSTALL_METEOR_VERSION || METEOR_LATEST_VERSION,\n rootPath,\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 44,
|
||||
"startColumn": 3,
|
||||
"endLine": 44,
|
||||
"endColumn": 13
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 10,
|
||||
"endLine": 14,
|
||||
"text": "const os = require('os');\nconst {\n meteorPath,\n release,\n startedPath,\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 12,
|
||||
"startColumn": 3,
|
||||
"endLine": 12,
|
||||
"endColumn": 13
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 9,
|
||||
"endLine": 25,
|
||||
"text": "const tmp = require('tmp');\nconst os = require('os');\nconst {\n meteorPath,\n release,\n startedPath,\n extractPath,\n isWindows,\n rootPath,\n sudoUser,\n isSudo,\n isMac,\n METEOR_LATEST_VERSION,\n shouldSetupExecPath,\n} = require('./config.js');\nconst { uninstall } = require('./uninstall');\nconst {\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 11,
|
||||
"startColumn": 7,
|
||||
"endLine": 23,
|
||||
"endColumn": 27
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 42,
|
||||
"endLine": 259,
|
||||
"endColumn": 52
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 28,
|
||||
"endLine": 259,
|
||||
"endColumn": 62
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"threadFlows": [
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/config.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 37,
|
||||
"endLine": 41,
|
||||
"text": "\nconst meteorLocalFolder = '.meteor';\nconst meteorPath = path.resolve(rootPath, meteorLocalFolder);\n\nmodule.exports = {\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 39,
|
||||
"startColumn": 20,
|
||||
"endLine": 39,
|
||||
"endColumn": 61
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileLink": {
|
||||
"fileLinkPrefix": "https://github.com/meteor/meteor/blob/73b538fe201cbfe89dd0c709689023f9b3eab1ec",
|
||||
"filePath": "npm-packages/meteor-installer/install.js"
|
||||
},
|
||||
"codeSnippet": {
|
||||
"startLine": 257,
|
||||
"endLine": 261,
|
||||
"text": " if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path \"${meteorPath}/;%path%`);\n return;\n }\n"
|
||||
},
|
||||
"highlightedRegion": {
|
||||
"startLine": 259,
|
||||
"startColumn": 28,
|
||||
"endLine": 259,
|
||||
"endColumn": 62
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
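Each interpreted result above carries a `fileLink` (`fileLinkPrefix` plus `filePath`) and a `highlightedRegion`, and the generated markdown earlier in this diff renders them as GitHub permalinks of the form `<prefix>/<path>#L<start>` or `#L<start>-L<end>`. A small sketch of that mapping; the interfaces and the `toPermalink` helper are illustrative, only the field names are taken from the JSON:

```typescript
// Field names mirror the interpreted-results JSON above; the types and the
// helper are a sketch, not the extension's actual implementation.
interface FileLink {
  fileLinkPrefix: string;
  filePath: string;
}

interface HighlightedRegion {
  startLine: number;
  endLine: number;
}

function toPermalink(link: FileLink, region?: HighlightedRegion): string {
  const base = `${link.fileLinkPrefix}/${link.filePath}`;
  if (!region) {
    return base;
  }
  // Single-line regions get "#L5", multi-line regions "#L5-L9", matching the
  // links seen in the generated markdown tables in this diff.
  return region.startLine === region.endLine
    ? `${base}#L${region.startLine}`
    : `${base}#L${region.startLine}-L${region.endLine}`;
}

// Example drawn from the first result above:
console.log(
  toPermalink(
    {
      fileLinkPrefix:
        "https://github.com/github/codeql/blob/48015e5a2e6202131f2d1062cc066dc33ed69a9b",
      filePath:
        "javascript/ql/src/Security/CWE-078/examples/shell-command-injection-from-environment.js",
    },
    { startLine: 5, endLine: 5 },
  ),
);
```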
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"queryName": "Shell command built from environment values",
|
||||
"queryFilePath": "c:\\git-repo\\vscode-codeql-starter\\ql\\javascript\\ql\\src\\Security\\CWE-078\\ShellCommandInjectionFromEnvironment.ql",
|
||||
"queryText": "/**\n * @name Shell command built from environment values\n * @description Building a shell command string with values from the enclosing\n * environment may cause subtle bugs or vulnerabilities.\n * @kind path-problem\n * @problem.severity warning\n * @security-severity 6.3\n * @precision high\n * @id js/shell-command-injection-from-environment\n * @tags correctness\n * security\n * external/cwe/cwe-078\n * external/cwe/cwe-088\n */\n\nimport javascript\nimport DataFlow::PathGraph\nimport semmle.javascript.security.dataflow.ShellCommandInjectionFromEnvironmentQuery\n\nfrom\n Configuration cfg, DataFlow::PathNode source, DataFlow::PathNode sink, DataFlow::Node highlight,\n Source sourceNode\nwhere\n sourceNode = source.getNode() and\n cfg.hasFlowPath(source, sink) and\n if cfg.isSinkWithHighlight(sink.getNode(), _)\n then cfg.isSinkWithHighlight(sink.getNode(), highlight)\n else highlight = sink.getNode()\nselect highlight, source, sink, \"This shell command depends on an uncontrolled $@.\", sourceNode,\n sourceNode.getSourceType()\n",
|
||||
"language": "javascript",
|
||||
"controllerRepository": { "owner": "dsp-testing", "name": "qc-controller" },
|
||||
"executionStartTime": 1649419081990,
|
||||
"actionsWorkflowRunId": 2115000864,
|
||||
"repositoryCount": 10
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
{
|
||||
"version": 1,
|
||||
"queries": [
|
||||
{
|
||||
"t": "remote",
|
||||
"status": "Completed",
|
||||
"completed": true,
|
||||
"queryId": "MRVA Integration test 1-6sBi6oaky_fxqXW2NA4bx",
|
||||
"label": "MRVA Integration test 1",
|
||||
"remoteQuery": {
|
||||
"queryName": "MRVA Integration test 1",
|
||||
"queryFilePath": "PLACEHOLDER/q0.ql",
|
||||
"queryText": "/**\n * @name MRVA Integration test 1\n * @kind problem\n * @problem.severity warning\n * @id javascript/integration-test-1\n */\nimport javascript\n\nfrom MemberDeclaration md\nwhere md.getName() = \"dispose\"\nselect md, \"Dispose method\"\n",
|
||||
"controllerRepository": {
|
||||
"owner": "dsp-testing",
|
||||
"name": "qc-run2"
|
||||
},
|
||||
"repositories": [
|
||||
{
|
||||
"owner": "github",
|
||||
"name": "vscode-codeql"
|
||||
}
|
||||
],
|
||||
"executionStartTime": 1645644967533,
|
||||
"actionsWorkflowRunId": 1889315769
|
||||
}
|
||||
},
|
||||
{
|
||||
"t": "remote",
|
||||
"status": "Completed",
|
||||
"completed": true,
|
||||
"queryId": "MRVA Integration test 2-UL-vbKAjP8ffObxjsp7hN",
|
||||
"label": "MRVA Integration test 2",
|
||||
"remoteQuery": {
|
||||
"queryName": "MRVA Integration test 2",
|
||||
"queryFilePath": "PLACEHOLDER/q1.ql",
|
||||
"queryText": "/**\n * @name MRVA Integration test 2\n * @kind problem\n * @problem.severity warning\n * @id javascript/integration-test-2\n */\nimport javascript\n\nfrom MemberDeclaration md\nwhere md.getName() = \"refresh\"\nselect md, \"Refresh method\"\n",
|
||||
"controllerRepository": {
|
||||
"owner": "dsp-testing",
|
||||
"name": "qc-run2"
|
||||
},
|
||||
"repositories": [
|
||||
{
|
||||
"owner": "github",
|
||||
"name": "vscode-codeql"
|
||||
}
|
||||
],
|
||||
"executionStartTime": 1645644973911,
|
||||
"actionsWorkflowRunId": 1889316048
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
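The fixture above is a versioned query-history store: a `version` field and a `queries` array whose remote entries wrap the per-query metadata shown earlier (`queryName`, `controllerRepository`, `repositories`, and so on). A hedged sketch of reading that shape; the interfaces and the `listCompletedLabels` helper are illustrative, only the field names come from the JSON:

```typescript
import { readFile } from "fs-extra";

// Shapes mirror the fixture above; they are a sketch, not the extension's
// actual type definitions.
interface RemoteQueryHistoryEntry {
  t: "remote";
  status: string;
  completed: boolean;
  queryId: string;
  label: string;
  remoteQuery: {
    queryName: string;
    queryFilePath: string;
    queryText: string;
    controllerRepository: { owner: string; name: string };
    repositories: Array<{ owner: string; name: string }>;
    executionStartTime: number;
    actionsWorkflowRunId: number;
  };
}

interface QueryHistoryStore {
  version: number;
  queries: RemoteQueryHistoryEntry[];
}

// Returns the labels of completed entries, e.g. ["MRVA Integration test 1", ...]
// for the fixture above.
async function listCompletedLabels(storePath: string): Promise<string[]> {
  const store: QueryHistoryStore = JSON.parse(await readFile(storePath, "utf8"));
  return store.queries.filter((q) => q.completed).map((q) => q.label);
}
```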
@@ -2,15 +2,12 @@ import { env } from "vscode";
|
||||
import { QueryHistoryConfig } from "../../../../src/config";
|
||||
import { HistoryItemLabelProvider } from "../../../../src/query-history/history-item-label-provider";
|
||||
import { createMockLocalQueryInfo } from "../../../factories/query-history/local-query-history-item";
|
||||
import { createMockRemoteQueryHistoryItem } from "../../../factories/query-history/remote-query-history-item";
|
||||
import { QueryStatus } from "../../../../src/query-status";
|
||||
|
||||
describe("HistoryItemLabelProvider", () => {
|
||||
let labelProvider: HistoryItemLabelProvider;
|
||||
let config: QueryHistoryConfig;
|
||||
const date = new Date("2022-01-01T00:00:00.000Z");
|
||||
const dateStr = date.toLocaleString(env.language);
|
||||
const executionStartTime = date.getTime();
|
||||
const userSpecifiedLabel = "user-specified-name";
|
||||
|
||||
beforeEach(() => {
|
||||
@@ -85,149 +82,4 @@ describe("HistoryItemLabelProvider", () => {
|
||||
expect(labelProvider.getShortLabel(fqi2)).toBe("query-file.ql");
|
||||
});
|
||||
});
|
||||
|
||||
describe("remote queries", () => {
|
||||
it("should interpolate query when user specified", () => {
|
||||
const fqi = createMockRemoteQueryHistoryItem({ userSpecifiedLabel });
|
||||
|
||||
expect(labelProvider.getLabel(fqi)).toBe(userSpecifiedLabel);
|
||||
|
||||
fqi.userSpecifiedLabel = "%t %q %d %s %%";
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
`${dateStr} query-name (javascript) github/vscode-codeql-integration-tests in progress %`,
|
||||
);
|
||||
|
||||
fqi.userSpecifiedLabel = "%t %q %d %s %%::%t %q %d %s %%";
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
`${dateStr} query-name (javascript) github/vscode-codeql-integration-tests in progress %::${dateStr} query-name (javascript) github/vscode-codeql-integration-tests in progress %`,
|
||||
);
|
||||
});
|
||||
|
||||
it("should interpolate query when not user-specified", () => {
|
||||
const fqi = createMockRemoteQueryHistoryItem({
|
||||
status: QueryStatus.Completed,
|
||||
executionStartTime,
|
||||
resultCount: 16,
|
||||
});
|
||||
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
"xxx query-name (javascript) xxx",
|
||||
);
|
||||
|
||||
config.format = "%t %q %d %s %f %r %%";
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
`${dateStr} query-name (javascript) github/vscode-codeql-integration-tests completed query-file.ql (16 results) %`,
|
||||
);
|
||||
|
||||
config.format = "%t %q %d %s %f %r %%::%t %q %d %s %f %r %%";
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
`${dateStr} query-name (javascript) github/vscode-codeql-integration-tests completed query-file.ql (16 results) %::${dateStr} query-name (javascript) github/vscode-codeql-integration-tests completed query-file.ql (16 results) %`,
|
||||
);
|
||||
});
|
||||
|
||||
it("should use number of repositories instead of controller repo if available", () => {
|
||||
const fqi = createMockRemoteQueryHistoryItem({
|
||||
status: QueryStatus.Completed,
|
||||
executionStartTime,
|
||||
resultCount: 16,
|
||||
repositoryCount: 2,
|
||||
});
|
||||
|
||||
config.format = "%t %q %d %s %f %r %%";
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
`${dateStr} query-name (javascript) 2 repositories completed query-file.ql (16 results) %`,
|
||||
);
|
||||
});
|
||||
|
||||
it("should get query short label", () => {
|
||||
const fqi = createMockRemoteQueryHistoryItem({
|
||||
status: QueryStatus.Completed,
|
||||
executionStartTime,
|
||||
userSpecifiedLabel,
|
||||
});
|
||||
|
||||
// fall back on user specified if one exists.
|
||||
expect(labelProvider.getShortLabel(fqi)).toBe("user-specified-name");
|
||||
|
||||
// use query name if no user-specified label exists
|
||||
const fqi2 = createMockRemoteQueryHistoryItem({});
|
||||
|
||||
expect(labelProvider.getShortLabel(fqi2)).toBe("query-name");
|
||||
});
|
||||
|
||||
describe("when results are present", () => {
|
||||
it("should display results if there are any", () => {
|
||||
const fqi = createMockRemoteQueryHistoryItem({
|
||||
status: QueryStatus.Completed,
|
||||
executionStartTime,
|
||||
resultCount: 16,
|
||||
repositoryCount: 2,
|
||||
});
|
||||
config.format = "%t %q %d %s %f %r %%";
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
`${dateStr} query-name (javascript) 2 repositories completed query-file.ql (16 results) %`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("when results are not present", () => {
|
||||
it("should skip displaying them", () => {
|
||||
const fqi = createMockRemoteQueryHistoryItem({
|
||||
status: QueryStatus.Completed,
|
||||
executionStartTime,
|
||||
resultCount: 0,
|
||||
repositoryCount: 2,
|
||||
});
|
||||
config.format = "%t %q %d %s %f %r %%";
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
`${dateStr} query-name (javascript) 2 repositories completed query-file.ql %`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("when extra whitespace is present in the middle of the label", () => {
|
||||
it("should squash it down to a single whitespace", () => {
|
||||
const fqi = createMockRemoteQueryHistoryItem({
|
||||
status: QueryStatus.Completed,
|
||||
executionStartTime,
|
||||
resultCount: 0,
|
||||
repositoryCount: 2,
|
||||
});
|
||||
config.format = "%t %q %d %s %f %r %%";
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
`${dateStr} query-name (javascript) 2 repositories completed query-file.ql %`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("when extra whitespace is present at the start of the label", () => {
|
||||
it("should squash it down to a single whitespace", () => {
|
||||
const fqi = createMockRemoteQueryHistoryItem({
|
||||
status: QueryStatus.Completed,
|
||||
executionStartTime,
|
||||
resultCount: 0,
|
||||
repositoryCount: 2,
|
||||
});
|
||||
config.format = " %t %q %d %s %f %r %%";
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
` ${dateStr} query-name (javascript) 2 repositories completed query-file.ql %`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("when extra whitespace is present at the end of the label", () => {
|
||||
it("should squash it down to a single whitespace", () => {
|
||||
const fqi = createMockRemoteQueryHistoryItem({
|
||||
status: QueryStatus.Completed,
|
||||
executionStartTime,
|
||||
resultCount: 0,
|
||||
repositoryCount: 2,
|
||||
});
|
||||
config.format = "%t %q %d %s %f %r %% ";
|
||||
expect(labelProvider.getLabel(fqi)).toBe(
|
||||
`${dateStr} query-name (javascript) 2 repositories completed query-file.ql % `,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -10,7 +10,6 @@ import { LocalQueryInfo } from "../../../../src/query-results";
|
||||
import { DatabaseManager } from "../../../../src/databases";
|
||||
import { tmpDir } from "../../../../src/helpers";
|
||||
import { HistoryItemLabelProvider } from "../../../../src/query-history/history-item-label-provider";
|
||||
import { RemoteQueriesManager } from "../../../../src/remote-queries/remote-queries-manager";
|
||||
import { ResultsView } from "../../../../src/interface";
|
||||
import { EvalLogViewer } from "../../../../src/eval-log-viewer";
|
||||
import { QueryRunner } from "../../../../src/queryRunner";
|
||||
@@ -20,15 +19,11 @@ import {
|
||||
createMockLocalQueryInfo,
|
||||
createMockQueryWithResults,
|
||||
} from "../../../factories/query-history/local-query-history-item";
|
||||
import { createMockRemoteQueryHistoryItem } from "../../../factories/query-history/remote-query-history-item";
|
||||
import { RemoteQueryHistoryItem } from "../../../../src/remote-queries/remote-query-history-item";
|
||||
import { shuffleHistoryItems } from "../../utils/query-history-helpers";
|
||||
import { createMockVariantAnalysisHistoryItem } from "../../../factories/query-history/variant-analysis-history-item";
|
||||
import { VariantAnalysisHistoryItem } from "../../../../src/query-history/variant-analysis-history-item";
|
||||
import { QueryStatus } from "../../../../src/query-status";
|
||||
import { VariantAnalysisStatus } from "../../../../src/remote-queries/shared/variant-analysis";
|
||||
import { Credentials } from "../../../../src/common/authentication";
|
||||
import { createMockApp } from "../../../__mocks__/appMock";
|
||||
import {
|
||||
HistoryTreeDataProvider,
|
||||
SortOrder,
|
||||
@@ -43,12 +38,10 @@ describe("HistoryTreeDataProvider", () => {
|
||||
let queryHistoryManager: QueryHistoryManager;
|
||||
|
||||
let localQueriesResultsViewStub: ResultsView;
|
||||
let remoteQueriesManagerStub: RemoteQueriesManager;
|
||||
let variantAnalysisManagerStub: VariantAnalysisManager;
|
||||
|
||||
let allHistory: QueryHistoryInfo[];
|
||||
let localQueryHistory: LocalQueryInfo[];
|
||||
let remoteQueryHistory: RemoteQueryHistoryItem[];
|
||||
let variantAnalysisHistory: VariantAnalysisHistoryItem[];
|
||||
|
||||
let historyTreeDataProvider: HistoryTreeDataProvider;
|
||||
@@ -61,13 +54,6 @@ describe("HistoryTreeDataProvider", () => {
|
||||
localQueriesResultsViewStub = {
|
||||
showResults: jest.fn(),
|
||||
} as any as ResultsView;
|
||||
remoteQueriesManagerStub = {
|
||||
onRemoteQueryAdded: jest.fn(),
|
||||
onRemoteQueryRemoved: jest.fn(),
|
||||
onRemoteQueryStatusUpdate: jest.fn(),
|
||||
removeRemoteQuery: jest.fn(),
|
||||
openRemoteQueryResults: jest.fn(),
|
||||
} as any as RemoteQueriesManager;
|
||||
|
||||
variantAnalysisManagerStub = {
|
||||
onVariantAnalysisAdded: jest.fn(),
|
||||
@@ -111,12 +97,6 @@ describe("HistoryTreeDataProvider", () => {
|
||||
// in progress
|
||||
createMockLocalQueryInfo({ resultCount: 0 }),
|
||||
];
|
||||
remoteQueryHistory = [
|
||||
createMockRemoteQueryHistoryItem({ status: QueryStatus.Completed }),
|
||||
createMockRemoteQueryHistoryItem({ status: QueryStatus.Failed }),
|
||||
createMockRemoteQueryHistoryItem({ status: QueryStatus.InProgress }),
|
||||
createMockRemoteQueryHistoryItem({ status: QueryStatus.InProgress }),
|
||||
];
|
||||
variantAnalysisHistory = [
|
||||
createMockVariantAnalysisHistoryItem({
|
||||
historyItemStatus: QueryStatus.Completed,
|
||||
@@ -137,7 +117,6 @@ describe("HistoryTreeDataProvider", () => {
|
||||
];
|
||||
allHistory = shuffleHistoryItems([
|
||||
...localQueryHistory,
|
||||
...remoteQueryHistory,
|
||||
...variantAnalysisHistory,
|
||||
]);
|
||||
|
||||
@@ -242,12 +221,6 @@ describe("HistoryTreeDataProvider", () => {
|
||||
|
||||
describe("sorting", () => {
|
||||
const history = [
|
||||
createMockRemoteQueryHistoryItem({
|
||||
userSpecifiedLabel: "a",
|
||||
executionStartTime: 2,
|
||||
resultCount: 24,
|
||||
status: QueryStatus.Completed,
|
||||
}),
|
||||
createMockLocalQueryInfo({
|
||||
userSpecifiedLabel: "b",
|
||||
startTime: new Date(10),
|
||||
@@ -284,12 +257,6 @@ describe("HistoryTreeDataProvider", () => {
|
||||
historyItemStatus: QueryStatus.Failed,
|
||||
variantAnalysisStatus: VariantAnalysisStatus.Failed,
|
||||
}),
|
||||
createMockRemoteQueryHistoryItem({
|
||||
userSpecifiedLabel: "h",
|
||||
executionStartTime: 6,
|
||||
resultCount: 5,
|
||||
status: QueryStatus.InProgress,
|
||||
}),
|
||||
];
|
||||
|
||||
let treeDataProvider: HistoryTreeDataProvider;
|
||||
@@ -318,14 +285,12 @@ describe("HistoryTreeDataProvider", () => {
|
||||
|
||||
it("should get children for date ascending", async () => {
|
||||
const expected = [
|
||||
history[4],
|
||||
history[2],
|
||||
history[5],
|
||||
history[0],
|
||||
history[3],
|
||||
history[7],
|
||||
history[6],
|
||||
history[1],
|
||||
history[2],
|
||||
history[4],
|
||||
history[3],
|
||||
];
|
||||
treeDataProvider.sortOrder = SortOrder.DateAsc;
|
||||
|
||||
@@ -335,14 +300,12 @@ describe("HistoryTreeDataProvider", () => {
|
||||
|
||||
it("should get children for date descending", async () => {
|
||||
const expected = [
|
||||
history[4],
|
||||
history[2],
|
||||
history[5],
|
||||
history[0],
|
||||
history[3],
|
||||
history[7],
|
||||
history[6],
|
||||
history[1],
|
||||
history[2],
|
||||
history[4],
|
||||
history[3],
|
||||
].reverse();
|
||||
|
||||
treeDataProvider.sortOrder = SortOrder.DateDesc;
|
||||
@@ -353,14 +316,12 @@ describe("HistoryTreeDataProvider", () => {
|
||||
|
||||
it("should get children for result count ascending", async () => {
|
||||
const expected = [
|
||||
history[7],
|
||||
history[5],
|
||||
history[4],
|
||||
history[1],
|
||||
history[0],
|
||||
history[3],
|
||||
history[6],
|
||||
history[0],
|
||||
history[2],
|
||||
history[5],
|
||||
history[1],
|
||||
];
|
||||
treeDataProvider.sortOrder = SortOrder.CountAsc;
|
||||
|
||||
@@ -371,14 +332,12 @@ describe("HistoryTreeDataProvider", () => {
|
||||
|
||||
it("should get children for result count descending", async () => {
|
||||
const expected = [
|
||||
history[7],
|
||||
history[5],
|
||||
history[4],
|
||||
history[1],
|
||||
history[0],
|
||||
history[3],
|
||||
history[6],
|
||||
history[0],
|
||||
history[2],
|
||||
history[5],
|
||||
history[1],
|
||||
].reverse();
|
||||
treeDataProvider.sortOrder = SortOrder.CountDesc;
|
||||
|
||||
@@ -404,12 +363,6 @@ describe("HistoryTreeDataProvider", () => {
|
||||
historyItemStatus: QueryStatus.Completed,
|
||||
variantAnalysisStatus: VariantAnalysisStatus.Failed,
|
||||
}),
|
||||
createMockRemoteQueryHistoryItem({
|
||||
userSpecifiedLabel: "d",
|
||||
resultCount: 0,
|
||||
executionStartTime: 50,
|
||||
status: QueryStatus.Completed,
|
||||
}),
|
||||
createMockVariantAnalysisHistoryItem({
|
||||
userSpecifiedLabel: "e",
|
||||
resultCount: 0,
|
||||
@@ -447,12 +400,6 @@ describe("HistoryTreeDataProvider", () => {
|
||||
historyItemStatus: QueryStatus.Completed,
|
||||
variantAnalysisStatus: VariantAnalysisStatus.Failed,
|
||||
}),
|
||||
createMockRemoteQueryHistoryItem({
|
||||
userSpecifiedLabel: "d",
|
||||
resultCount: 0,
|
||||
executionStartTime: 50,
|
||||
status: QueryStatus.Completed,
|
||||
}),
|
||||
createMockVariantAnalysisHistoryItem({
|
||||
userSpecifiedLabel: "e",
|
||||
resultCount: 0,
|
||||
@@ -472,16 +419,11 @@ describe("HistoryTreeDataProvider", () => {
|
||||
});
|
||||
});
|
||||
|
||||
async function createMockQueryHistory(
|
||||
allHistory: QueryHistoryInfo[],
|
||||
credentials?: Credentials,
|
||||
) {
|
||||
async function createMockQueryHistory(allHistory: QueryHistoryInfo[]) {
|
||||
const qhm = new QueryHistoryManager(
|
||||
createMockApp({ credentials }),
|
||||
{} as QueryRunner,
|
||||
{} as DatabaseManager,
|
||||
localQueriesResultsViewStub,
|
||||
remoteQueriesManagerStub,
|
||||
variantAnalysisManagerStub,
|
||||
{} as EvalLogViewer,
|
||||
"xxx",
|
||||
|
||||
@@ -10,7 +10,6 @@ import { VariantAnalysisHistoryItem } from "../../../../src/query-history/varian
|
||||
import { createMockVariantAnalysis } from "../../../factories/remote-queries/shared/variant-analysis";
|
||||
import { createMockScannedRepos } from "../../../factories/remote-queries/shared/scanned-repositories";
|
||||
import { createMockLocalQueryInfo } from "../../../factories/query-history/local-query-history-item";
|
||||
import { createMockRemoteQueryHistoryItem } from "../../../factories/query-history/remote-query-history-item";
|
||||
import {
|
||||
VariantAnalysisRepoStatus,
|
||||
VariantAnalysisStatus,
|
||||
@@ -19,7 +18,6 @@ import {
|
||||
describe("Query history info", () => {
|
||||
const date = new Date("2022-01-01T00:00:00.000Z");
|
||||
const localQueryHistoryItem = createMockLocalQueryInfo({ startTime: date });
|
||||
const remoteQueryHistoryItem = createMockRemoteQueryHistoryItem({});
|
||||
const variantAnalysisHistoryItem: VariantAnalysisHistoryItem = {
|
||||
t: "variant-analysis",
|
||||
status: QueryStatus.InProgress,
|
||||
@@ -42,12 +40,6 @@ describe("Query history info", () => {
|
||||
expect(queryName).toBe(localQueryHistoryItem.getQueryName());
|
||||
});
|
||||
|
||||
it("should get the name for remote query history items", () => {
|
||||
const queryName = getRawQueryName(remoteQueryHistoryItem);
|
||||
|
||||
expect(queryName).toBe(remoteQueryHistoryItem.remoteQuery.queryName);
|
||||
});
|
||||
|
||||
it("should get the name for variant analysis history items", () => {
|
||||
const queryName = getRawQueryName(variantAnalysisHistoryItem);
|
||||
|
||||
@@ -64,12 +56,6 @@ describe("Query history info", () => {
|
||||
expect(historyItemId).toBe(localQueryHistoryItem.initialInfo.id);
|
||||
});
|
||||
|
||||
it("should get the ID for remote query history items", () => {
|
||||
const historyItemId = getQueryId(remoteQueryHistoryItem);
|
||||
|
||||
expect(historyItemId).toBe(remoteQueryHistoryItem.queryId);
|
||||
});
|
||||
|
||||
it("should get the ID for variant analysis history items", () => {
|
||||
const historyItemId = getQueryId(variantAnalysisHistoryItem);
|
||||
|
||||
@@ -86,12 +72,6 @@ describe("Query history info", () => {
|
||||
expect(queryText).toBe(localQueryHistoryItem.initialInfo.queryText);
|
||||
});
|
||||
|
||||
it("should get the query text for remote query history items", () => {
|
||||
const queryText = getQueryText(remoteQueryHistoryItem);
|
||||
|
||||
expect(queryText).toBe(remoteQueryHistoryItem.remoteQuery.queryText);
|
||||
});
|
||||
|
||||
it("should get the query text for variant analysis history items", () => {
|
||||
const queryText = getQueryText(variantAnalysisHistoryItem);
|
||||
|
||||
@@ -102,23 +82,6 @@ describe("Query history info", () => {
|
||||
});
|
||||
|
||||
describe("buildRepoLabel", () => {
|
||||
describe("repo label for remote query history items", () => {
|
||||
it("should return controller repo when `repositoryCount` is 0", () => {
|
||||
const repoLabel = buildRepoLabel(remoteQueryHistoryItem);
|
||||
const expectedRepoLabel = `${remoteQueryHistoryItem.remoteQuery.controllerRepository.owner}/${remoteQueryHistoryItem.remoteQuery.controllerRepository.name}`;
|
||||
|
||||
expect(repoLabel).toBe(expectedRepoLabel);
|
||||
});
|
||||
it("should return number of repositories when `repositoryCount` is non-zero", () => {
|
||||
const remoteQueryHistoryItem2 = createMockRemoteQueryHistoryItem({
|
||||
repositoryCount: 3,
|
||||
});
|
||||
const repoLabel2 = buildRepoLabel(remoteQueryHistoryItem2);
|
||||
const expectedRepoLabel2 = "3 repositories";
|
||||
|
||||
expect(repoLabel2).toBe(expectedRepoLabel2);
|
||||
});
|
||||
});
|
||||
describe("repo label for variant analysis history items", () => {
|
||||
it("should return label when `totalScannedRepositoryCount` is 0", () => {
|
||||
const variantAnalysisHistoryItem0: VariantAnalysisHistoryItem = {
|
||||
@@ -159,18 +122,6 @@ describe("Query history info", () => {
|
||||
});
|
||||
|
||||
describe("getActionsWorkflowRunUrl", () => {
|
||||
it("should get the run url for remote query history items", () => {
|
||||
const actionsWorkflowRunUrl = getActionsWorkflowRunUrl(
|
||||
remoteQueryHistoryItem,
|
||||
);
|
||||
|
||||
const remoteQuery = remoteQueryHistoryItem.remoteQuery;
|
||||
const fullName = `${remoteQuery.controllerRepository.owner}/${remoteQuery.controllerRepository.name}`;
|
||||
expect(actionsWorkflowRunUrl).toBe(
|
||||
`https://github.com/${fullName}/actions/runs/${remoteQuery.actionsWorkflowRunId}`,
|
||||
);
|
||||
});
|
||||
|
||||
it("should get the run url for variant analysis history items", () => {
|
||||
const actionsWorkflowRunUrl = getActionsWorkflowRunUrl(
|
||||
variantAnalysisHistoryItem,
|
||||
|
||||
@@ -11,7 +11,6 @@ import { LocalQueryInfo } from "../../../../src/query-results";
|
||||
import { DatabaseManager } from "../../../../src/databases";
|
||||
import { tmpDir } from "../../../../src/helpers";
|
||||
import { HistoryItemLabelProvider } from "../../../../src/query-history/history-item-label-provider";
|
||||
import { RemoteQueriesManager } from "../../../../src/remote-queries/remote-queries-manager";
|
||||
import { ResultsView } from "../../../../src/interface";
|
||||
import { EvalLogViewer } from "../../../../src/eval-log-viewer";
|
||||
import { QueryRunner } from "../../../../src/queryRunner";
|
||||
@@ -21,20 +20,14 @@ import {
|
||||
createMockLocalQueryInfo,
|
||||
createMockQueryWithResults,
|
||||
} from "../../../factories/query-history/local-query-history-item";
|
||||
import { createMockRemoteQueryHistoryItem } from "../../../factories/query-history/remote-query-history-item";
|
||||
import { RemoteQueryHistoryItem } from "../../../../src/remote-queries/remote-query-history-item";
|
||||
import { shuffleHistoryItems } from "../../utils/query-history-helpers";
|
||||
import { createMockVariantAnalysisHistoryItem } from "../../../factories/query-history/variant-analysis-history-item";
|
||||
import { VariantAnalysisHistoryItem } from "../../../../src/query-history/variant-analysis-history-item";
|
||||
import { QueryStatus } from "../../../../src/query-status";
|
||||
import { VariantAnalysisStatus } from "../../../../src/remote-queries/shared/variant-analysis";
|
||||
import * as ghActionsApiClient from "../../../../src/remote-queries/gh-api/gh-actions-api-client";
|
||||
import { QuickPickItem, TextEditor } from "vscode";
|
||||
import { WebviewReveal } from "../../../../src/interface-utils";
|
||||
import * as helpers from "../../../../src/helpers";
|
||||
import { testCredentialsWithStub } from "../../../factories/authentication";
|
||||
import { Credentials } from "../../../../src/common/authentication";
|
||||
import { createMockApp } from "../../../__mocks__/appMock";
|
||||
|
||||
describe("QueryHistoryManager", () => {
|
||||
const mockExtensionLocation = join(tmpDir.name, "mock-extension-location");
|
||||
@@ -54,14 +47,12 @@ describe("QueryHistoryManager", () => {
|
||||
let queryHistoryManager: QueryHistoryManager;
|
||||
|
||||
let localQueriesResultsViewStub: ResultsView;
|
||||
let remoteQueriesManagerStub: RemoteQueriesManager;
|
||||
let variantAnalysisManagerStub: VariantAnalysisManager;
|
||||
|
||||
let tryOpenExternalFile: Function;
|
||||
|
||||
let allHistory: QueryHistoryInfo[];
|
||||
let localQueryHistory: LocalQueryInfo[];
|
||||
let remoteQueryHistory: RemoteQueryHistoryItem[];
|
||||
let variantAnalysisHistory: VariantAnalysisHistoryItem[];
|
||||
|
||||
beforeEach(() => {
|
||||
@@ -86,13 +77,6 @@ describe("QueryHistoryManager", () => {
|
||||
localQueriesResultsViewStub = {
|
||||
showResults: jest.fn(),
|
||||
} as any as ResultsView;
|
||||
remoteQueriesManagerStub = {
|
||||
onRemoteQueryAdded: jest.fn(),
|
||||
onRemoteQueryRemoved: jest.fn(),
|
||||
onRemoteQueryStatusUpdate: jest.fn(),
|
||||
removeRemoteQuery: jest.fn(),
|
||||
openRemoteQueryResults: jest.fn(),
|
||||
} as any as RemoteQueriesManager;
|
||||
|
||||
variantAnalysisManagerStub = {
|
||||
onVariantAnalysisAdded: jest.fn(),
|
||||
@@ -136,12 +120,6 @@ describe("QueryHistoryManager", () => {
|
||||
// in progress
|
||||
createMockLocalQueryInfo({ resultCount: 0 }),
|
||||
];
|
||||
remoteQueryHistory = [
|
||||
createMockRemoteQueryHistoryItem({ status: QueryStatus.Completed }),
|
||||
createMockRemoteQueryHistoryItem({ status: QueryStatus.Failed }),
|
||||
createMockRemoteQueryHistoryItem({ status: QueryStatus.InProgress }),
|
||||
createMockRemoteQueryHistoryItem({ status: QueryStatus.InProgress }),
|
||||
];
|
||||
variantAnalysisHistory = [
|
||||
createMockVariantAnalysisHistoryItem({
|
||||
historyItemStatus: QueryStatus.Completed,
|
||||
@@ -162,7 +140,6 @@ describe("QueryHistoryManager", () => {
|
||||
];
|
||||
allHistory = shuffleHistoryItems([
|
||||
...localQueryHistory,
|
||||
...remoteQueryHistory,
|
||||
...variantAnalysisHistory,
|
||||
]);
|
||||
});
|
||||
@@ -256,42 +233,6 @@ describe("QueryHistoryManager", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("remote query", () => {
|
||||
describe("when complete", () => {
|
||||
it("should show results", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(allHistory);
|
||||
const itemClicked = remoteQueryHistory[0];
|
||||
await queryHistoryManager.handleItemClicked(itemClicked, [
|
||||
itemClicked,
|
||||
]);
|
||||
|
||||
expect(
|
||||
remoteQueriesManagerStub.openRemoteQueryResults,
|
||||
).toHaveBeenCalledTimes(1);
|
||||
expect(
|
||||
remoteQueriesManagerStub.openRemoteQueryResults,
|
||||
).toHaveBeenCalledWith(itemClicked.queryId);
|
||||
expect(queryHistoryManager.treeDataProvider.getCurrent()).toBe(
|
||||
itemClicked,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("when incomplete", () => {
|
||||
it("should do nothing", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(allHistory);
|
||||
const itemClicked = remoteQueryHistory[2];
|
||||
await queryHistoryManager.handleItemClicked(itemClicked, [
|
||||
itemClicked,
|
||||
]);
|
||||
|
||||
expect(
|
||||
remoteQueriesManagerStub.openRemoteQueryResults,
|
||||
).not.toBeCalledWith(itemClicked.queryId);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("variant analysis", () => {
|
||||
describe("when complete", () => {
|
||||
it("should show results", async () => {
|
||||
@@ -347,9 +288,6 @@ describe("QueryHistoryManager", () => {
|
||||
]);
|
||||
|
||||
expect(localQueriesResultsViewStub.showResults).not.toHaveBeenCalled();
|
||||
expect(
|
||||
remoteQueriesManagerStub.openRemoteQueryResults,
|
||||
).not.toHaveBeenCalled();
|
||||
expect(variantAnalysisManagerStub.showView).not.toBeCalled();
|
||||
expect(
|
||||
queryHistoryManager.treeDataProvider.getCurrent(),
|
||||
@@ -364,9 +302,6 @@ describe("QueryHistoryManager", () => {
|
||||
await queryHistoryManager.handleItemClicked(undefined!, []);
|
||||
|
||||
expect(localQueriesResultsViewStub.showResults).not.toHaveBeenCalled();
|
||||
expect(
|
||||
remoteQueriesManagerStub.openRemoteQueryResults,
|
||||
).not.toHaveBeenCalled();
|
||||
expect(variantAnalysisManagerStub.showView).not.toHaveBeenCalled();
|
||||
expect(
|
||||
queryHistoryManager.treeDataProvider.getCurrent(),
|
||||
@@ -476,102 +411,6 @@ describe("QueryHistoryManager", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("when the item is a remote query", () => {
|
||||
describe("when the item being removed is not selected", () => {
|
||||
let toDelete: RemoteQueryHistoryItem;
|
||||
let selected: RemoteQueryHistoryItem;
|
||||
|
||||
beforeEach(async () => {
|
||||
// deleting the first item when a different item is selected
|
||||
// will not change the selection
|
||||
toDelete = remoteQueryHistory[1];
|
||||
selected = remoteQueryHistory[3];
|
||||
|
||||
queryHistoryManager = await createMockQueryHistory(allHistory);
|
||||
|
||||
// initialize the selection
|
||||
await queryHistoryManager.treeView.reveal(remoteQueryHistory[0], {
|
||||
select: true,
|
||||
});
|
||||
|
||||
// select the item we want
|
||||
await queryHistoryManager.treeView.reveal(selected, {
|
||||
select: true,
|
||||
});
|
||||
|
||||
// should be selected
|
||||
expect(queryHistoryManager.treeDataProvider.getCurrent()).toEqual(
|
||||
selected,
|
||||
);
|
||||
|
||||
// remove an item
|
||||
await queryHistoryManager.handleRemoveHistoryItem(toDelete, [
|
||||
toDelete,
|
||||
]);
|
||||
});
|
||||
|
||||
it("should remove the item", () => {
|
||||
expect(
|
||||
remoteQueriesManagerStub.removeRemoteQuery,
|
||||
).toHaveBeenCalledWith(toDelete.queryId);
|
||||
expect(queryHistoryManager.treeDataProvider.allHistory).not.toContain(
|
||||
toDelete,
|
||||
);
|
||||
});
|
||||
|
||||
it("should not change the selection", () => {
|
||||
expect(queryHistoryManager.treeDataProvider.getCurrent()).toEqual(
|
||||
selected,
|
||||
);
|
||||
|
||||
expect(
|
||||
remoteQueriesManagerStub.openRemoteQueryResults,
|
||||
).toHaveBeenCalledWith(selected.queryId);
|
||||
});
|
||||
});
|
||||
|
||||
describe("when the item being removed is selected", () => {
|
||||
let toDelete: RemoteQueryHistoryItem;
|
||||
let newSelected: RemoteQueryHistoryItem;
|
||||
|
||||
beforeEach(async () => {
|
||||
// deleting the selected item automatically selects next item
|
||||
toDelete = remoteQueryHistory[1];
|
||||
newSelected = remoteQueryHistory[2];
|
||||
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
remoteQueryHistory,
|
||||
);
|
||||
|
||||
// select the item we want
|
||||
await queryHistoryManager.treeView.reveal(toDelete, {
|
||||
select: true,
|
||||
});
|
||||
await queryHistoryManager.handleRemoveHistoryItem(toDelete, [
|
||||
toDelete,
|
||||
]);
|
||||
});
|
||||
|
||||
it("should remove the item", () => {
|
||||
expect(
|
||||
remoteQueriesManagerStub.removeRemoteQuery,
|
||||
).toHaveBeenCalledWith(toDelete.queryId);
|
||||
expect(queryHistoryManager.treeDataProvider.allHistory).not.toContain(
|
||||
toDelete,
|
||||
);
|
||||
});
|
||||
|
||||
it.skip("should change the selection", () => {
|
||||
expect(queryHistoryManager.treeDataProvider.getCurrent()).toEqual(
|
||||
newSelected,
|
||||
);
|
||||
expect(
|
||||
remoteQueriesManagerStub.openRemoteQueryResults,
|
||||
).toHaveBeenCalledWith(newSelected.queryId);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("when the item is a variant analysis", () => {
|
||||
let showBinaryChoiceDialogSpy: jest.SpiedFunction<
|
||||
typeof helpers.showBinaryChoiceDialog
|
||||
@@ -856,24 +695,9 @@ describe("QueryHistoryManager", () => {
|
||||
});
|
||||
|
||||
describe("handleCancel", () => {
|
||||
let mockCancelRemoteQuery: jest.SpiedFunction<
|
||||
typeof ghActionsApiClient.cancelRemoteQuery
|
||||
>;
|
||||
const getOctokitStub = jest.fn();
|
||||
const mockCredentials = testCredentialsWithStub(getOctokitStub);
|
||||
|
||||
beforeEach(async () => {
|
||||
mockCancelRemoteQuery = jest
|
||||
.spyOn(ghActionsApiClient, "cancelRemoteQuery")
|
||||
.mockResolvedValue();
|
||||
});
|
||||
|
||||
describe("if the item is in progress", () => {
|
||||
it("should cancel a single local query", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
queryHistoryManager = await createMockQueryHistory(localQueryHistory);
|
||||
|
||||
// cancelling the selected item
|
||||
const inProgress1 = localQueryHistory[4];
|
||||
@@ -884,10 +708,7 @@ describe("QueryHistoryManager", () => {
|
||||
});
|
||||
|
||||
it("should cancel multiple local queries", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
queryHistoryManager = await createMockQueryHistory(localQueryHistory);
|
||||
|
||||
// cancelling the selected item
|
||||
const inProgress1 = localQueryHistory[4];
|
||||
@@ -904,51 +725,8 @@ describe("QueryHistoryManager", () => {
|
||||
expect(cancelSpy2).toBeCalled();
|
||||
});
|
||||
|
||||
it("should cancel a single remote query", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
|
||||
// cancelling the selected item
|
||||
const inProgress1 = remoteQueryHistory[2];
|
||||
|
||||
await queryHistoryManager.handleCancel(inProgress1, [inProgress1]);
|
||||
expect(mockCancelRemoteQuery).toBeCalledWith(
|
||||
mockCredentials,
|
||||
inProgress1.remoteQuery,
|
||||
);
|
||||
});
|
||||
|
||||
it("should cancel multiple remote queries", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
|
||||
// cancelling the selected item
|
||||
const inProgress1 = remoteQueryHistory[2];
|
||||
const inProgress2 = remoteQueryHistory[3];
|
||||
|
||||
await queryHistoryManager.handleCancel(inProgress1, [
|
||||
inProgress1,
|
||||
inProgress2,
|
||||
]);
|
||||
expect(mockCancelRemoteQuery).toBeCalledWith(
|
||||
mockCredentials,
|
||||
inProgress1.remoteQuery,
|
||||
);
|
||||
expect(mockCancelRemoteQuery).toBeCalledWith(
|
||||
mockCredentials,
|
||||
inProgress2.remoteQuery,
|
||||
);
|
||||
});
|
||||
|
||||
it("should cancel a single variant analysis", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
queryHistoryManager = await createMockQueryHistory(localQueryHistory);
|
||||
|
||||
// cancelling the selected item
|
||||
const inProgress1 = variantAnalysisHistory[1];
|
||||
@@ -961,10 +739,7 @@ describe("QueryHistoryManager", () => {
|
||||
});
|
||||
|
||||
it("should cancel multiple variant analyses", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
queryHistoryManager = await createMockQueryHistory(localQueryHistory);
|
||||
|
||||
// cancelling the selected item
|
||||
const inProgress1 = variantAnalysisHistory[1];
|
||||
@@ -987,10 +762,7 @@ describe("QueryHistoryManager", () => {
|
||||
|
||||
describe("if the item is not in progress", () => {
|
||||
it("should not cancel a single local query", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
queryHistoryManager = await createMockQueryHistory(localQueryHistory);
|
||||
|
||||
// cancelling the selected item
|
||||
const completed = localQueryHistory[0];
|
||||
@@ -1001,10 +773,7 @@ describe("QueryHistoryManager", () => {
|
||||
});
|
||||
|
||||
it("should not cancel multiple local queries", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
queryHistoryManager = await createMockQueryHistory(localQueryHistory);
|
||||
|
||||
// cancelling the selected item
|
||||
const completed = localQueryHistory[0];
|
||||
@@ -1018,48 +787,8 @@ describe("QueryHistoryManager", () => {
|
||||
expect(cancelSpy2).not.toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
it("should not cancel a single remote query", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
|
||||
// cancelling the selected item
|
||||
const completed = remoteQueryHistory[0];
|
||||
|
||||
await queryHistoryManager.handleCancel(completed, [completed]);
|
||||
expect(mockCancelRemoteQuery).not.toBeCalledWith(
|
||||
mockCredentials,
|
||||
completed.remoteQuery,
|
||||
);
|
||||
});
|
||||
|
||||
it("should not cancel multiple remote queries", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
|
||||
// cancelling the selected item
|
||||
const completed = remoteQueryHistory[0];
|
||||
const failed = remoteQueryHistory[1];
|
||||
|
||||
await queryHistoryManager.handleCancel(completed, [completed, failed]);
|
||||
expect(mockCancelRemoteQuery).not.toBeCalledWith(
|
||||
mockCredentials,
|
||||
completed.remoteQuery,
|
||||
);
|
||||
expect(mockCancelRemoteQuery).not.toBeCalledWith(
|
||||
mockCredentials,
|
||||
failed.remoteQuery,
|
||||
);
|
||||
});
|
||||
|
||||
it("should not cancel a single variant analysis", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
queryHistoryManager = await createMockQueryHistory(localQueryHistory);
|
||||
|
||||
// cancelling the selected item
|
||||
const completedVariantAnalysis = variantAnalysisHistory[0];
|
||||
@@ -1074,10 +803,7 @@ describe("QueryHistoryManager", () => {
|
||||
});
|
||||
|
||||
it("should not cancel multiple variant analyses", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(
|
||||
localQueryHistory,
|
||||
mockCredentials,
|
||||
);
|
||||
queryHistoryManager = await createMockQueryHistory(localQueryHistory);
|
||||
|
||||
// cancelling the selected item
|
||||
const completedVariantAnalysis = variantAnalysisHistory[0];
|
||||
@@ -1109,26 +835,6 @@ describe("QueryHistoryManager", () => {
|
||||
expect(executeCommandSpy).not.toBeCalled();
|
||||
});
|
||||
|
||||
it("should copy repo list for a single remote query", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(allHistory);
|
||||
|
||||
const item = remoteQueryHistory[1];
|
||||
await queryHistoryManager.handleCopyRepoList(item, [item]);
|
||||
expect(executeCommandSpy).toBeCalledWith(
|
||||
"codeQL.copyRepoList",
|
||||
item.queryId,
|
||||
);
|
||||
});
|
||||
|
||||
it("should not copy repo list for multiple remote queries", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(allHistory);
|
||||
|
||||
const item1 = remoteQueryHistory[1];
|
||||
const item2 = remoteQueryHistory[3];
|
||||
await queryHistoryManager.handleCopyRepoList(item1, [item1, item2]);
|
||||
expect(executeCommandSpy).not.toBeCalled();
|
||||
});
|
||||
|
||||
it("should copy repo list for a single variant analysis", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(allHistory);
|
||||
|
||||
@@ -1160,26 +866,6 @@ describe("QueryHistoryManager", () => {
|
||||
expect(executeCommandSpy).not.toBeCalled();
|
||||
});
|
||||
|
||||
it("should export results for a single remote query", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(allHistory);
|
||||
|
||||
const item = remoteQueryHistory[1];
|
||||
await queryHistoryManager.handleExportResults(item, [item]);
|
||||
expect(executeCommandSpy).toBeCalledWith(
|
||||
"codeQL.exportRemoteQueryResults",
|
||||
item.queryId,
|
||||
);
|
||||
});
|
||||
|
||||
it("should not export results for multiple remote queries", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(allHistory);
|
||||
|
||||
const item1 = remoteQueryHistory[1];
|
||||
const item2 = remoteQueryHistory[3];
|
||||
await queryHistoryManager.handleExportResults(item1, [item1, item2]);
|
||||
expect(executeCommandSpy).not.toBeCalled();
|
||||
});
|
||||
|
||||
it("should export results for a single variant analysis", async () => {
|
||||
queryHistoryManager = await createMockQueryHistory(allHistory);
|
||||
|
||||
@@ -1459,16 +1145,11 @@ describe("QueryHistoryManager", () => {
|
||||
});
|
||||
});
|
||||
|
||||
async function createMockQueryHistory(
|
||||
allHistory: QueryHistoryInfo[],
|
||||
credentials?: Credentials,
|
||||
) {
|
||||
async function createMockQueryHistory(allHistory: QueryHistoryInfo[]) {
|
||||
const qhm = new QueryHistoryManager(
|
||||
createMockApp({ credentials }),
|
||||
{} as QueryRunner,
|
||||
{} as DatabaseManager,
|
||||
localQueriesResultsViewStub,
|
||||
remoteQueriesManagerStub,
|
||||
variantAnalysisManagerStub,
|
||||
{} as EvalLogViewer,
|
||||
"xxx",
|
||||
|
||||
@@ -1,537 +0,0 @@
|
||||
import {
|
||||
readJSONSync,
|
||||
ensureDir,
|
||||
copy,
|
||||
remove,
|
||||
readFileSync,
|
||||
writeFileSync,
|
||||
} from "fs-extra";
|
||||
import { join } from "path";
|
||||
|
||||
import {
|
||||
CancellationToken,
|
||||
ExtensionContext,
|
||||
TextDocument,
|
||||
TextEditor,
|
||||
Uri,
|
||||
window,
|
||||
workspace,
|
||||
} from "vscode";
|
||||
import { QueryHistoryConfig } from "../../../../src/config";
|
||||
import { DatabaseManager } from "../../../../src/databases";
|
||||
import { tmpDir, walkDirectory } from "../../../../src/helpers";
|
||||
import { AnalysesResultsManager } from "../../../../src/remote-queries/analyses-results-manager";
|
||||
import { RemoteQueryResult } from "../../../../src/remote-queries/shared/remote-query-result";
|
||||
import { DisposableBucket } from "../../disposable-bucket";
|
||||
import { testDisposeHandler } from "../../test-dispose-handler";
|
||||
import { HistoryItemLabelProvider } from "../../../../src/query-history/history-item-label-provider";
|
||||
import { RemoteQueriesManager } from "../../../../src/remote-queries/remote-queries-manager";
|
||||
import { ResultsView } from "../../../../src/interface";
|
||||
import { EvalLogViewer } from "../../../../src/eval-log-viewer";
|
||||
import { QueryRunner } from "../../../../src/queryRunner";
|
||||
import { VariantAnalysisManager } from "../../../../src/remote-queries/variant-analysis-manager";
|
||||
import { App } from "../../../../src/common/app";
|
||||
import { createMockApp } from "../../../__mocks__/appMock";
|
||||
import { testCredentialsWithStub } from "../../../factories/authentication";
|
||||
import { QueryHistoryManager } from "../../../../src/query-history/query-history-manager";
|
||||

// set a higher timeout since recursive delete may take a while, especially on Windows.
jest.setTimeout(120000);

/**
* Tests for remote queries and how they interact with the query history manager.
*/

describe("Remote queries and query history manager", () => {
|
||||
const EXTENSION_PATH = join(__dirname, "../../../../");
|
||||
const STORAGE_DIR = Uri.file(join(tmpDir.name, "remote-queries")).fsPath;
|
||||
const asyncNoop = async () => {
|
||||
/** noop */
|
||||
};
|
||||
|
||||
const mockOctokit = jest.fn();
|
||||
let app: App;
|
||||
let qhm: QueryHistoryManager;
|
||||
const localQueriesResultsViewStub = {
|
||||
showResults: jest.fn(),
|
||||
} as any as ResultsView;
|
||||
let rawQueryHistory: any;
|
||||
let remoteQueryResult0: RemoteQueryResult;
|
||||
let remoteQueryResult1: RemoteQueryResult;
|
||||
let disposables: DisposableBucket;
|
||||
|
||||
const rehydrateRemoteQueryStub = jest.fn();
|
||||
const removeRemoteQueryStub = jest.fn();
|
||||
const openRemoteQueryResultsStub = jest.fn();
|
||||
|
||||
const remoteQueriesManagerStub = {
|
||||
onRemoteQueryAdded: jest.fn(),
|
||||
onRemoteQueryRemoved: jest.fn(),
|
||||
onRemoteQueryStatusUpdate: jest.fn(),
|
||||
rehydrateRemoteQuery: rehydrateRemoteQueryStub,
|
||||
removeRemoteQuery: removeRemoteQueryStub,
|
||||
openRemoteQueryResults: openRemoteQueryResultsStub,
|
||||
} as any as RemoteQueriesManager;
|
||||
|
||||
const variantAnalysisManagerStub = {
|
||||
onVariantAnalysisAdded: jest.fn(),
|
||||
onVariantAnalysisStatusUpdated: jest.fn(),
|
||||
onVariantAnalysisRemoved: jest.fn(),
|
||||
} as any as VariantAnalysisManager;
|
||||
|
||||
let showTextDocumentSpy: jest.SpiedFunction<typeof window.showTextDocument>;
|
||||
let openTextDocumentSpy: jest.SpiedFunction<
|
||||
typeof workspace.openTextDocument
|
||||
>;
|
||||
|
||||
beforeEach(async () => {
|
||||
// Since these tests change the state of the query history manager, we need to copy the original
|
||||
// to a temporary folder where we can manipulate it for tests
|
||||
await copyHistoryState();
|
||||
|
||||
disposables = new DisposableBucket();
|
||||
|
||||
rawQueryHistory = readJSONSync(
|
||||
join(STORAGE_DIR, "workspace-query-history.json"),
|
||||
).queries;
|
||||
remoteQueryResult0 = readJSONSync(
|
||||
join(
|
||||
STORAGE_DIR,
|
||||
"queries",
|
||||
rawQueryHistory[0].queryId,
|
||||
"query-result.json",
|
||||
),
|
||||
);
|
||||
remoteQueryResult1 = readJSONSync(
|
||||
join(
|
||||
STORAGE_DIR,
|
||||
"queries",
|
||||
rawQueryHistory[1].queryId,
|
||||
"query-result.json",
|
||||
),
|
||||
);
|
||||
|
||||
app = createMockApp({ credentials: testCredentialsWithStub(mockOctokit) });
|
||||
qhm = new QueryHistoryManager(
|
||||
app,
|
||||
{} as QueryRunner,
|
||||
{} as DatabaseManager,
|
||||
localQueriesResultsViewStub,
|
||||
remoteQueriesManagerStub,
|
||||
variantAnalysisManagerStub,
|
||||
{} as EvalLogViewer,
|
||||
STORAGE_DIR,
|
||||
{
|
||||
globalStorageUri: Uri.file(STORAGE_DIR),
|
||||
extensionPath: EXTENSION_PATH,
|
||||
} as ExtensionContext,
|
||||
{
|
||||
onDidChangeConfiguration: () => new DisposableBucket(),
|
||||
} as unknown as QueryHistoryConfig,
|
||||
new HistoryItemLabelProvider({} as QueryHistoryConfig),
|
||||
asyncNoop,
|
||||
);
|
||||
disposables.push(qhm);
|
||||
|
||||
showTextDocumentSpy = jest
|
||||
.spyOn(window, "showTextDocument")
|
||||
.mockResolvedValue(undefined as unknown as TextEditor);
|
||||
openTextDocumentSpy = jest
|
||||
.spyOn(workspace, "openTextDocument")
|
||||
.mockResolvedValue(undefined as unknown as TextDocument);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await deleteHistoryState();
|
||||
disposables.dispose(testDisposeHandler);
|
||||
});
|
||||
|
||||
it("should read query history", async () => {
|
||||
await qhm.readQueryHistory();
|
||||
|
||||
// Should have added the query history. Contents are directly from the file
|
||||
expect(rehydrateRemoteQueryStub).toBeCalledTimes(2);
|
||||
expect(rehydrateRemoteQueryStub).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
rawQueryHistory[0].queryId,
|
||||
rawQueryHistory[0].remoteQuery,
|
||||
rawQueryHistory[0].status,
|
||||
);
|
||||
expect(rehydrateRemoteQueryStub).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
rawQueryHistory[1].queryId,
|
||||
rawQueryHistory[1].remoteQuery,
|
||||
rawQueryHistory[1].status,
|
||||
);
|
||||
|
||||
expect(qhm.treeDataProvider.allHistory[0]).toEqual(rawQueryHistory[0]);
|
||||
expect(qhm.treeDataProvider.allHistory[1]).toEqual(rawQueryHistory[1]);
|
||||
expect(qhm.treeDataProvider.allHistory.length).toBe(2);
|
||||
});
|
||||
|
||||
it("should remove and then add query from history", async () => {
|
||||
await qhm.readQueryHistory();
|
||||
|
||||
// Remove the first query
|
||||
await qhm.handleRemoveHistoryItem(qhm.treeDataProvider.allHistory[0]);
|
||||
|
||||
expect(removeRemoteQueryStub).toHaveBeenCalledWith(
|
||||
rawQueryHistory[0].queryId,
|
||||
);
|
||||
expect(rehydrateRemoteQueryStub).toBeCalledTimes(2);
|
||||
expect(rehydrateRemoteQueryStub).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
rawQueryHistory[0].queryId,
|
||||
rawQueryHistory[0].remoteQuery,
|
||||
rawQueryHistory[0].status,
|
||||
);
|
||||
expect(rehydrateRemoteQueryStub).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
rawQueryHistory[1].queryId,
|
||||
rawQueryHistory[1].remoteQuery,
|
||||
rawQueryHistory[1].status,
|
||||
);
|
||||
expect(openRemoteQueryResultsStub).toHaveBeenCalledWith(
|
||||
rawQueryHistory[1].queryId,
|
||||
);
|
||||
expect(qhm.treeDataProvider.allHistory).toEqual(rawQueryHistory.slice(1));
|
||||
|
||||
// Add it back
|
||||
qhm.addQuery(rawQueryHistory[0]);
|
||||
expect(removeRemoteQueryStub).toBeCalledTimes(1);
|
||||
expect(rehydrateRemoteQueryStub).toBeCalledTimes(2);
|
||||
expect(qhm.treeDataProvider.allHistory).toEqual([
|
||||
rawQueryHistory[1],
|
||||
rawQueryHistory[0],
|
||||
]);
|
||||
});
|
||||
|
||||
it("should remove two queries from history", async () => {
|
||||
await qhm.readQueryHistory();
|
||||
|
||||
// Remove both queries
|
||||
// Just for fun, let's do it in reverse order
|
||||
await qhm.handleRemoveHistoryItem(undefined!, [
|
||||
qhm.treeDataProvider.allHistory[1],
|
||||
qhm.treeDataProvider.allHistory[0],
|
||||
]);
|
||||
|
||||
expect(removeRemoteQueryStub).toHaveBeenCalledTimes(2);
|
||||
expect(removeRemoteQueryStub).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
rawQueryHistory[1].queryId,
|
||||
);
|
||||
expect(removeRemoteQueryStub).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
rawQueryHistory[0].queryId,
|
||||
);
|
||||
expect(qhm.treeDataProvider.allHistory).toEqual([]);
|
||||
|
||||
// also, both queries should be removed from on disk storage
|
||||
expect(
|
||||
readJSONSync(join(STORAGE_DIR, "workspace-query-history.json")),
|
||||
).toEqual({
|
||||
version: 2,
|
||||
queries: [],
|
||||
});
|
||||
});
|
||||
|
||||
it("should handle a click", async () => {
|
||||
await qhm.readQueryHistory();
|
||||
|
||||
await qhm.handleItemClicked(qhm.treeDataProvider.allHistory[0], []);
|
||||
expect(openRemoteQueryResultsStub).toHaveBeenCalledWith(
|
||||
rawQueryHistory[0].queryId,
|
||||
);
|
||||
});
|
||||
|
||||
it("should get the query text", async () => {
|
||||
await qhm.readQueryHistory();
|
||||
await qhm.handleShowQueryText(qhm.treeDataProvider.allHistory[0], []);
|
||||
|
||||
expect(showTextDocumentSpy).toBeCalledTimes(1);
|
||||
expect(openTextDocumentSpy).toBeCalledTimes(1);
|
||||
|
||||
const uri: Uri = openTextDocumentSpy.mock.calls[0][0] as Uri;
|
||||
expect(uri.scheme).toBe("codeql");
|
||||
const params = new URLSearchParams(uri.query);
|
||||
expect(params.get("isQuickEval")).toBe("false");
|
||||
expect(params.get("queryText")).toBe(
|
||||
rawQueryHistory[0].remoteQuery.queryText,
|
||||
);
|
||||
});
|
||||
|
||||
describe("AnalysisResultsManager", () => {
|
||||
let mockLogger: any;
|
||||
let mockCliServer: any;
|
||||
let arm: AnalysesResultsManager;
|
||||
|
||||
beforeEach(() => {
|
||||
mockLogger = {
|
||||
log: jest.fn(),
|
||||
};
|
||||
mockCliServer = {
|
||||
bqrsInfo: jest.fn(),
|
||||
bqrsDecode: jest.fn(),
|
||||
};
|
||||
|
||||
arm = new AnalysesResultsManager(
|
||||
app,
|
||||
mockCliServer,
|
||||
join(STORAGE_DIR, "queries"),
|
||||
mockLogger,
|
||||
);
|
||||
});
|
||||
|
||||
it("should avoid re-downloading an analysis result", async () => {
|
||||
// because the analysis result is already on disk, it should not be downloaded
|
||||
const publisher = jest.fn();
|
||||
const analysisSummary = remoteQueryResult0.analysisSummaries[0];
|
||||
await arm.downloadAnalysisResults(analysisSummary, publisher);
|
||||
|
||||
// Should not have made the request since the analysis result is already on disk
|
||||
expect(mockOctokit).not.toBeCalled();
|
||||
|
||||
// result should have been published twice
|
||||
expect(publisher).toHaveBeenCalledTimes(2);
|
||||
|
||||
// first time, it is in progress
|
||||
expect(publisher).toHaveBeenNthCalledWith(1, [
|
||||
expect.objectContaining({
|
||||
nwo: "github/vscode-codeql",
|
||||
status: "InProgress",
|
||||
interpretedResults: expect.anything(), // avoid checking the interpretedResults object since it is complex
|
||||
}),
|
||||
]);
|
||||
|
||||
// second time, it has the path to the sarif file.
|
||||
expect(publisher).toHaveBeenNthCalledWith(2, [
|
||||
expect.objectContaining({
|
||||
nwo: "github/vscode-codeql",
|
||||
status: "Completed",
|
||||
interpretedResults: expect.anything(), // avoid checking the interpretedResults object since it is complex
|
||||
}),
|
||||
]);
|
||||
|
||||
// result should be stored in the manager
|
||||
expect(
|
||||
arm.getAnalysesResults(rawQueryHistory[0].queryId)[0],
|
||||
).toMatchObject({
|
||||
nwo: "github/vscode-codeql",
|
||||
status: "Completed",
|
||||
// interpretedResults: ... avoid checking the interpretedResults object since it is complex
|
||||
});
|
||||
publisher.mockClear();
|
||||
|
||||
// now, let's try to download it again. This time, since it's already in memory,
|
||||
// it should not even be re-published
|
||||
await arm.downloadAnalysisResults(analysisSummary, publisher);
|
||||
expect(publisher).not.toBeCalled();
|
||||
});
|
||||
|
||||
it("should download two artifacts at once", async () => {
|
||||
const publisher = jest.fn();
|
||||
const analysisSummaries = [
|
||||
remoteQueryResult0.analysisSummaries[0],
|
||||
remoteQueryResult0.analysisSummaries[1],
|
||||
];
|
||||
await arm.loadAnalysesResults(analysisSummaries, undefined, publisher);
|
||||
|
||||
const trimmed = publisher.mock.calls
|
||||
.map((call) => call[0])
|
||||
.map((args) => {
|
||||
args.forEach(
|
||||
(analysisResult: any) => delete analysisResult.interpretedResults,
|
||||
);
|
||||
return args;
|
||||
});
|
||||
|
||||
// As before, but now both summaries should have been published
|
||||
expect(trimmed[0]).toEqual([
|
||||
{
|
||||
nwo: "github/vscode-codeql",
|
||||
status: "InProgress",
|
||||
resultCount: 15,
|
||||
lastUpdated: 1653447088649,
|
||||
starCount: 1,
|
||||
},
|
||||
]);
|
||||
|
||||
expect(trimmed[1]).toEqual([
|
||||
{
|
||||
nwo: "github/vscode-codeql",
|
||||
status: "InProgress",
|
||||
resultCount: 15,
|
||||
lastUpdated: 1653447088649,
|
||||
starCount: 1,
|
||||
},
|
||||
{
|
||||
nwo: "other/hucairz",
|
||||
status: "InProgress",
|
||||
resultCount: 15,
|
||||
lastUpdated: 1653447088649,
|
||||
starCount: 1,
|
||||
},
|
||||
]);
|
||||
|
||||
// there is a third call. It is non-deterministic if
|
||||
// github/vscode-codeql is completed first or other/hucairz is.
|
||||
// There is not much point in trying to test it if the other calls are correct.
|
||||
|
||||
expect(trimmed[3]).toEqual([
|
||||
{
|
||||
nwo: "github/vscode-codeql",
|
||||
status: "Completed",
|
||||
resultCount: 15,
|
||||
lastUpdated: 1653447088649,
|
||||
starCount: 1,
|
||||
},
|
||||
{
|
||||
nwo: "other/hucairz",
|
||||
status: "Completed",
|
||||
resultCount: 15,
|
||||
lastUpdated: 1653447088649,
|
||||
starCount: 1,
|
||||
},
|
||||
]);
|
||||
|
||||
expect(publisher).toBeCalledTimes(4);
|
||||
});
|
||||
|
||||
it("should avoid publishing when the request is cancelled", async () => {
|
||||
const publisher = jest.fn();
|
||||
const analysisSummaries = [...remoteQueryResult0.analysisSummaries];
|
||||
|
||||
await expect(
|
||||
arm.loadAnalysesResults(
|
||||
analysisSummaries,
|
||||
{
|
||||
isCancellationRequested: true,
|
||||
} as CancellationToken,
|
||||
publisher,
|
||||
),
|
||||
).rejects.toThrow(/cancelled/);
|
||||
|
||||
expect(publisher).not.toBeCalled();
|
||||
});
|
||||
|
||||
it("should get the analysis results", async () => {
|
||||
const publisher = jest.fn();
|
||||
const analysisSummaries0 = [
|
||||
remoteQueryResult0.analysisSummaries[0],
|
||||
remoteQueryResult0.analysisSummaries[1],
|
||||
];
|
||||
const analysisSummaries1 = [...remoteQueryResult1.analysisSummaries];
|
||||
|
||||
await arm.loadAnalysesResults(analysisSummaries0, undefined, publisher);
|
||||
await arm.loadAnalysesResults(analysisSummaries1, undefined, publisher);
|
||||
|
||||
const result0 = arm.getAnalysesResults(rawQueryHistory[0].queryId);
|
||||
const result0Again = arm.getAnalysesResults(rawQueryHistory[0].queryId);
|
||||
|
||||
// Should be equal in value, but not the same object
|
||||
expect(result0).toEqual(result0Again);
|
||||
expect(result0).not.toBe(result0Again);
|
||||
|
||||
const result1 = arm.getAnalysesResults(rawQueryHistory[1].queryId);
|
||||
const result1Again = arm.getAnalysesResults(rawQueryHistory[1].queryId);
|
||||
expect(result1).toEqual(result1Again);
|
||||
expect(result1).not.toBe(result1Again);
|
||||
});
|
||||
|
||||
// This test is failing on windows in CI.
|
||||
it.skip("should read sarif", async () => {
|
||||
const publisher = jest.fn();
|
||||
const analysisSummaries0 = [remoteQueryResult0.analysisSummaries[0]];
|
||||
await arm.loadAnalysesResults(analysisSummaries0, undefined, publisher);
|
||||
|
||||
const sarif = readJSONSync(
|
||||
join(
|
||||
STORAGE_DIR,
|
||||
"queries",
|
||||
rawQueryHistory[0].queryId,
|
||||
"171543249",
|
||||
"results.sarif",
|
||||
),
|
||||
);
|
||||
const queryResults = sarif.runs
|
||||
.flatMap((run: any) => run.results)
|
||||
.map((result: any) => ({ message: result.message.text }));
|
||||
|
||||
expect(publisher).toHaveBeenNthCalledWith(2, [
|
||||
{
|
||||
results: queryResults,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("should check if an artifact is downloaded and not in memory", async () => {
|
||||
// Load remoteQueryResult0.analysisSummaries[1] into memory
|
||||
await arm.downloadAnalysisResults(
|
||||
remoteQueryResult0.analysisSummaries[1],
|
||||
() => Promise.resolve(),
|
||||
);
|
||||
|
||||
// on disk
|
||||
expect(
|
||||
await (arm as any).isAnalysisDownloaded(
|
||||
remoteQueryResult0.analysisSummaries[0],
|
||||
),
|
||||
).toBe(true);
|
||||
|
||||
// in memory
|
||||
expect(
|
||||
await (arm as any).isAnalysisDownloaded(
|
||||
remoteQueryResult0.analysisSummaries[1],
|
||||
),
|
||||
).toBe(true);
|
||||
|
||||
// not downloaded
|
||||
expect(
|
||||
await (arm as any).isAnalysisDownloaded(
|
||||
remoteQueryResult0.analysisSummaries[2],
|
||||
),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it("should load downloaded artifacts", async () => {
|
||||
await arm.loadDownloadedAnalyses(remoteQueryResult0.analysisSummaries);
|
||||
const queryId = rawQueryHistory[0].queryId;
|
||||
const analysesResultsNwos = arm
|
||||
.getAnalysesResults(queryId)
|
||||
.map((ar) => ar.nwo)
|
||||
.sort();
|
||||
expect(analysesResultsNwos[0]).toBe("github/vscode-codeql");
|
||||
expect(analysesResultsNwos[1]).toBe("other/hucairz");
|
||||
expect(analysesResultsNwos.length).toBe(2);
|
||||
});
|
||||
});

async function copyHistoryState() {
await ensureDir(STORAGE_DIR);
await ensureDir(join(tmpDir.name, "remote-queries"));
await copy(
join(__dirname, "../data/remote-queries/"),
join(tmpDir.name, "remote-queries"),
);

// also, replace "PLACEHOLDER" in the copied files so that they point to the correct directory
for await (const p of walkDirectory(STORAGE_DIR)) {
replacePlaceholder(join(p));
}
}

async function deleteHistoryState() {
await remove(STORAGE_DIR);
}

function replacePlaceholder(filePath: string) {
if (filePath.endsWith(".json")) {
const newContents = readFileSync(filePath, "utf8").replaceAll(
"PLACEHOLDER",
STORAGE_DIR.replaceAll("\\", "/"),
);
writeFileSync(filePath, newContents, "utf8");
}
}
});
|
||||
@@ -15,13 +15,10 @@ import { tmpDir, walkDirectory } from "../../../../src/helpers";
|
||||
import { DisposableBucket } from "../../disposable-bucket";
|
||||
import { testDisposeHandler } from "../../test-dispose-handler";
|
||||
import { HistoryItemLabelProvider } from "../../../../src/query-history/history-item-label-provider";
|
||||
import { RemoteQueriesManager } from "../../../../src/remote-queries/remote-queries-manager";
|
||||
import { ResultsView } from "../../../../src/interface";
|
||||
import { EvalLogViewer } from "../../../../src/eval-log-viewer";
|
||||
import { QueryRunner } from "../../../../src/queryRunner";
|
||||
import { VariantAnalysisManager } from "../../../../src/remote-queries/variant-analysis-manager";
|
||||
import { App } from "../../../../src/common/app";
|
||||
import { createMockApp } from "../../../__mocks__/appMock";
|
||||
import { QueryHistoryManager } from "../../../../src/query-history/query-history-manager";
|
||||
|
||||
// set a higher timeout since recursive delete may take a while, especially on Windows.
|
||||
@@ -38,7 +35,6 @@ describe("Variant Analyses and QueryHistoryManager", () => {
    /** noop */
  };

  let app: App;
  let qhm: QueryHistoryManager;
  let rawQueryHistory: any;
  let disposables: DisposableBucket;
@@ -50,14 +46,6 @@ describe("Variant Analyses and QueryHistoryManager", () => {
  const localQueriesResultsViewStub = {
    showResults: jest.fn(),
  } as any as ResultsView;
  const remoteQueriesManagerStub = {
    onRemoteQueryAdded: jest.fn(),
    onRemoteQueryRemoved: jest.fn(),
    onRemoteQueryStatusUpdate: jest.fn(),
    rehydrateRemoteQuery: jest.fn(),
    removeRemoteQuery: jest.fn(),
    openRemoteQueryResults: jest.fn(),
  } as any as RemoteQueriesManager;
  const variantAnalysisManagerStub = {
    onVariantAnalysisAdded: jest.fn(),
    onVariantAnalysisRemoved: jest.fn(),
@@ -80,14 +68,10 @@ describe("Variant Analyses and QueryHistoryManager", () => {
      join(STORAGE_DIR, "workspace-query-history.json"),
    ).queries;

    app = createMockApp({});

    qhm = new QueryHistoryManager(
      app,
      {} as QueryRunner,
      {} as DatabaseManager,
      localQueriesResultsViewStub,
      remoteQueriesManagerStub,
      variantAnalysisManagerStub,
      {} as EvalLogViewer,
      STORAGE_DIR,

@@ -3,7 +3,7 @@ import {
  serializeQueryHistory,
} from "../../../src/query-serialization";
import { join } from "path";
import { writeFileSync, mkdirpSync } from "fs-extra";
import { writeFileSync, mkdirpSync, writeFile } from "fs-extra";
import { LocalQueryInfo, InitialQueryInfo } from "../../../src/query-results";
import { QueryWithResults } from "../../../src/run-queries-shared";
import { DatabaseInfo } from "../../../src/pure/interface-types";
@@ -11,11 +11,10 @@ import { CancellationTokenSource, Uri } from "vscode";
import { tmpDir } from "../../../src/helpers";
import { QueryResultType } from "../../../src/pure/legacy-messages";
import { QueryInProgress } from "../../../src/legacy-query-server/run-queries";
import { RemoteQueryHistoryItem } from "../../../src/remote-queries/remote-query-history-item";
import { VariantAnalysisHistoryItem } from "../../../src/query-history/variant-analysis-history-item";
import { QueryHistoryInfo } from "../../../src/query-history/query-history-info";
import { createMockRemoteQueryHistoryItem } from "../../factories/query-history/remote-query-history-item";
import { createMockVariantAnalysisHistoryItem } from "../../factories/query-history/variant-analysis-history-item";
import { nanoid } from "nanoid";

describe("serialize and deserialize", () => {
  let infoSuccessRaw: LocalQueryInfo;
@@ -24,13 +23,11 @@ describe("serialize and deserialize", () => {
  let infoLateFailure: LocalQueryInfo;
  let infoInProgress: LocalQueryInfo;

  let remoteQuery1: RemoteQueryHistoryItem;
  let remoteQuery2: RemoteQueryHistoryItem;

  let variantAnalysis1: VariantAnalysisHistoryItem;
  let variantAnalysis2: VariantAnalysisHistoryItem;

  let allHistory: QueryHistoryInfo[];
  let expectedHistory: QueryHistoryInfo[];
  let queryPath: string;
  let cnt = 0;

@@ -70,9 +67,6 @@ describe("serialize and deserialize", () => {
    );
    infoInProgress = createMockFullQueryInfo("e");

    remoteQuery1 = createMockRemoteQueryHistoryItem({});
    remoteQuery2 = createMockRemoteQueryHistoryItem({});

    variantAnalysis1 = createMockVariantAnalysisHistoryItem({});
    variantAnalysis2 = createMockVariantAnalysisHistoryItem({});

@@ -82,25 +76,21 @@ describe("serialize and deserialize", () => {
      infoEarlyFailure,
      infoLateFailure,
      infoInProgress,
      remoteQuery1,
      remoteQuery2,
      variantAnalysis1,
      variantAnalysis2,
    ];

    // the expected results only contain the history with completed queries
    expectedHistory = [
      infoSuccessRaw,
      infoSuccessInterpreted,
      infoLateFailure,
      variantAnalysis1,
      variantAnalysis2,
    ];
  });

  it("should serialize and deserialize query history", async () => {
    // the expected results only contain the history with completed queries
    const expectedHistory = [
      infoSuccessRaw,
      infoSuccessInterpreted,
      infoLateFailure,
      remoteQuery1,
      remoteQuery2,
      variantAnalysis1,
      variantAnalysis2,
    ];

    const allHistoryPath = join(tmpDir.name, "workspace-query-history.json");

    // serialize and deserialize
@@ -137,6 +127,61 @@ describe("serialize and deserialize", () => {
    expect(allHistoryActual.length).toEqual(expectedHistory.length);
  });

  it("should remove remote queries from the history", async () => {
    const path = join(tmpDir.name, "query-history-with-remote.json");
    await writeFile(
      path,
      JSON.stringify({
        version: 2,
        queries: [
          ...allHistory,
          {
            t: "remote",
            status: "InProgress",
            completed: false,
            queryId: nanoid(),
            remoteQuery: {
              queryName: "query-name",
              queryFilePath: "query-file.ql",
              queryText: "select 1",
              language: "javascript",
              controllerRepository: {
                owner: "github",
                name: "vscode-codeql-integration-tests",
              },
              executionStartTime: Date.now(),
              actionsWorkflowRunId: 1,
              repositoryCount: 0,
            },
          },
          {
            t: "remote",
            status: "Completed",
            completed: true,
            queryId: nanoid(),
            remoteQuery: {
              queryName: "query-name",
              queryFilePath: "query-file.ql",
              queryText: "select 1",
              language: "javascript",
              controllerRepository: {
                owner: "github",
                name: "vscode-codeql-integration-tests",
              },
              executionStartTime: Date.now(),
              actionsWorkflowRunId: 1,
              repositoryCount: 0,
            },
          },
        ],
      }),
      "utf8",
    );

    const actual = await deserializeQueryHistory(path);
    expect(actual.length).toEqual(expectedHistory.length);
  });
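
For context, a hedged sketch of the behaviour the test above pins down, not the actual query-serialization implementation, and with a deliberately simplified item shape: entries whose discriminator is "remote" are dropped when the history file is read back.

// Simplified shape for illustration only; the real QueryHistoryInfo type is richer.
interface HistoryItemSketch {
  t: "local" | "remote" | "variant-analysis";
}

// Hedged sketch: remote-query entries are filtered out on deserialization, the rest are kept.
function dropRemoteQueries(items: HistoryItemSketch[]): HistoryItemSketch[] {
  return items.filter((item) => item.t !== "remote");
}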

  it("should handle an invalid query history version", async () => {
    const badPath = join(tmpDir.name, "bad-query-history.json");
    writeFileSync(

@@ -1,54 +0,0 @@
import { join } from "path";
import { readFile } from "fs-extra";
import * as markdownGenerator from "../../../../src/remote-queries/remote-queries-markdown-generation";
import * as ghApiClient from "../../../../src/remote-queries/gh-api/gh-api-client";
import { exportRemoteQueryAnalysisResults } from "../../../../src/remote-queries/export-results";
import { testCredentialsWithStub } from "../../../factories/authentication";

describe("export results", () => {
  describe("exportRemoteQueryAnalysisResults", () => {
    beforeEach(() => {
      jest.spyOn(markdownGenerator, "generateMarkdown").mockReturnValue([]);
    });

    it("should call the GitHub Actions API with the correct gist title", async function () {
      const mockCreateGist = jest
        .spyOn(ghApiClient, "createGist")
        .mockResolvedValue(undefined);

      const query = JSON.parse(
        await readFile(
          join(
            __dirname,
            "../data/remote-queries/query-with-results/query.json",
          ),
          "utf8",
        ),
      );
      const analysesResults = JSON.parse(
        await readFile(
          join(
            __dirname,
            "../data/remote-queries/query-with-results/analyses-results.json",
          ),
          "utf8",
        ),
      );

      await exportRemoteQueryAnalysisResults(
        "",
        query,
        analysesResults,
        "gist",
        testCredentialsWithStub(),
      );

      expect(mockCreateGist).toHaveBeenCalledTimes(1);
      expect(mockCreateGist).toHaveBeenCalledWith(
        expect.anything(),
        "Shell command built from environment values (javascript) 3 results (10 repositories)",
        expect.anything(),
      );
    });
  });
});
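
For context, the deleted test above asserted a gist title of the form "<query name> (<language>) <N> results (<M> repositories)". A hypothetical sketch of assembling such a title follows; the function is illustrative, not the extension's export-results implementation.

// Illustrative only; not the real export-results.ts logic.
function formatGistTitle(
  queryName: string,
  language: string,
  resultCount: number,
  repositoryCount: number,
): string {
  return `${queryName} (${language}) ${resultCount} results (${repositoryCount} repositories)`;
}

// formatGistTitle("Shell command built from environment values", "javascript", 3, 10)
// yields the exact title asserted in the deleted test.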