Merge branch 'main' into aeisenberg/graph-viewer

Andrew Eisenberg
2022-02-25 10:38:19 -08:00
82 changed files with 3623 additions and 698 deletions

View File

@@ -135,7 +135,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
version: ['v2.3.3', 'v2.4.6', 'v2.5.9', 'v2.6.3', 'v2.7.6', 'v2.8.0', 'nightly']
version: ['v2.3.3', 'v2.4.6', 'v2.5.9', 'v2.6.3', 'v2.7.6', 'v2.8.1', 'nightly']
env:
CLI_VERSION: ${{ matrix.version }}
NIGHTLY_URL: ${{ needs.find-nightly.outputs.url }}

View File

@@ -2,10 +2,15 @@
## [UNRELEASED]
- Fix a bug where database upgrades could not be resolved if some of the target pack's dependencies are outside of the workspace. [#1138](https://github.com/github/vscode-codeql/pull/1138)
- Open the query server logs for query errors (instead of the extension log). This will make it easier to track down query errors. [#1158](https://github.com/github/vscode-codeql/pull/1158)
- Fix a bug where queries took a long time to run when there were no folders in the workspace. [#1157](https://github.com/github/vscode-codeql/pull/1157)
## 1.5.11 - 10 February 2022
- Fix a bug where invoking _View AST_ from the file explorer would not view the selected file. Instead it would view the active editor. Also, prevent the _View AST_ command from appearing if the current selection includes a directory or multiple files. [#1113](https://github.com/github/vscode-codeql/pull/1113)
- Add query history items as soon as a query is run, including new icons for each history item. [#1094](https://github.com/github/vscode-codeql/pull/1094)
- Save query history items across restarts. Items are saved for 30 days; this retention period can be changed via the `codeQL.queryHistory.ttl` setting. [#1130](https://github.com/github/vscode-codeql/pull/1130)
- Allow in-progress query items to be cancelled from the query history view. [#1105](https://github.com/github/vscode-codeql/pull/1105)
## 1.5.10 - 25 January 2022
@@ -32,7 +37,7 @@
- Fix a bug with importing large databases. Databases over 4GB can now be imported directly from LGTM or from a zip file. This functionality is only available when using CodeQL CLI version 2.6.0 or later. [#971](https://github.com/github/vscode-codeql/pull/971)
- Replace certain control codes (`U+0000` - `U+001F`) with their corresponding control labels (`U+2400` - `U+241F`) in the results view. [#963](https://github.com/github/vscode-codeql/pull/963)
- Allow case-insensitive project slugs for GitHub repositories when adding a CodeQL database from LGTM. [#978](https://github.com/github/vscode-codeql/pull/961)
- Add a _CodeQL: Preview Query Help_ command to generate Markdown previews of `.qhelp` query help files. This command should only be run in trusted workspaces. See https://codeql.github.com/docs/codeql-cli/testing-query-help-files for more information about query help. [#988](https://github.com/github/vscode-codeql/pull/988)
- Add a _CodeQL: Preview Query Help_ command to generate Markdown previews of `.qhelp` query help files. This command should only be run in trusted workspaces. See [the CodeQL CLI docs](https://codeql.github.com/docs/codeql-cli/testing-query-help-files) for more information about query help. [#988](https://github.com/github/vscode-codeql/pull/988)
- Make "Open Referenced File" command accessible from the active editor menu. [#989](https://github.com/github/vscode-codeql/pull/989)
- Fix a bug where result set names in the result set drop-down were disappearing when viewing a sorted table. [#1007](https://github.com/github/vscode-codeql/pull/1007)
- Allow query result locations with 0 as the end column value. These are treated as the first column in the line. [#1002](https://github.com/github/vscode-codeql/pull/1002)

View File

@@ -0,0 +1,16 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="7.5" cy="7.5" r="7" stroke="#959DA5"/>
<mask id="mask0_394_2982" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="0" y="0" width="15" height="15">
<circle cx="7.5" cy="7.5" r="7.5" fill="#C4C4C4"/>
</mask>
<g mask="url(#mask0_394_2982)">
<path d="M14.5 7.5C14.5 9.42971 13.6822 11.1907 12.5493 12.4721C11.4035 13.7683 10.0054 14.5 8.90625 14.5C7.84644 14.5 6.81131 13.8113 6.01569 12.5383C5.22447 11.2724 4.71875 9.49235 4.71875 7.5C4.71875 5.50765 5.22447 3.72765 6.01569 2.4617C6.81131 1.1887 7.84644 0.5 8.90625 0.5C10.0054 0.5 11.4035 1.23172 12.5493 2.52786C13.6822 3.80934 14.5 5.57029 14.5 7.5Z" stroke="#959DA5"/>
</g>
<mask id="mask1_394_2982" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="1" y="0" width="16" height="15">
<circle cx="9.375" cy="7.5" r="7.5" fill="#C4C4C4"/>
</mask>
<g mask="url(#mask1_394_2982)">
<path d="M10.2812 7.5C10.2812 9.49235 9.77553 11.2724 8.98431 12.5383C8.18869 13.8113 7.15356 14.5 6.09375 14.5C4.99456 14.5 3.5965 13.7683 2.45067 12.4721C1.31781 11.1907 0.5 9.42971 0.5 7.5C0.5 5.57029 1.31781 3.80934 2.45067 2.52786C3.5965 1.23172 4.99456 0.5 6.09375 0.5C7.15356 0.5 8.18869 1.1887 8.98431 2.4617C9.77553 3.72765 10.2812 5.50765 10.2812 7.5Z" stroke="#959DA5"/>
</g>
<line y1="7.5" x2="15" y2="7.5" stroke="#959DA5"/>
</svg>


View File

@@ -59,7 +59,7 @@
"@types/jszip": "~3.1.6",
"@types/mocha": "^9.0.0",
"@types/nanoid": "^3.0.0",
"@types/node": "^12.14.1",
"@types/node": "^16.11.25",
"@types/node-fetch": "~2.5.2",
"@types/proxyquire": "~1.3.28",
"@types/react": "^17.0.2",
@@ -1316,11 +1316,6 @@
"@types/node": "*"
}
},
"node_modules/@types/glob/node_modules/@types/node": {
"version": "14.0.23",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.23.tgz",
"integrity": "sha512-Z4U8yDAl5TFkmYsZdFPdjeMa57NOvnaf1tljHzhouaPEp7LCj2JKkejpI1ODviIAQuW4CcQmxkQ77rnLsOOoKw=="
},
"node_modules/@types/google-protobuf": {
"version": "3.7.2",
"resolved": "https://registry.npmjs.org/@types/google-protobuf/-/google-protobuf-3.7.2.tgz",
@@ -1534,10 +1529,9 @@
}
},
"node_modules/@types/node": {
"version": "12.19.4",
"resolved": "https://registry.npmjs.org/@types/node/-/node-12.19.4.tgz",
"integrity": "sha512-o3oj1bETk8kBwzz1WlO6JWL/AfAA3Vm6J1B3C9CsdxHYp7XgPiH7OEXPUbZTndHlRaIElrANkQfe6ZmfJb3H2w==",
"dev": true
"version": "16.11.25",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.25.tgz",
"integrity": "sha512-NrTwfD7L1RTc2qrHQD4RTTy4p0CO2LatKBEKEds3CaVuhoM/+DJzmWZl5f+ikR8cm8F5mfJxK+9rQq07gRiSjQ=="
},
"node_modules/@types/node-fetch": {
"version": "2.5.7",
@@ -1605,12 +1599,6 @@
"@types/node": "*"
}
},
"node_modules/@types/semver/node_modules/@types/node": {
"version": "14.0.23",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.23.tgz",
"integrity": "sha512-Z4U8yDAl5TFkmYsZdFPdjeMa57NOvnaf1tljHzhouaPEp7LCj2JKkejpI1ODviIAQuW4CcQmxkQ77rnLsOOoKw==",
"dev": true
},
"node_modules/@types/sinon": {
"version": "7.5.2",
"resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-7.5.2.tgz",
@@ -14413,13 +14401,6 @@
"requires": {
"@types/minimatch": "*",
"@types/node": "*"
},
"dependencies": {
"@types/node": {
"version": "14.0.23",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.23.tgz",
"integrity": "sha512-Z4U8yDAl5TFkmYsZdFPdjeMa57NOvnaf1tljHzhouaPEp7LCj2JKkejpI1ODviIAQuW4CcQmxkQ77rnLsOOoKw=="
}
}
},
"@types/glob-stream": {
@@ -14610,10 +14591,9 @@
}
},
"@types/node": {
"version": "12.19.4",
"resolved": "https://registry.npmjs.org/@types/node/-/node-12.19.4.tgz",
"integrity": "sha512-o3oj1bETk8kBwzz1WlO6JWL/AfAA3Vm6J1B3C9CsdxHYp7XgPiH7OEXPUbZTndHlRaIElrANkQfe6ZmfJb3H2w==",
"dev": true
"version": "16.11.25",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.25.tgz",
"integrity": "sha512-NrTwfD7L1RTc2qrHQD4RTTy4p0CO2LatKBEKEds3CaVuhoM/+DJzmWZl5f+ikR8cm8F5mfJxK+9rQq07gRiSjQ=="
},
"@types/node-fetch": {
"version": "2.5.7",
@@ -14679,14 +14659,6 @@
"dev": true,
"requires": {
"@types/node": "*"
},
"dependencies": {
"@types/node": {
"version": "14.0.23",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.23.tgz",
"integrity": "sha512-Z4U8yDAl5TFkmYsZdFPdjeMa57NOvnaf1tljHzhouaPEp7LCj2JKkejpI1ODviIAQuW4CcQmxkQ77rnLsOOoKw==",
"dev": true
}
}
},
"@types/sinon": {

View File

@@ -225,6 +225,12 @@
"default": "%q on %d - %s, %r result count [%t]",
"markdownDescription": "Default string for how to label query history items.\n* %t is the time of the query\n* %q is the human-readable query name\n* %f is the query file name\n* %d is the database name\n* %r is the number of results\n* %s is a status string"
},
"codeQL.queryHistory.ttl": {
"type": "number",
"default": 30,
"description": "Number of days to retain queries in the query history before being automatically deleted.",
"scope": "machine"
},
"codeQL.runningTests.additionalTestArguments": {
"scope": "window",
"type": "array",
@@ -673,7 +679,7 @@
{
"command": "codeQLQueryHistory.removeHistoryItem",
"group": "9_qlCommands",
"when": "viewItem == interpretedResultsItem || viewItem == rawResultsItem || viewItem == cancelledResultsItem"
"when": "viewItem == interpretedResultsItem || viewItem == rawResultsItem || viewItem == remoteResultsItem || viewItem == cancelledResultsItem"
},
{
"command": "codeQLQueryHistory.setLabel",
@@ -1080,7 +1086,7 @@
"@types/jszip": "~3.1.6",
"@types/mocha": "^9.0.0",
"@types/nanoid": "^3.0.0",
"@types/node": "^12.14.1",
"@types/node": "^16.11.25",
"@types/node-fetch": "~2.5.2",
"@types/proxyquire": "~1.3.28",
"@types/react": "^17.0.2",

View File

@@ -1,5 +1,5 @@
/**
* The jszip and d3 libraries are designed to work in both the browser and
* The d3 library is designed to work in both the browser and
* node. Consequently their typings files refer to both node
* types like `Buffer` (which don't exist in the browser), and browser
* types like `Blob` (which don't exist in node). Instead of sticking
@@ -8,7 +8,6 @@
* succeeds.
*/
declare type Blob = string;
declare type RequestInit = Record<string, unknown>;
declare type ElementTagNameMap = any;
declare type NodeListOf<T> = Record<string, T>;

View File

@@ -404,7 +404,7 @@ export class CodeQLCliServer implements Disposable {
try {
if (cancellationToken !== undefined) {
cancellationRegistration = cancellationToken.onCancellationRequested(_e => {
tk(child.pid);
tk(child.pid || 0);
});
}
if (logger !== undefined) {
@@ -515,8 +515,7 @@ export class CodeQLCliServer implements Disposable {
async resolveLibraryPath(workspaces: string[], queryPath: string): Promise<QuerySetup> {
const subcommandArgs = [
'--query', queryPath,
'--additional-packs',
workspaces.join(path.delimiter)
...this.getAdditionalPacksArg(workspaces)
];
return await this.runJsonCodeQlCliCommand<QuerySetup>(['resolve', 'library-path'], subcommandArgs, 'Resolving library paths');
}
@@ -529,8 +528,7 @@ export class CodeQLCliServer implements Disposable {
const subcommandArgs = [
'--format', 'bylanguage',
queryUri.fsPath,
'--additional-packs',
workspaces.join(path.delimiter)
...this.getAdditionalPacksArg(workspaces)
];
return JSON.parse(await this.runCodeQlCliCommand(['resolve', 'queries'], subcommandArgs, 'Resolving query by language'));
}
@@ -563,6 +561,17 @@ export class CodeQLCliServer implements Disposable {
);
}
/**
* Issues an internal clear-cache command to the cli server. This
* command is used to clear the qlpack cache of the server.
*
* This cache is generally cleared every 1s. This method is used
* to force an early clearing of the cache.
*/
public async clearCache(): Promise<void> {
await this.runCodeQlCliCommand(['clear-cache'], [], 'Clearing qlpack cache');
}
/**
* Runs QL tests.
* @param testPaths Full paths of the tests to run.
@@ -574,7 +583,7 @@ export class CodeQLCliServer implements Disposable {
): AsyncGenerator<TestCompleted, void, unknown> {
const subcommandArgs = this.cliConfig.additionalTestArguments.concat([
'--additional-packs', workspaces.join(path.delimiter),
...this.getAdditionalPacksArg(workspaces),
'--threads',
this.cliConfig.numberTestThreads.toString(),
...testPaths
@@ -596,8 +605,12 @@ export class CodeQLCliServer implements Disposable {
/** Resolves the ML models that should be available when evaluating a query. */
async resolveMlModels(additionalPacks: string[]): Promise<MlModelsInfo> {
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(['resolve', 'ml-models'], ['--additional-packs',
additionalPacks.join(path.delimiter)], 'Resolving ML models', false);
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(
['resolve', 'ml-models'],
this.getAdditionalPacksArg(additionalPacks),
'Resolving ML models',
false
);
}
/**
@@ -725,9 +738,9 @@ export class CodeQLCliServer implements Disposable {
async interpretBqrsGraph(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<string[]> {
const additionalArgs = sourceInfo ? ['--dot-location-url-format', 'file://' + sourceInfo.sourceLocationPrefix + '{path}:{start:line}:{start:column}:{end:line}:{end:column}'] : [];
await this.runInterpretCommand('dot', additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
try {
const dot = await this.readDotFiles(interpretedResultsPath);
return dot;
@@ -783,7 +796,7 @@ export class CodeQLCliServer implements Disposable {
* @returns A list of database upgrade script directories
*/
async resolveUpgrades(dbScheme: string, searchPath: string[], allowDowngradesIfPossible: boolean, targetDbScheme?: string): Promise<UpgradesInfo> {
const args = ['--additional-packs', searchPath.join(path.delimiter), '--dbscheme', dbScheme];
const args = [...this.getAdditionalPacksArg(searchPath), '--dbscheme', dbScheme];
if (targetDbScheme) {
args.push('--target-dbscheme', targetDbScheme);
if (allowDowngradesIfPossible && await this.cliConstraints.supportsDowngrades()) {
@@ -805,7 +818,7 @@ export class CodeQLCliServer implements Disposable {
* @returns A dictionary mapping qlpack name to the directory it comes from
*/
resolveQlpacks(additionalPacks: string[], searchPath?: string[]): Promise<QlpacksInfo> {
const args = ['--additional-packs', additionalPacks.join(path.delimiter)];
const args = this.getAdditionalPacksArg(additionalPacks);
if (searchPath?.length) {
args.push('--search-path', path.join(...searchPath));
}
@@ -851,7 +864,7 @@ export class CodeQLCliServer implements Disposable {
* @returns A list of query files found.
*/
async resolveQueriesInSuite(suite: string, additionalPacks: string[], searchPath?: string[]): Promise<string[]> {
const args = ['--additional-packs', additionalPacks.join(path.delimiter)];
const args = this.getAdditionalPacksArg(additionalPacks);
if (searchPath !== undefined) {
args.push('--search-path', path.join(...searchPath));
}
@@ -884,8 +897,7 @@ export class CodeQLCliServer implements Disposable {
'-o',
outputPath,
dir,
'--additional-packs',
workspaceFolders.join(path.delimiter)
...this.getAdditionalPacksArg(workspaceFolders)
];
if (!precompile && await this.cliConstraints.supportsNoPrecompile()) {
args.push('--no-precompile');
@@ -940,6 +952,12 @@ export class CodeQLCliServer implements Disposable {
throw new Error('No distribution found');
}
}
private getAdditionalPacksArg(paths: string[]): string[] {
return paths.length
? ['--additional-packs', paths.join(path.delimiter)]
: [];
}
}
/**
@@ -1226,6 +1244,12 @@ export class CliVersionConstraint {
*/
public static CLI_VERSION_WITH_PACKAGING = new SemVer('2.6.0');
/**
* CLI version where the `--evaluator-log` and related options to the query server were introduced,
* on a per-query server basis.
*/
public static CLI_VERSION_WITH_STRUCTURED_EVAL_LOG = new SemVer('2.8.2');
constructor(private readonly cli: CodeQLCliServer) {
/**/
}
@@ -1281,4 +1305,8 @@ export class CliVersionConstraint {
async supportsPackaging() {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PACKAGING);
}
async supportsStructuredEvalLog() {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_STRUCTURED_EVAL_LOG);
}
}
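
The repeated `--additional-packs` call sites above are consolidated into the new `getAdditionalPacksArg` helper on `CodeQLCliServer`, which omits the flag entirely when there are no workspace folders (the fix tracked in the CHANGELOG as #1157). A standalone sketch of that behaviour, lifted out of the class:

```typescript
// Standalone sketch of the new private helper's behaviour; the real
// implementation is a private method on CodeQLCliServer.
import * as path from 'path';

function getAdditionalPacksArg(paths: string[]): string[] {
  // With no workspace folders, pass no flag at all rather than an empty value.
  return paths.length
    ? ['--additional-packs', paths.join(path.delimiter)]
    : [];
}

console.log(getAdditionalPacksArg([]));
// -> []
console.log(getAdditionalPacksArg(['/w1', '/w2']));
// -> ['--additional-packs', '/w1:/w2'] (';' is the delimiter on Windows)
```

The same file also gains a `clearCache()` wrapper around the CLI's internal clear-cache command and a `supportsStructuredEvalLog()` check gated on CLI version 2.8.2, following the existing `CliVersionConstraint` pattern.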

View File

@@ -160,7 +160,8 @@ export function commandRunner(
export function commandRunnerWithProgress<R>(
commandId: string,
task: ProgressTask<R>,
progressOptions: Partial<ProgressOptions>
progressOptions: Partial<ProgressOptions>,
outputLogger = logger
): Disposable {
return commands.registerCommand(commandId, async (...args: any[]) => {
const startTime = Date.now();
@@ -177,9 +178,9 @@ export function commandRunnerWithProgress<R>(
if (e instanceof UserCancellationException) {
// User has cancelled this action manually
if (e.silent) {
void logger.log(errorMessage);
void outputLogger.log(errorMessage);
} else {
void showAndLogWarningMessage(errorMessage);
void showAndLogWarningMessage(errorMessage, { outputLogger });
}
} else {
// Include the full stack in the error log only.
@@ -187,6 +188,7 @@ export function commandRunnerWithProgress<R>(
? `${errorMessage}\n${e.stack}`
: errorMessage;
void showAndLogErrorMessage(errorMessage, {
outputLogger,
fullMessage
});
}
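
With the new optional `outputLogger` parameter, callers of `commandRunnerWithProgress` can route error reporting to a specific output channel; the later changes to extension activation pass `queryServerLogger` for the query-running commands. A hedged sketch of such a call site (the command id and task body are invented for illustration, but the shape matches the updated signature above):

```typescript
import { ExtensionContext } from 'vscode';
import { commandRunnerWithProgress } from './commandRunner';
import { queryServerLogger } from './logging';

// Illustrative call site: the command id and task body are made up.
function registerExampleCommand(ctx: ExtensionContext) {
  ctx.subscriptions.push(
    commandRunnerWithProgress(
      'codeQL.exampleRunQuery',   // hypothetical command id
      async (_progress, _token) => {
        // ... run the query, reporting progress and honouring cancellation ...
      },
      {
        title: 'Running query',
        cancellable: true
      },
      // Errors raised by this command are logged to (and open) the query server log.
      queryServerLogger
    )
  );
}
```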

View File

@@ -8,7 +8,7 @@ import {
} from 'vscode';
import * as path from 'path';
import { tmpDir } from '../run-queries';
import { tmpDir } from '../helpers';
import {
FromCompareViewMessage,
ToCompareViewMessage,
@@ -20,11 +20,11 @@ import { DatabaseManager } from '../databases';
import { getHtmlForWebview, jumpToLocation } from '../interface-utils';
import { transformBqrsResultSet, RawResultSet, BQRSInfo } from '../pure/bqrs-cli-types';
import resultsDiff from './resultsDiff';
import { FullCompletedQueryInfo } from '../query-results';
import { CompletedLocalQueryInfo } from '../query-results';
interface ComparePair {
from: FullCompletedQueryInfo;
to: FullCompletedQueryInfo;
from: CompletedLocalQueryInfo;
to: CompletedLocalQueryInfo;
}
export class CompareInterfaceManager extends DisposableObject {
@@ -39,15 +39,15 @@ export class CompareInterfaceManager extends DisposableObject {
private cliServer: CodeQLCliServer,
private logger: Logger,
private showQueryResultsCallback: (
item: FullCompletedQueryInfo
item: CompletedLocalQueryInfo
) => Promise<void>
) {
super();
}
async showResults(
from: FullCompletedQueryInfo,
to: FullCompletedQueryInfo,
from: CompletedLocalQueryInfo,
to: CompletedLocalQueryInfo,
selectedResultSetName?: string
) {
this.comparePair = { from, to };
@@ -188,8 +188,8 @@ export class CompareInterfaceManager extends DisposableObject {
}
private async findCommonResultSetNames(
from: FullCompletedQueryInfo,
to: FullCompletedQueryInfo,
from: CompletedLocalQueryInfo,
to: CompletedLocalQueryInfo,
selectedResultSetName: string | undefined
): Promise<[string[], string, RawResultSet, RawResultSet]> {
const fromSchemas = await this.cliServer.bqrsInfo(

View File

@@ -2,6 +2,7 @@ import { DisposableObject } from './pure/disposable-object';
import { workspace, Event, EventEmitter, ConfigurationChangeEvent, ConfigurationTarget } from 'vscode';
import { DistributionManager } from './distribution';
import { logger } from './logging';
import { ONE_DAY_IN_MS } from './pure/helpers-pure';
/** Helper class to look up a labelled (and possibly nested) setting. */
export class Setting {
@@ -54,8 +55,11 @@ const DISTRIBUTION_SETTING = new Setting('cli', ROOT_SETTING);
export const CUSTOM_CODEQL_PATH_SETTING = new Setting('executablePath', DISTRIBUTION_SETTING);
const INCLUDE_PRERELEASE_SETTING = new Setting('includePrerelease', DISTRIBUTION_SETTING);
const PERSONAL_ACCESS_TOKEN_SETTING = new Setting('personalAccessToken', DISTRIBUTION_SETTING);
// Query History configuration
const QUERY_HISTORY_SETTING = new Setting('queryHistory', ROOT_SETTING);
const QUERY_HISTORY_FORMAT_SETTING = new Setting('format', QUERY_HISTORY_SETTING);
const QUERY_HISTORY_TTL = new Setting('ttl', QUERY_HISTORY_SETTING);
/** When these settings change, the distribution should be updated. */
const DISTRIBUTION_CHANGE_SETTINGS = [CUSTOM_CODEQL_PATH_SETTING, INCLUDE_PRERELEASE_SETTING, PERSONAL_ACCESS_TOKEN_SETTING];
@@ -71,7 +75,6 @@ export interface DistributionConfig {
}
// Query server configuration
const RUNNING_QUERIES_SETTING = new Setting('runningQueries', ROOT_SETTING);
const NUMBER_OF_THREADS_SETTING = new Setting('numberOfThreads', RUNNING_QUERIES_SETTING);
const SAVE_CACHE_SETTING = new Setting('saveCache', RUNNING_QUERIES_SETTING);
@@ -91,7 +94,10 @@ export const PAGE_SIZE = new Setting('pageSize', RESULTS_DISPLAY_SETTING);
const CUSTOM_LOG_DIRECTORY_SETTING = new Setting('customLogDirectory', RUNNING_QUERIES_SETTING);
/** When these settings change, the running query server should be restarted. */
const QUERY_SERVER_RESTARTING_SETTINGS = [NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING, DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING];
const QUERY_SERVER_RESTARTING_SETTINGS = [
NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING,
DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING,
];
export interface QueryServerConfig {
codeQlPath: string;
@@ -106,10 +112,11 @@ export interface QueryServerConfig {
}
/** When these settings change, the query history should be refreshed. */
const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING];
const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING, QUERY_HISTORY_TTL];
export interface QueryHistoryConfig {
format: string;
ttlInMillis: number;
onDidChangeConfiguration: Event<void>;
}
@@ -251,6 +258,13 @@ export class QueryHistoryConfigListener extends ConfigListener implements QueryH
public get format(): string {
return QUERY_HISTORY_FORMAT_SETTING.getValue<string>();
}
/**
* The configuration value is in days, but it is returned here in milliseconds to make it easier to use.
*/
public get ttlInMillis(): number {
return (QUERY_HISTORY_TTL.getValue<number>() || 30) * ONE_DAY_IN_MS;
}
}
export class CliConfigListener extends ConfigListener implements CliConfig {
@@ -343,15 +357,3 @@ export function getRemoteControllerRepo(): string | undefined {
export async function setRemoteControllerRepo(repo: string | undefined) {
await REMOTE_CONTROLLER_REPO.updateValue(repo, ConfigurationTarget.Global);
}
/**
* Whether to insecurely load ML models from CodeQL packs.
*
* This setting is for internal users only.
*/
const SHOULD_INSECURELY_LOAD_MODELS_FROM_PACKS =
new Setting('shouldInsecurelyLoadModelsFromPacks', RUNNING_QUERIES_SETTING);
export function shouldInsecurelyLoadMlModelsFromPacks(): boolean {
return SHOULD_INSECURELY_LOAD_MODELS_FROM_PACKS.getValue<boolean>();
}
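
The TTL setting is stored in days but consumed in milliseconds. A minimal standalone sketch of the conversion the new `ttlInMillis` getter performs, including the 30-day fallback used when the setting is missing or zero:

```typescript
// Standalone sketch of QueryHistoryConfigListener.ttlInMillis.
const ONE_DAY_IN_MS = 24 * 60 * 60 * 1000;

function ttlInMillis(configuredDays: number | undefined): number {
  // Fall back to 30 days when the setting is missing or set to 0.
  return (configuredDays || 30) * ONE_DAY_IN_MS;
}

console.log(ttlInMillis(undefined)); // 2592000000 (30 days)
console.log(ttlInMillis(7));         // 604800000  (7 days)
```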

View File

@@ -28,6 +28,7 @@ export interface FullLocationLink extends LocationLink {
* @param dbm The database manager
* @param uriString The selected source file and location
* @param keyType The contextual query type to run
* @param queryStorageDir The directory to store the query results
* @param progress A progress callback
* @param token A CancellationToken
* @param filter A function that will filter extraneous results
@@ -38,6 +39,7 @@ export async function getLocationsForUriString(
dbm: DatabaseManager,
uriString: string,
keyType: KeyType,
queryStorageDir: string,
progress: ProgressCallback,
token: CancellationToken,
filter: (src: string, dest: string) => boolean
@@ -69,6 +71,7 @@ export async function getLocationsForUriString(
qs,
db,
initialInfo,
queryStorageDir,
progress,
token,
templates

View File

@@ -42,6 +42,7 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
private cli: CodeQLCliServer,
private qs: QueryServerClient,
private dbm: DatabaseManager,
private queryStorageDir: string,
) {
this.cache = new CachedOperation<LocationLink[]>(this.getDefinitions.bind(this));
}
@@ -69,6 +70,7 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
this.dbm,
uriString,
KeyType.DefinitionQuery,
this.queryStorageDir,
progress,
token,
(src, _dest) => src === uriString
@@ -84,6 +86,7 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
private cli: CodeQLCliServer,
private qs: QueryServerClient,
private dbm: DatabaseManager,
private queryStorageDir: string,
) {
this.cache = new CachedOperation<FullLocationLink[]>(this.getReferences.bind(this));
}
@@ -116,6 +119,7 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
this.dbm,
uriString,
KeyType.DefinitionQuery,
this.queryStorageDir,
progress,
token,
(src, _dest) => src === uriString
@@ -136,6 +140,7 @@ export class TemplatePrintAstProvider {
private cli: CodeQLCliServer,
private qs: QueryServerClient,
private dbm: DatabaseManager,
private queryStorageDir: string,
) {
this.cache = new CachedOperation<QueryWithDb>(this.getAst.bind(this));
}
@@ -216,6 +221,7 @@ export class TemplatePrintAstProvider {
this.qs,
db,
initialInfo,
this.queryStorageDir,
progress,
token,
templates
@@ -230,7 +236,7 @@ export class TemplatePrintCfgProvider {
constructor(
private cli: CodeQLCliServer,
private dbm: DatabaseManager
private dbm: DatabaseManager,
) {
this.cache = new CachedOperation<[Uri, messages.TemplateDefinitions] | undefined>(this.getCfgUri.bind(this));
}

View File

@@ -20,7 +20,7 @@ import {
ProgressCallback,
} from './commandRunner';
import { logger } from './logging';
import { tmpDir } from './run-queries';
import { tmpDir } from './helpers';
/**
* Prompts a user to fetch a database from a remote location. Database is assumed to be an archive file.
@@ -434,7 +434,7 @@ function convertRawLgtmSlug(maybeSlug: string): string | undefined {
}
return;
}
function extractProjectSlug(lgtmUrl: string): string | undefined {
// Only matches the '/g/' provider (github)
const re = new RegExp('https://lgtm.com/projects/g/(.*[^/])');
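
`extractProjectSlug` only recognizes LGTM's GitHub provider (the `/g/` path segment). Since the rest of the function is not shown in this hunk, here is a quick sketch of just what that regular expression captures:

```typescript
// Sketch of the '/g/' match used by extractProjectSlug.
const re = new RegExp('https://lgtm.com/projects/g/(.*[^/])');

console.log(re.exec('https://lgtm.com/projects/g/github/codeql')?.[1]);
// -> 'github/codeql'
console.log(re.exec('https://lgtm.com/projects/b/some-user/some-repo'));
// -> null (non-GitHub providers are not matched)
```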

View File

@@ -19,6 +19,7 @@ import {
} from 'vscode';
import { LanguageClient } from 'vscode-languageclient';
import * as os from 'os';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as tmp from 'tmp-promise';
import { testExplorerExtensionId, TestHub } from 'vscode-test-adapter-api';
@@ -54,17 +55,26 @@ import {
GithubApiError,
GithubRateLimitedError
} from './distribution';
import * as helpers from './helpers';
import {
findLanguage,
tmpDirDisposal,
showBinaryChoiceDialog,
showAndLogErrorMessage,
showAndLogWarningMessage,
showAndLogInformationMessage,
showInformationMessageWithAction,
tmpDir
} from './helpers';
import { assertNever } from './pure/helpers-pure';
import { spawnIdeServer } from './ide-server';
import { InterfaceManager } from './interface';
import { WebviewReveal } from './interface-utils';
import { ideServerLogger, logger, queryServerLogger } from './logging';
import { QueryHistoryManager } from './query-history';
import { FullCompletedQueryInfo, FullQueryInfo } from './query-results';
import { CompletedLocalQueryInfo, LocalQueryInfo } from './query-results';
import * as qsClient from './queryserver-client';
import { displayQuickQuery } from './quick-query';
import { compileAndRunQueryAgainstDatabase, createInitialQueryInfo, tmpDirDisposal } from './run-queries';
import { compileAndRunQueryAgainstDatabase, createInitialQueryInfo } from './run-queries';
import { QLTestAdapterFactory } from './test-adapter';
import { TestUIService } from './test-ui';
import { CompareInterfaceManager } from './compare/compare-interface';
@@ -81,13 +91,13 @@ import { CodeQlStatusBarHandler } from './status-bar';
import { Credentials } from './authentication';
import { RemoteQueriesManager } from './remote-queries/remote-queries-manager';
import { RemoteQuery } from './remote-queries/remote-query';
import { RemoteQueryResult } from './remote-queries/remote-query-result';
import { URLSearchParams } from 'url';
import { RemoteQueriesInterfaceManager } from './remote-queries/remote-queries-interface';
import * as sampleData from './remote-queries/sample-data';
import { handleDownloadPacks, handleInstallPackDependencies } from './packaging';
import { AnalysesResultsManager } from './remote-queries/analyses-results-manager';
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
/**
* extension.ts
@@ -189,7 +199,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
const shouldUpdateOnNextActivationKey = 'shouldUpdateOnNextActivation';
registerErrorStubs([checkForUpdatesCommand], command => (async () => {
void helpers.showAndLogErrorMessage(`Can't execute ${command}: waiting to finish loading CodeQL CLI.`);
void showAndLogErrorMessage(`Can't execute ${command}: waiting to finish loading CodeQL CLI.`);
}));
interface DistributionUpdateConfig {
@@ -201,7 +211,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
async function installOrUpdateDistributionWithProgressTitle(progressTitle: string, config: DistributionUpdateConfig): Promise<void> {
const minSecondsSinceLastUpdateCheck = config.isUserInitiated ? 0 : 86400;
const noUpdatesLoggingFunc = config.shouldDisplayMessageWhenNoUpdates ?
helpers.showAndLogInformationMessage : async (message: string) => void logger.log(message);
showAndLogInformationMessage : async (message: string) => void logger.log(message);
const result = await distributionManager.checkForUpdatesToExtensionManagedDistribution(minSecondsSinceLastUpdateCheck);
// We do want to auto update if there is no distribution at all
@@ -223,7 +233,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
const updateAvailableMessage = `Version "${result.updatedRelease.name}" of the CodeQL CLI is now available. ` +
'Do you wish to upgrade?';
await ctx.globalState.update(shouldUpdateOnNextActivationKey, true);
if (await helpers.showInformationMessageWithAction(updateAvailableMessage, 'Restart and Upgrade')) {
if (await showInformationMessageWithAction(updateAvailableMessage, 'Restart and Upgrade')) {
await commands.executeCommand('workbench.action.reloadWindow');
}
} else {
@@ -236,7 +246,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
distributionManager.installExtensionManagedDistributionRelease(result.updatedRelease, progress));
await ctx.globalState.update(shouldUpdateOnNextActivationKey, false);
void helpers.showAndLogInformationMessage(`CodeQL CLI updated to version "${result.updatedRelease.name}".`);
void showAndLogInformationMessage(`CodeQL CLI updated to version "${result.updatedRelease.name}".`);
}
break;
default:
@@ -263,7 +273,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
// Don't rethrow the exception, because if the config is changed, we want to be able to retry installing
// or updating the distribution.
const alertFunction = (codeQlInstalled && !config.isUserInitiated) ?
helpers.showAndLogWarningMessage : helpers.showAndLogErrorMessage;
showAndLogWarningMessage : showAndLogErrorMessage;
const taskDescription = (willUpdateCodeQl ? 'update' :
codeQlInstalled ? 'check for updates to' : 'install') + ' CodeQL CLI';
@@ -298,20 +308,20 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
}
})();
void helpers.showAndLogWarningMessage(
void showAndLogWarningMessage(
`The current version of the CodeQL CLI (${result.version.raw}) ` +
`is incompatible with this extension. ${fixGuidanceMessage}`
);
break;
}
case FindDistributionResultKind.UnknownCompatibilityDistribution:
void helpers.showAndLogWarningMessage(
void showAndLogWarningMessage(
'Compatibility with the configured CodeQL CLI could not be determined. ' +
'You may experience problems using the extension.'
);
break;
case FindDistributionResultKind.NoDistribution:
void helpers.showAndLogErrorMessage('The CodeQL CLI could not be found.');
void showAndLogErrorMessage('The CodeQL CLI could not be found.');
break;
default:
assertNever(result);
@@ -338,7 +348,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
} else if (distributionResult.kind === FindDistributionResultKind.NoDistribution) {
registerErrorStubs([checkForUpdatesCommand], command => async () => {
const installActionName = 'Install CodeQL CLI';
const chosenAction = await void helpers.showAndLogErrorMessage(`Can't execute ${command}: missing CodeQL CLI.`, {
const chosenAction = await void showAndLogErrorMessage(`Can't execute ${command}: missing CodeQL CLI.`, {
items: [installActionName]
});
if (chosenAction === installActionName) {
@@ -434,18 +444,28 @@ async function activateWithInstalledDistribution(
void logger.log('Initializing query history manager.');
const queryHistoryConfigurationListener = new QueryHistoryConfigListener();
ctx.subscriptions.push(queryHistoryConfigurationListener);
const showResults = async (item: FullCompletedQueryInfo) =>
const showResults = async (item: CompletedLocalQueryInfo) =>
showResultsForCompletedQuery(item, WebviewReveal.Forced);
const queryStorageDir = path.join(ctx.globalStorageUri.fsPath, 'queries');
await fs.ensureDir(queryStorageDir);
void logger.log('Initializing query history.');
const qhm = new QueryHistoryManager(
qs,
dbm,
ctx.extensionPath,
queryStorageDir,
ctx,
queryHistoryConfigurationListener,
showResults,
async (from: FullCompletedQueryInfo, to: FullCompletedQueryInfo) =>
async (from: CompletedLocalQueryInfo, to: CompletedLocalQueryInfo) =>
showResultsForComparison(from, to),
);
qhm.onWillOpenQueryItem(async item => {
if (item.t === 'local' && item.completed) {
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.Forced);
}
});
ctx.subscriptions.push(qhm);
void logger.log('Initializing results panel interface.');
const intm = new InterfaceManager(ctx, dbm, cliServer, queryServerLogger);
@@ -465,18 +485,18 @@ async function activateWithInstalledDistribution(
archiveFilesystemProvider.activate(ctx);
async function showResultsForComparison(
from: FullCompletedQueryInfo,
to: FullCompletedQueryInfo
from: CompletedLocalQueryInfo,
to: CompletedLocalQueryInfo
): Promise<void> {
try {
await cmpm.showResults(from, to);
} catch (e) {
void helpers.showAndLogErrorMessage(e.message);
void showAndLogErrorMessage(e.message);
}
}
async function showResultsForCompletedQuery(
query: FullCompletedQueryInfo,
query: CompletedLocalQueryInfo,
forceReveal: WebviewReveal
): Promise<void> {
await intm.showResults(query, forceReveal, false);
@@ -506,7 +526,7 @@ async function activateWithInstalledDistribution(
token.onCancellationRequested(() => source.cancel());
const initialInfo = await createInitialQueryInfo(selectedQuery, databaseInfo, quickEval, range);
const item = new FullQueryInfo(initialInfo, queryHistoryConfigurationListener, source);
const item = new LocalQueryInfo(initialInfo, queryHistoryConfigurationListener, source);
qhm.addQuery(item);
try {
const completedQueryInfo = await compileAndRunQueryAgainstDatabase(
@@ -514,18 +534,20 @@ async function activateWithInstalledDistribution(
qs,
databaseItem,
initialInfo,
queryStorageDir,
progress,
source.token,
);
item.completeThisQuery(completedQueryInfo);
await showResultsForCompletedQuery(item as FullCompletedQueryInfo, WebviewReveal.NotForced);
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.NotForced);
// Note we must update the query history view after showing results as the
// display and sorting might depend on the number of results
} catch (e) {
e.message = `Error running query: ${e.message}`;
item.failureReason = e.message;
throw e;
} finally {
qhm.refreshTreeView();
await qhm.refreshTreeView();
source.dispose();
}
}
@@ -551,7 +573,7 @@ async function activateWithInstalledDistribution(
const errorMessage = err.message.includes('Generating qhelp in markdown') ? (
`Could not generate markdown from ${pathToQhelp}: Bad formatting in .qhelp file.`
) : `Could not open a preview of the generated file (${absolutePathToMd}).`;
void helpers.showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${err}` });
void showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${err}` });
}
}
@@ -568,7 +590,7 @@ async function activateWithInstalledDistribution(
const uri = Uri.file(resolved.resolvedPath);
await window.showTextDocument(uri, { preview: false });
} else {
void helpers.showAndLogErrorMessage(
void showAndLogErrorMessage(
'Jumping from a .qlref file to the .ql file it references is not '
+ 'supported with the CLI version you are running.\n'
+ `Please upgrade your CLI to version ${CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_QLREF
@@ -622,7 +644,10 @@ async function activateWithInstalledDistribution(
{
title: 'Running query',
cancellable: true
}
},
// Open the query server logger on error since that's usually where the interesting errors appear.
queryServerLogger
)
);
interface DatabaseQuickPickItem extends QuickPickItem {
@@ -638,15 +663,15 @@ async function activateWithInstalledDistribution(
) => {
let filteredDBs = dbm.databaseItems;
if (filteredDBs.length === 0) {
void helpers.showAndLogErrorMessage('No databases found. Please add a suitable database to your workspace.');
void showAndLogErrorMessage('No databases found. Please add a suitable database to your workspace.');
return;
}
// If possible, only show databases with the right language (otherwise show all databases).
const queryLanguage = await helpers.findLanguage(cliServer, uri);
const queryLanguage = await findLanguage(cliServer, uri);
if (queryLanguage) {
filteredDBs = dbm.databaseItems.filter(db => db.language === queryLanguage);
if (filteredDBs.length === 0) {
void helpers.showAndLogErrorMessage(`No databases found for language ${queryLanguage}. Please add a suitable database to your workspace.`);
void showAndLogErrorMessage(`No databases found for language ${queryLanguage}. Please add a suitable database to your workspace.`);
return;
}
}
@@ -678,12 +703,12 @@ async function activateWithInstalledDistribution(
}
if (skippedDatabases.length > 0) {
void logger.log(`Errors:\n${errors.join('\n')}`);
void helpers.showAndLogWarningMessage(
void showAndLogWarningMessage(
`The following databases were skipped:\n${skippedDatabases.join('\n')}.\nFor details about the errors, see the logs.`
);
}
} else {
void helpers.showAndLogErrorMessage('No databases selected.');
void showAndLogErrorMessage('No databases selected.');
}
},
{
@@ -710,7 +735,7 @@ async function activateWithInstalledDistribution(
// files may be hidden from the user.
if (dirFound) {
const fileString = files.map(file => path.basename(file)).join(', ');
const res = await helpers.showBinaryChoiceDialog(
const res = await showBinaryChoiceDialog(
`You are about to run ${files.length} queries: ${fileString} Do you want to continue?`
);
if (!res) {
@@ -754,7 +779,11 @@ async function activateWithInstalledDistribution(
{
title: 'Running queries',
cancellable: true
})
},
// Open the query server logger on error since that's usually where the interesting errors appear.
queryServerLogger
)
);
ctx.subscriptions.push(
commandRunnerWithProgress(
@@ -767,7 +796,10 @@ async function activateWithInstalledDistribution(
{
title: 'Running query',
cancellable: true
})
},
// Open the query server logger on error since that's usually where the interesting errors appear.
queryServerLogger
)
);
ctx.subscriptions.push(
@@ -782,7 +814,11 @@ async function activateWithInstalledDistribution(
{
title: 'Running query',
cancellable: true
})
},
// Open the query server logger on error since that's usually where the interesting errors appear.
queryServerLogger
)
);
ctx.subscriptions.push(
@@ -793,12 +829,21 @@ async function activateWithInstalledDistribution(
displayQuickQuery(ctx, cliServer, databaseUI, progress, token),
{
title: 'Run Quick Query'
}
},
// Open the query server logger on error since that's usually where the interesting errors appear.
queryServerLogger
)
);
void logger.log('Initializing remote queries interface.');
const rqm = new RemoteQueriesManager(ctx, cliServer, logger);
const rqm = new RemoteQueriesManager(ctx, cliServer, qhm, queryStorageDir, logger);
ctx.subscriptions.push(rqm);
// wait until after the remote queries manager is initialized to read the query history
// since the rqm is notified of queries being added.
await qhm.readQueryHistory();
registerRemoteQueryTextProvider();
@@ -831,9 +876,9 @@ async function activateWithInstalledDistribution(
ctx.subscriptions.push(
commandRunner('codeQL.monitorRemoteQuery', async (
query: RemoteQuery,
queryItem: RemoteQueryHistoryItem,
token: CancellationToken) => {
await rqm.monitorRemoteQuery(query, token);
await rqm.monitorRemoteQuery(queryItem, token);
}));
ctx.subscriptions.push(
@@ -845,7 +890,7 @@ async function activateWithInstalledDistribution(
ctx.subscriptions.push(
commandRunner('codeQL.showFakeRemoteQueryResults', async () => {
const analysisResultsManager = new AnalysesResultsManager(ctx, logger);
const analysisResultsManager = new AnalysesResultsManager(ctx, queryStorageDir, logger);
const rqim = new RemoteQueriesInterfaceManager(ctx, logger, analysisResultsManager);
await rqim.showResults(sampleData.sampleRemoteQuery, sampleData.sampleRemoteQueryResult);
@@ -874,7 +919,7 @@ async function activateWithInstalledDistribution(
token: CancellationToken
) => {
await qs.restartQueryServer(progress, token);
void helpers.showAndLogInformationMessage('CodeQL Query Server restarted.', {
void showAndLogInformationMessage('CodeQL Query Server restarted.', {
outputLogger: queryServerLogger,
});
}, {
@@ -930,7 +975,7 @@ async function activateWithInstalledDistribution(
commandRunner('codeQL.copyVersion', async () => {
const text = `CodeQL extension version: ${extension?.packageJSON.version} \nCodeQL CLI version: ${await getCliVersion()} \nPlatform: ${os.platform()} ${os.arch()}`;
await env.clipboard.writeText(text);
void helpers.showAndLogInformationMessage(text);
void showAndLogInformationMessage(text);
}));
const getCliVersion = async () => {
@@ -952,7 +997,7 @@ async function activateWithInstalledDistribution(
const credentials = await Credentials.initialize(ctx);
const octokit = await credentials.getOctokit();
const userInfo = await octokit.users.getAuthenticated();
void helpers.showAndLogInformationMessage(`Authenticated to GitHub as user: ${userInfo.data.login}`);
void showAndLogInformationMessage(`Authenticated to GitHub as user: ${userInfo.data.login}`);
}
}));
@@ -987,18 +1032,23 @@ async function activateWithInstalledDistribution(
// Jump-to-definition and find-references
void logger.log('Registering jump-to-definition handlers.');
// Store contextual queries in a temporary folder so that they are removed
// when the application closes. There is no need for the user to interact with them.
const contextualQueryStorageDir = path.join(tmpDir.name, 'contextual-query-storage');
await fs.ensureDir(contextualQueryStorageDir);
languages.registerDefinitionProvider(
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
new TemplateQueryDefinitionProvider(cliServer, qs, dbm)
new TemplateQueryDefinitionProvider(cliServer, qs, dbm, contextualQueryStorageDir)
);
languages.registerReferenceProvider(
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
new TemplateQueryReferenceProvider(cliServer, qs, dbm)
new TemplateQueryReferenceProvider(cliServer, qs, dbm, contextualQueryStorageDir)
);
const astViewer = new AstViewer();
const printAstTemplateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm);
const printAstTemplateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm, contextualQueryStorageDir);
const cfgTemplateProvider = new TemplatePrintCfgProvider(cliServer, dbm);
ctx.subscriptions.push(astViewer);
@@ -1027,8 +1077,7 @@ async function activateWithInstalledDistribution(
progress: ProgressCallback,
token: CancellationToken
) => {
const res = await
cfgTemplateProvider.provideCfgUri(window.activeTextEditor?.document);
const res = await cfgTemplateProvider.provideCfgUri(window.activeTextEditor?.document);
if (res) {
await compileAndRunQuery(false, res[0], progress, token, undefined);
}
@@ -1058,7 +1107,7 @@ async function activateWithInstalledDistribution(
}
function getContextStoragePath(ctx: ExtensionContext) {
return ctx.storagePath || ctx.globalStoragePath;
return ctx.storageUri?.fsPath || ctx.globalStorageUri.fsPath;
}
async function initializeLogging(ctx: ExtensionContext): Promise<void> {
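
The activation code above now distinguishes two query storage locations: a persistent `queryStorageDir` under the extension's global storage, whose contents survive restarts until the history scrubber removes them, and a throwaway `contextualQueryStorageDir` under the shared `tmpDir` for jump-to-definition and find-references queries. A condensed sketch of that setup (the wrapper function name is invented; the individual calls mirror the excerpt above):

```typescript
import * as fs from 'fs-extra';
import * as path from 'path';
import { ExtensionContext } from 'vscode';
import { tmpDir } from './helpers';

// Condensed sketch of the two storage directories created during activation.
async function prepareQueryStorage(ctx: ExtensionContext) {
  // Persistent: query results live here until the query history scrubber
  // deletes them after the configured TTL.
  const queryStorageDir = path.join(ctx.globalStorageUri.fsPath, 'queries');
  await fs.ensureDir(queryStorageDir);

  // Temporary: contextual queries are written under the shared tmpDir and
  // disappear when it is disposed on shutdown.
  const contextualQueryStorageDir = path.join(tmpDir.name, 'contextual-query-storage');
  await fs.ensureDir(contextualQueryStorageDir);

  return { queryStorageDir, contextualQueryStorageDir };
}
```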

View File

@@ -2,6 +2,7 @@ import * as fs from 'fs-extra';
import * as glob from 'glob-promise';
import * as yaml from 'js-yaml';
import * as path from 'path';
import * as tmp from 'tmp-promise';
import {
ExtensionContext,
Uri,
@@ -14,6 +15,17 @@ import { UserCancellationException } from './commandRunner';
import { logger } from './logging';
import { QueryMetadata } from './pure/interface-types';
// Shared temporary folder for the extension.
export const tmpDir = tmp.dirSync({ prefix: 'queries_', keep: false, unsafeCleanup: true });
export const upgradesTmpDir = path.join(tmpDir.name, 'upgrades');
fs.ensureDirSync(upgradesTmpDir);
export const tmpDirDisposal = {
dispose: () => {
tmpDir.removeCallback();
}
};
/**
* Show an error message and log it to the console
*
@@ -533,3 +545,16 @@ export async function tryGetQueryMetadata(cliServer: CodeQLCliServer, queryPath:
return;
}
}
/**
* Creates a file in the query directory that indicates when this query was created.
* This is important for keeping track of when queries should be removed.
*
* @param storagePath The directory that will contain all files relevant to a query result.
* It does not need to exist.
*/
export async function createTimestampFile(storagePath: string) {
const timestampPath = path.join(storagePath, 'timestamp');
await fs.ensureDir(storagePath);
await fs.writeFile(timestampPath, Date.now().toString(), 'utf8');
}
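
`createTimestampFile` is the hook the query history scrubber relies on: every query storage directory gets a `timestamp` file recording when it was created. A small sketch of a caller, with a hypothetical directory layout:

```typescript
import * as path from 'path';
import { createTimestampFile, tmpDir } from './helpers';

// Sketch only: the directory naming is hypothetical. Writing the timestamp up
// front lets the scrubber later decide whether the directory has outlived the
// configured TTL.
async function initializeQueryDir(queryId: string): Promise<string> {
  const storagePath = path.join(tmpDir.name, queryId);
  await createTimestampFile(storagePath); // creates the directory and writes `timestamp`
  return storagePath;
}
```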

View File

@@ -134,7 +134,7 @@ export function getHtmlForWebview(
: stylesheetWebviewUris.map(uri => createStylesLinkWithNonce(nonce, uri));
const styleSrc = allowInlineStyles
? 'https://*.vscode-webview.net/ vscode-file: \'unsafe-inline\''
? `${webview.cspSource} vscode-file: 'unsafe-inline'`
: `'nonce-${nonce}'`;
/*

View File

@@ -14,7 +14,7 @@ import {
import * as cli from './cli';
import { CodeQLCliServer } from './cli';
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from './databases';
import { showAndLogErrorMessage } from './helpers';
import { showAndLogErrorMessage, tmpDir } from './helpers';
import { assertNever } from './pure/helpers-pure';
import {
FromResultsViewMsg,
@@ -34,7 +34,7 @@ import { Logger } from './logging';
import * as messages from './pure/messages';
import { commandRunner } from './commandRunner';
import { CompletedQueryInfo, interpretResultsSarif, interpretGraphResults } from './query-results';
import { QueryEvaluationInfo, tmpDir } from './run-queries';
import { QueryEvaluationInfo } from './run-queries';
import { parseSarifLocation, parseSarifPlainTextMessage } from './pure/sarif-utils';
import {
WebviewReveal,
@@ -48,7 +48,7 @@ import {
import { getDefaultResultSetName, ParsedResultSets } from './pure/interface-types';
import { RawResultSet, transformBqrsResultSet, ResultSetSchema } from './pure/bqrs-cli-types';
import { PAGE_SIZE } from './config';
import { FullCompletedQueryInfo } from './query-results';
import { CompletedLocalQueryInfo } from './query-results';
/**
* interface.ts
@@ -119,7 +119,7 @@ function numInterpretedPages(interpretation: Interpretation | undefined): number
}
export class InterfaceManager extends DisposableObject {
private _displayedQuery?: FullCompletedQueryInfo;
private _displayedQuery?: CompletedLocalQueryInfo;
private _interpretation?: Interpretation;
private _panel: vscode.WebviewPanel | undefined;
private _panelLoaded = false;
@@ -379,7 +379,7 @@ export class InterfaceManager extends DisposableObject {
* history entry.
*/
public async showResults(
fullQuery: FullCompletedQueryInfo,
fullQuery: CompletedLocalQueryInfo,
forceReveal: WebviewReveal,
shouldKeepOldResultsWhileRendering = false
): Promise<void> {

View File

@@ -29,3 +29,8 @@ export const asyncFilter = async function <T>(arr: T[], predicate: (arg0: T) =>
const results = await Promise.all(arr.map(predicate));
return arr.filter((_, index) => results[index]);
};
export const ONE_DAY_IN_MS = 24 * 60 * 60 * 1000;
export const ONE_HOUR_IN_MS = 1000 * 60 * 60;
export const TWO_HOURS_IN_MS = 1000 * 60 * 60 * 2;
export const THREE_HOURS_IN_MS = 1000 * 60 * 60 * 3;

View File

@@ -0,0 +1,135 @@
import * as fs from 'fs-extra';
import * as os from 'os';
import * as path from 'path';
import { Disposable, ExtensionContext } from 'vscode';
import { logger } from './logging';
const LAST_SCRUB_TIME_KEY = 'lastScrubTime';
type Counter = {
increment: () => void;
};
/**
* Registers an interval timer that will periodically check for queries old enough
* to be deleted.
*
* Note that this scrubber will clean all queries from all workspaces. It should not
* run too often and it should only run from one workspace at a time.
*
* Generally, `wakeInterval` should be significantly shorter than `throttleTime`.
*
* @param wakeInterval How often to check to see if the job should run.
* @param throttleTime How often to actually run the job.
* @param maxQueryTime The maximum age of a query before it is ready for deletion.
* @param queryDirectory The directory containing all queries.
* @param ctx The extension context.
*/
export function registerQueryHistoryScubber(
wakeInterval: number,
throttleTime: number,
maxQueryTime: number,
queryDirectory: string,
ctx: ExtensionContext,
// optional counter to keep track of how many times the scrubber has run
counter?: Counter
): Disposable {
const deregister = setInterval(scrubQueries, wakeInterval, throttleTime, maxQueryTime, queryDirectory, ctx, counter);
return {
dispose: () => {
clearInterval(deregister);
}
};
}
async function scrubQueries(
throttleTime: number,
maxQueryTime: number,
queryDirectory: string,
ctx: ExtensionContext,
counter?: Counter
) {
const lastScrubTime = ctx.globalState.get<number>(LAST_SCRUB_TIME_KEY);
const now = Date.now();
// If we have never scrubbed before, or if the last scrub was more than `throttleTime` ago,
// then scrub again.
if (lastScrubTime === undefined || now - lastScrubTime >= throttleTime) {
await ctx.globalState.update(LAST_SCRUB_TIME_KEY, now);
let scrubCount = 0; // total number of directories deleted
try {
counter?.increment();
void logger.log('Scrubbing query directory. Removing old queries.');
if (!(await fs.pathExists(queryDirectory))) {
void logger.log(`Cannot scrub. Query directory does not exist: ${queryDirectory}`);
return;
}
const baseNames = await fs.readdir(queryDirectory);
const errors: string[] = [];
for (const baseName of baseNames) {
const dir = path.join(queryDirectory, baseName);
const scrubResult = await scrubDirectory(dir, now, maxQueryTime);
if (scrubResult.errorMsg) {
errors.push(scrubResult.errorMsg);
}
if (scrubResult.deleted) {
scrubCount++;
}
}
if (errors.length) {
throw new Error(os.EOL + errors.join(os.EOL));
}
} catch (e) {
void logger.log(`Error while scrubbing queries: ${e}`);
} finally {
void logger.log(`Scrubbed ${scrubCount} old queries.`);
}
}
}
async function scrubDirectory(dir: string, now: number, maxQueryTime: number): Promise<{
errorMsg?: string,
deleted: boolean
}> {
const timestampFile = path.join(dir, 'timestamp');
try {
let deleted = true;
if (!(await fs.stat(dir)).isDirectory()) {
void logger.log(` ${dir} is not a directory. Deleting.`);
await fs.remove(dir);
} else if (!(await fs.pathExists(timestampFile))) {
void logger.log(` ${dir} has no timestamp file. Deleting.`);
await fs.remove(dir);
} else if (!(await fs.stat(timestampFile)).isFile()) {
void logger.log(` ${timestampFile} is not a file. Deleting.`);
await fs.remove(dir);
} else {
const timestampText = await fs.readFile(timestampFile, 'utf8');
const timestamp = parseInt(timestampText, 10);
if (Number.isNaN(timestamp)) {
void logger.log(` ${dir} has invalid timestamp '${timestampText}'. Deleting.`);
await fs.remove(dir);
} else if (now - timestamp > maxQueryTime) {
void logger.log(` ${dir} is older than ${maxQueryTime / 1000} seconds. Deleting.`);
await fs.remove(dir);
} else {
void logger.log(` ${dir} is not older than ${maxQueryTime / 1000} seconds. Keeping.`);
deleted = false;
}
}
return {
deleted
};
} catch (err) {
return {
errorMsg: ` Could not delete '${dir}': ${err}`,
deleted: false
};
}
}
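
Tying the pieces together, the scrubber is registered once per activation with a short wake interval, a longer throttle, and the configured TTL as the maximum query age. A hedged sketch of that wiring, using the interval constants added to `helpers-pure` (the exact values chosen by the extension may differ; these are illustrative):

```typescript
import { ExtensionContext } from 'vscode';
import { QueryHistoryConfig } from './config';
import { ONE_HOUR_IN_MS, TWO_HOURS_IN_MS } from './pure/helpers-pure';
import { registerQueryHistoryScubber } from './query-history-scrubber';

// Hedged sketch: wake up hourly, actually scan at most once every two hours,
// and delete any query directory older than the configured TTL.
function startScrubber(
  ctx: ExtensionContext,
  config: QueryHistoryConfig,
  queryStorageDir: string
) {
  ctx.subscriptions.push(
    registerQueryHistoryScubber(
      ONE_HOUR_IN_MS,       // wakeInterval: how often to check whether to run
      TWO_HOURS_IN_MS,      // throttleTime: how often a scrub may actually run
      config.ttlInMillis,   // maxQueryTime: age beyond which queries are deleted
      queryStorageDir,
      ctx
    )
  );
}
```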

View File

@@ -1,9 +1,11 @@
import * as path from 'path';
import {
commands,
Disposable,
env,
Event,
EventEmitter,
ExtensionContext,
ProviderResult,
Range,
ThemeIcon,
@@ -26,9 +28,12 @@ import { URLSearchParams } from 'url';
import { QueryServerClient } from './queryserver-client';
import { DisposableObject } from './pure/disposable-object';
import { commandRunner } from './commandRunner';
import { assertNever } from './pure/helpers-pure';
import { FullCompletedQueryInfo, FullQueryInfo, QueryStatus } from './query-results';
import { assertNever, ONE_HOUR_IN_MS, TWO_HOURS_IN_MS } from './pure/helpers-pure';
import { CompletedLocalQueryInfo, LocalQueryInfo as LocalQueryInfo, QueryHistoryInfo } from './query-results';
import { DatabaseManager } from './databases';
import { registerQueryHistoryScubber } from './query-history-scrubber';
import { QueryStatus } from './query-status';
import { slurpQueryHistory, splatQueryHistory } from './query-serialization';
/**
* query-history.ts
@@ -71,6 +76,11 @@ const FAILED_QUERY_HISTORY_ITEM_ICON = 'media/red-x.svg';
*/
const LOCAL_SUCCESS_QUERY_HISTORY_ITEM_ICON = 'media/drive.svg';
/**
* Path to icon to display next to a successful remote run.
*/
const REMOTE_SUCCESS_QUERY_HISTORY_ITEM_ICON = 'media/globe.svg';
export enum SortOrder {
NameAsc = 'NameAsc',
NameDesc = 'NameDesc',
@@ -80,24 +90,34 @@ export enum SortOrder {
CountDesc = 'CountDesc',
}
/**
* Number of milliseconds two clicks have to arrive apart to be
* considered a double-click.
*/
const DOUBLE_CLICK_TIME = 500;
const WORKSPACE_QUERY_HISTORY_FILE = 'workspace-query-history.json';
/**
* Tree data provider for the query history view.
*/
export class HistoryTreeDataProvider extends DisposableObject {
private _sortOrder = SortOrder.DateAsc;
private _onDidChangeTreeData = super.push(new EventEmitter<FullQueryInfo | undefined>());
private _onDidChangeTreeData = super.push(new EventEmitter<QueryHistoryInfo | undefined>());
readonly onDidChangeTreeData: Event<FullQueryInfo | undefined> = this
readonly onDidChangeTreeData: Event<QueryHistoryInfo | undefined> = this
._onDidChangeTreeData.event;
private history: FullQueryInfo[] = [];
private history: QueryHistoryInfo[] = [];
private failedIconPath: string;
private localSuccessIconPath: string;
private current: FullQueryInfo | undefined;
private remoteSuccessIconPath: string;
private current: QueryHistoryInfo | undefined;
constructor(extensionPath: string) {
super();
@@ -109,15 +129,20 @@ export class HistoryTreeDataProvider extends DisposableObject {
extensionPath,
LOCAL_SUCCESS_QUERY_HISTORY_ITEM_ICON
);
this.remoteSuccessIconPath = path.join(
extensionPath,
REMOTE_SUCCESS_QUERY_HISTORY_ITEM_ICON
);
}
async getTreeItem(element: FullQueryInfo): Promise<TreeItem> {
async getTreeItem(element: QueryHistoryInfo): Promise<TreeItem> {
const treeItem = new TreeItem(element.label);
treeItem.command = {
title: 'Query History Item',
command: 'codeQLQueryHistory.itemClicked',
arguments: [element],
tooltip: element.failureReason || element.label
};
// Populate the icon and the context value. We use the context value to
@@ -129,11 +154,16 @@ export class HistoryTreeDataProvider extends DisposableObject {
treeItem.contextValue = 'inProgressResultsItem';
break;
case QueryStatus.Completed:
hasResults = await element.completedQuery?.query.hasInterpretedResults();
treeItem.iconPath = this.localSuccessIconPath;
treeItem.contextValue = hasResults
? 'interpretedResultsItem'
: 'rawResultsItem';
if (element.t === 'local') {
hasResults = await element.completedQuery?.query.hasInterpretedResults();
treeItem.iconPath = this.localSuccessIconPath;
treeItem.contextValue = hasResults
? 'interpretedResultsItem'
: 'rawResultsItem';
} else {
treeItem.iconPath = this.remoteSuccessIconPath;
treeItem.contextValue = 'remoteResultsItem';
}
break;
case QueryStatus.Failed:
treeItem.iconPath = this.failedIconPath;
@@ -147,9 +177,21 @@ export class HistoryTreeDataProvider extends DisposableObject {
}
getChildren(
element?: FullQueryInfo
): ProviderResult<FullQueryInfo[]> {
element?: QueryHistoryInfo
): ProviderResult<QueryHistoryInfo[]> {
return element ? [] : this.history.sort((h1, h2) => {
// TODO remote queries are not implemented yet.
if (h1.t !== 'local' && h2.t !== 'local') {
return 0;
}
if (h1.t !== 'local') {
return -1;
}
if (h2.t !== 'local') {
return 1;
}
const resultCount1 = h1.completedQuery?.resultCount ?? -1;
const resultCount2 = h2.completedQuery?.resultCount ?? -1;
@@ -178,25 +220,25 @@ export class HistoryTreeDataProvider extends DisposableObject {
});
}
getParent(_element: FullQueryInfo): ProviderResult<FullQueryInfo> {
getParent(_element: QueryHistoryInfo): ProviderResult<QueryHistoryInfo> {
return null;
}
getCurrent(): FullQueryInfo | undefined {
getCurrent(): QueryHistoryInfo | undefined {
return this.current;
}
pushQuery(item: FullQueryInfo): void {
pushQuery(item: QueryHistoryInfo): void {
this.history.push(item);
this.setCurrentItem(item);
this.refresh();
}
setCurrentItem(item?: FullQueryInfo) {
setCurrentItem(item?: QueryHistoryInfo) {
this.current = item;
}
remove(item: FullQueryInfo) {
remove(item: QueryHistoryInfo) {
const isCurrent = this.current === item;
if (isCurrent) {
this.setCurrentItem();
@@ -213,10 +255,16 @@ export class HistoryTreeDataProvider extends DisposableObject {
}
}
get allHistory(): FullQueryInfo[] {
get allHistory(): QueryHistoryInfo[] {
return this.history;
}
set allHistory(history: QueryHistoryInfo[]) {
this.history = history;
this.current = history[0];
this.refresh();
}
refresh() {
this._onDidChangeTreeData.fire(undefined);
}
@@ -231,35 +279,48 @@ export class HistoryTreeDataProvider extends DisposableObject {
}
}
/**
* Number of milliseconds two clicks have to arrive apart to be
* considered a double-click.
*/
const DOUBLE_CLICK_TIME = 500;
const NO_QUERY_SELECTED = 'No query selected. Select a query history item you have already run and try again.';
export class QueryHistoryManager extends DisposableObject {
treeDataProvider: HistoryTreeDataProvider;
treeView: TreeView<FullQueryInfo>;
lastItemClick: { time: Date; item: FullQueryInfo } | undefined;
compareWithItem: FullQueryInfo | undefined;
treeView: TreeView<QueryHistoryInfo>;
lastItemClick: { time: Date; item: QueryHistoryInfo } | undefined;
compareWithItem: LocalQueryInfo | undefined;
queryHistoryScrubber: Disposable | undefined;
private queryMetadataStorageLocation: string;
private readonly _onDidAddQueryItem = super.push(new EventEmitter<QueryHistoryInfo>());
readonly onDidAddQueryItem: Event<QueryHistoryInfo> = this
._onDidAddQueryItem.event;
private readonly _onDidRemoveQueryItem = super.push(new EventEmitter<QueryHistoryInfo>());
readonly onDidRemoveQueryItem: Event<QueryHistoryInfo> = this
._onDidRemoveQueryItem.event;
private readonly _onWillOpenQueryItem = super.push(new EventEmitter<QueryHistoryInfo>());
readonly onWillOpenQueryItem: Event<QueryHistoryInfo> = this
._onWillOpenQueryItem.event;
constructor(
private qs: QueryServerClient,
private dbm: DatabaseManager,
extensionPath: string,
queryHistoryConfigListener: QueryHistoryConfig,
private selectedCallback: (item: FullCompletedQueryInfo) => Promise<void>,
private queryStorageDir: string,
ctx: ExtensionContext,
private queryHistoryConfigListener: QueryHistoryConfig,
private doCompareCallback: (
from: FullCompletedQueryInfo,
to: FullCompletedQueryInfo
from: CompletedLocalQueryInfo,
to: CompletedLocalQueryInfo
) => Promise<void>
) {
super();
// Note that we use workspace storage to hold the metadata for the query history.
// This is because the query history is specific to each workspace.
// For situations where `ctx.storageUri` is undefined (i.e., there is no workspace),
// we default to global storage.
this.queryMetadataStorageLocation = path.join((ctx.storageUri || ctx.globalStorageUri).fsPath, WORKSPACE_QUERY_HISTORY_FILE);
this.treeDataProvider = this.push(new HistoryTreeDataProvider(
extensionPath
ctx.extensionPath
));
this.treeView = this.push(window.createTreeView('codeQLQueryHistory', {
treeDataProvider: this.treeDataProvider,
@@ -281,7 +342,12 @@ export class QueryHistoryManager extends DisposableObject {
} else {
this.treeDataProvider.setCurrentItem(ev.selection[0]);
}
this.updateCompareWith([...ev.selection]);
if (ev.selection.some(item => item.t !== 'local')) {
// Don't allow comparison of non-local items
this.updateCompareWith([]);
} else {
this.updateCompareWith([...ev.selection] as LocalQueryInfo[]);
}
})
);
@@ -373,14 +439,20 @@ export class QueryHistoryManager extends DisposableObject {
this.push(
commandRunner(
'codeQLQueryHistory.itemClicked',
async (item: FullQueryInfo) => {
async (item: LocalQueryInfo) => {
return this.handleItemClicked(item, [item]);
}
)
);
// There are two configuration items that affect the query history:
// 1. The ttl for query history items.
// 2. The default label for query history items.
// When either of these changes, we must refresh the tree view.
this.push(
queryHistoryConfigListener.onDidChangeConfiguration(() => {
this.treeDataProvider.refresh();
this.registerQueryHistoryScrubber(queryHistoryConfigListener, ctx);
})
);
@@ -398,65 +470,111 @@ export class QueryHistoryManager extends DisposableObject {
);
},
}));
this.registerQueryHistoryScrubber(queryHistoryConfigListener, ctx);
}
async invokeCallbackOn(queryHistoryItem: FullQueryInfo) {
if (this.selectedCallback && queryHistoryItem.isCompleted()) {
const sc = this.selectedCallback;
await sc(queryHistoryItem as FullCompletedQueryInfo);
}
/**
* Register and create the history scrubber.
*/
private registerQueryHistoryScrubber(queryHistoryConfigListener: QueryHistoryConfig, ctx: ExtensionContext) {
this.queryHistoryScrubber?.dispose();
// Every hour check if we need to re-run the query history scrubber.
this.queryHistoryScrubber = this.push(
registerQueryHistoryScubber(
ONE_HOUR_IN_MS,
TWO_HOURS_IN_MS,
queryHistoryConfigListener.ttlInMillis,
this.queryStorageDir,
ctx
)
);
}
async readQueryHistory(): Promise<void> {
void logger.log(`Reading cached query history from '${this.queryMetadataStorageLocation}'.`);
const history = await slurpQueryHistory(this.queryMetadataStorageLocation, this.queryHistoryConfigListener);
this.treeDataProvider.allHistory = history;
this.treeDataProvider.allHistory.forEach((item) => {
this._onDidAddQueryItem.fire(item);
});
}
async writeQueryHistory(): Promise<void> {
await splatQueryHistory(this.treeDataProvider.allHistory, this.queryMetadataStorageLocation);
}
async handleOpenQuery(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[]
): Promise<void> {
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
if (!this.assertSingleQuery(finalMultiSelect)) {
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem) {
return;
}
if (!finalSingleItem) {
throw new Error(NO_QUERY_SELECTED);
}
const queryPath = finalSingleItem.t === 'local'
? finalSingleItem.initialInfo.queryPath
: finalSingleItem.remoteQuery.queryFilePath;
const textDocument = await workspace.openTextDocument(
Uri.file(finalSingleItem.initialInfo.queryPath)
Uri.file(queryPath)
);
const editor = await window.showTextDocument(
textDocument,
ViewColumn.One
);
const queryText = finalSingleItem.initialInfo.queryText;
if (queryText !== undefined && finalSingleItem.initialInfo.isQuickQuery) {
await editor.edit((edit) =>
edit.replace(
textDocument.validateRange(
new Range(0, 0, textDocument.lineCount, 0)
),
queryText
)
);
if (finalSingleItem.t === 'local') {
const queryText = finalSingleItem.initialInfo.queryText;
if (queryText !== undefined && finalSingleItem.initialInfo.isQuickQuery) {
await editor.edit((edit) =>
edit.replace(
textDocument.validateRange(
new Range(0, 0, textDocument.lineCount, 0)
),
queryText
)
);
}
}
}
async handleRemoveHistoryItem(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[] = []
) {
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
const toDelete = (finalMultiSelect || [finalSingleItem]);
await Promise.all(toDelete.map(async (item) => {
if (item.t === 'local') {
// Removing in-progress local queries is not supported. They must be cancelled first.
if (item.status !== QueryStatus.InProgress) {
this.treeDataProvider.remove(item);
item.completedQuery?.dispose();
(finalMultiSelect || [finalSingleItem]).forEach((item) => {
// Removing in progress queries is not supported yet
if (item.status !== QueryStatus.InProgress) {
// User has explicitly asked for this query to be removed.
// We need to delete it from disk as well.
await item.completedQuery?.query.deleteQuery();
}
} else {
// Remote queries can be removed locally, but not remotely.
// The user must cancel the query on GitHub Actions explicitly.
this.treeDataProvider.remove(item);
item.completedQuery?.dispose();
void logger.log(`Deleted ${item.label}.`);
if (item.status === QueryStatus.InProgress) {
void logger.log('The remote query is still running on GitHub Actions. To cancel there, you must go to the query run in your browser.');
}
this._onDidRemoveQueryItem.fire(item);
}
});
}));
await this.writeQueryHistory();
const current = this.treeDataProvider.getCurrent();
if (current !== undefined) {
await this.treeView.reveal(current, { select: true });
await this.invokeCallbackOn(current);
this._onWillOpenQueryItem.fire(current);
}
}
@@ -485,40 +603,50 @@ export class QueryHistoryManager extends DisposableObject {
}
async handleSetLabel(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[]
): Promise<void> {
if (!this.assertSingleQuery(multiSelect)) {
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
// TODO will support remote queries
if (!this.assertSingleQuery(finalMultiSelect) || finalSingleItem?.t !== 'local') {
return;
}
const response = await window.showInputBox({
prompt: 'Label:',
placeHolder: '(use default)',
value: singleItem.label,
value: finalSingleItem.label,
});
// undefined response means the user cancelled the dialog; don't change anything
if (response !== undefined) {
// Interpret empty string response as 'go back to using default'
singleItem.initialInfo.userSpecifiedLabel = response === '' ? undefined : response;
finalSingleItem.initialInfo.userSpecifiedLabel = response === '' ? undefined : response;
this.treeDataProvider.refresh();
}
}
async handleCompareWith(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[]
) {
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
try {
if (!singleItem.completedQuery?.didRunSuccessfully) {
throw new Error('Please select a successful query.');
// local queries only
if (finalSingleItem?.t !== 'local') {
throw new Error('Please select a local query.');
}
if (!finalSingleItem.completedQuery?.didRunSuccessfully) {
throw new Error('Please select a query that has completed successfully.');
}
const from = this.compareWithItem || singleItem;
const to = await this.findOtherQueryToCompare(from, multiSelect);
const to = await this.findOtherQueryToCompare(from, finalMultiSelect);
if (from.isCompleted() && to?.isCompleted()) {
await this.doCompareCallback(from as FullCompletedQueryInfo, to as FullCompletedQueryInfo);
if (from.completed && to?.completed) {
await this.doCompareCallback(from as CompletedLocalQueryInfo, to as CompletedLocalQueryInfo);
}
} catch (e) {
void showAndLogErrorMessage(e.message);
@@ -526,18 +654,14 @@ export class QueryHistoryManager extends DisposableObject {
}
async handleItemClicked(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[] = []
) {
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
if (!this.assertSingleQuery(finalMultiSelect)) {
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem) {
return;
}
if (!finalSingleItem) {
throw new Error(NO_QUERY_SELECTED);
}
this.treeDataProvider.setCurrentItem(finalSingleItem);
const now = new Date();
@@ -552,16 +676,19 @@ export class QueryHistoryManager extends DisposableObject {
// show original query file on double click
await this.handleOpenQuery(finalSingleItem, [finalSingleItem]);
} else {
// show results on single click
await this.invokeCallbackOn(finalSingleItem);
// show results on single click only if query is completed successfully.
if (finalSingleItem.status === QueryStatus.Completed) {
await this._onWillOpenQueryItem.fire(finalSingleItem);
}
}
}
async handleShowQueryLog(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[]
) {
if (!this.assertSingleQuery(multiSelect)) {
// Local queries only
if (!this.assertSingleQuery(multiSelect) || singleItem?.t !== 'local') {
return;
}
@@ -577,56 +704,64 @@ export class QueryHistoryManager extends DisposableObject {
}
async handleCancel(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[]
) {
// Local queries only
// In the future, we may support cancelling remote queries, but this is not a short-term plan.
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
(finalMultiSelect || [finalSingleItem]).forEach((item) => {
if (item.status === QueryStatus.InProgress) {
if (item.status === QueryStatus.InProgress && item.t === 'local') {
item.cancel();
}
});
}
async handleShowQueryText(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[] = []
) {
if (!this.assertSingleQuery(multiSelect)) {
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem) {
return;
}
if (!singleItem) {
throw new Error(NO_QUERY_SELECTED);
}
const params = new URLSearchParams({
isQuickEval: String(!!singleItem.initialInfo.quickEvalPosition),
queryText: encodeURIComponent(await this.getQueryText(singleItem)),
isQuickEval: String(!!(finalSingleItem.t === 'local' && finalSingleItem.initialInfo.quickEvalPosition)),
queryText: encodeURIComponent(await this.getQueryText(finalSingleItem)),
});
const queryId = finalSingleItem.t === 'local'
? finalSingleItem.initialInfo.id
: finalSingleItem.queryId;
const uri = Uri.parse(
`codeql:${singleItem.initialInfo.id}?${params.toString()}`, true
`codeql:${queryId}?${params.toString()}`, true
);
const doc = await workspace.openTextDocument(uri);
await window.showTextDocument(doc, { preview: false });
}
async handleViewSarifAlerts(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[]
) {
if (!this.assertSingleQuery(multiSelect) || !singleItem.completedQuery) {
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
// Local queries only
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem || finalSingleItem.t !== 'local' || !finalSingleItem.completedQuery) {
return;
}
const query = singleItem.completedQuery.query;
const query = finalSingleItem.completedQuery.query;
const hasInterpretedResults = query.canHaveInterpretedResults();
if (hasInterpretedResults) {
await this.tryOpenExternalFile(
query.resultsPaths.interpretedResultsPath
);
} else {
const label = singleItem.label;
const label = finalSingleItem.label;
void showAndLogInformationMessage(
`Query ${label} has no interpreted results.`
);
@@ -634,16 +769,16 @@ export class QueryHistoryManager extends DisposableObject {
}
async handleViewCsvResults(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[]
) {
if (!this.assertSingleQuery(multiSelect)) {
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
// Local queries only
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem || finalSingleItem.t !== 'local' || !finalSingleItem.completedQuery) {
return;
}
if (!singleItem.completedQuery) {
return;
}
const query = singleItem.completedQuery.query;
const query = finalSingleItem.completedQuery.query;
if (await query.hasCsv()) {
void this.tryOpenExternalFile(query.csvPath);
return;
@@ -656,41 +791,47 @@ export class QueryHistoryManager extends DisposableObject {
}
async handleViewCsvAlerts(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[]
) {
if (!this.assertSingleQuery(multiSelect) || !singleItem.completedQuery) {
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
// Local queries only
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem || finalSingleItem.t !== 'local' || !finalSingleItem.completedQuery) {
return;
}
await this.tryOpenExternalFile(
await singleItem.completedQuery.query.ensureCsvProduced(this.qs, this.dbm)
await finalSingleItem.completedQuery.query.ensureCsvAlerts(this.qs, this.dbm)
);
}
async handleViewDil(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[],
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[],
) {
if (!this.assertSingleQuery(multiSelect)) {
return;
}
if (!singleItem.completedQuery) {
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
// Local queries only
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem || finalSingleItem.t !== 'local' || !finalSingleItem.completedQuery) {
return;
}
await this.tryOpenExternalFile(
await singleItem.completedQuery.query.ensureDilPath(this.qs)
await finalSingleItem.completedQuery.query.ensureDilPath(this.qs)
);
}
async getQueryText(queryHistoryItem: FullQueryInfo): Promise<string> {
return queryHistoryItem.initialInfo.queryText;
async getQueryText(item: QueryHistoryInfo): Promise<string> {
return item.t === 'local'
? item.initialInfo.queryText
: item.remoteQuery.queryText;
}
addQuery(item: FullQueryInfo) {
addQuery(item: QueryHistoryInfo) {
this.treeDataProvider.pushQuery(item);
this.updateTreeViewSelectionIfVisible();
this._onDidAddQueryItem.fire(item);
}
/**
@@ -747,10 +888,12 @@ the file in the file explorer and dragging it into the workspace.`
}
private async findOtherQueryToCompare(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
): Promise<FullQueryInfo | undefined> {
if (!singleItem.completedQuery) {
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[]
): Promise<CompletedLocalQueryInfo | undefined> {
// Remote queries cannot be compared
if (singleItem.t !== 'local' || multiSelect.some(s => s.t !== 'local') || !singleItem.completedQuery) {
return undefined;
}
const dbName = singleItem.initialInfo.databaseInfo.name;
@@ -759,7 +902,7 @@ the file in the file explorer and dragging it into the workspace.`
if (multiSelect?.length === 2) {
// return the query that is not the first selected one
const otherQuery =
singleItem === multiSelect[0] ? multiSelect[1] : multiSelect[0];
(singleItem === multiSelect[0] ? multiSelect[1] : multiSelect[0]) as LocalQueryInfo;
if (!otherQuery.completedQuery) {
throw new Error('Please select a completed query.');
}
@@ -769,10 +912,10 @@ the file in the file explorer and dragging it into the workspace.`
if (otherQuery.initialInfo.databaseInfo.name !== dbName) {
throw new Error('Query databases must be the same.');
}
return otherQuery;
return otherQuery as CompletedLocalQueryInfo;
}
if (multiSelect?.length > 1) {
if (multiSelect?.length > 2) {
throw new Error('Please select no more than 2 queries.');
}
@@ -781,15 +924,16 @@ the file in the file explorer and dragging it into the workspace.`
.filter(
(otherQuery) =>
otherQuery !== singleItem &&
otherQuery.t === 'local' &&
otherQuery.completedQuery &&
otherQuery.completedQuery.didRunSuccessfully &&
otherQuery.initialInfo.databaseInfo.name === dbName
)
.map((item) => ({
label: item.label,
description: item.initialInfo.databaseInfo.name,
detail: item.completedQuery!.statusString,
query: item,
description: (item as CompletedLocalQueryInfo).initialInfo.databaseInfo.name,
detail: (item as CompletedLocalQueryInfo).completedQuery.statusString,
query: item as CompletedLocalQueryInfo,
}));
if (comparableQueryLabels.length < 1) {
throw new Error('No other queries available to compare with.');
@@ -798,7 +942,7 @@ the file in the file explorer and dragging it into the workspace.`
return choice?.query;
}
private assertSingleQuery(multiSelect: FullQueryInfo[] = [], message = 'Please select a single query.') {
private assertSingleQuery(multiSelect: QueryHistoryInfo[] = [], message = 'Please select a single query.') {
if (multiSelect.length > 1) {
void showAndLogErrorMessage(
message
@@ -825,7 +969,7 @@ the file in the file explorer and dragging it into the workspace.`
*
* @param newSelection the new selection after the most recent selection change
*/
private updateCompareWith(newSelection: FullQueryInfo[]) {
private updateCompareWith(newSelection: LocalQueryInfo[]) {
if (newSelection.length === 1) {
this.compareWithItem = newSelection[0];
} else if (
@@ -849,9 +993,12 @@ the file in the file explorer and dragging it into the workspace.`
* @param multiSelect a multi-select or undefined if no items are selected
*/
private determineSelection(
singleItem: FullQueryInfo,
multiSelect: FullQueryInfo[]
): { readonly finalSingleItem: FullQueryInfo; readonly finalMultiSelect: FullQueryInfo[] } {
singleItem: QueryHistoryInfo,
multiSelect: QueryHistoryInfo[]
): {
finalSingleItem: QueryHistoryInfo;
finalMultiSelect: QueryHistoryInfo[]
} {
if (!singleItem && !multiSelect?.[0]) {
const selection = this.treeView.selection;
const current = this.treeDataProvider.getCurrent();
@@ -868,7 +1015,7 @@ the file in the file explorer and dragging it into the workspace.`
}
}
// ensure we do not return undefined
// ensure we only return undefined if we have neither a single nor a multi-selection
if (singleItem && !multiSelect?.[0]) {
multiSelect = [singleItem];
} else if (!singleItem && multiSelect?.[0]) {
@@ -880,7 +1027,8 @@ the file in the file explorer and dragging it into the workspace.`
};
}
refreshTreeView(): void {
async refreshTreeView(): Promise<void> {
this.treeDataProvider.refresh();
await this.writeQueryHistory();
}
}

View File

@@ -16,7 +16,16 @@ import {
} from './pure/interface-types';
import { QueryHistoryConfig } from './config';
import { DatabaseInfo } from './pure/interface-types';
import { showAndLogErrorMessage } from './helpers';
import { QueryStatus } from './query-status';
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
/**
* query-results.ts
* ----------------
*
* A collection of classes and functions that collectively
* manage query results.
*/
/**
* A description of the information about a query
@@ -34,12 +43,6 @@ export interface InitialQueryInfo {
readonly id: string; // unique id for this query.
}
export enum QueryStatus {
InProgress = 'InProgress',
Completed = 'Completed',
Failed = 'Failed',
}
export class CompletedQueryInfo implements QueryWithResults {
readonly query: QueryEvaluationInfo;
readonly result: messages.EvaluationResult;
@@ -202,84 +205,36 @@ export function ensureMetadataIsComplete(metadata: QueryMetadata | undefined) {
/**
* Used in Interface and Compare-Interface for queries that we know have been completed.
*/
export type FullCompletedQueryInfo = FullQueryInfo & {
export type CompletedLocalQueryInfo = LocalQueryInfo & {
completedQuery: CompletedQueryInfo
};
export class FullQueryInfo {
export type QueryHistoryInfo = LocalQueryInfo | RemoteQueryHistoryItem;
static async slurp(fsPath: string, config: QueryHistoryConfig): Promise<FullQueryInfo[]> {
try {
const data = await fs.readFile(fsPath, 'utf8');
const queries = JSON.parse(data);
return queries.map((q: FullQueryInfo) => {
// Need to explicitly set prototype since reading in from JSON will not
// do this automatically. Note that we can't call the constructor here since
// the constructor invokes extra logic that we don't want to do.
Object.setPrototypeOf(q, FullQueryInfo.prototype);
// The config object is a global, so we need to set it explicitly
// and ensure it is not serialized to JSON.
q.setConfig(config);
// Date instances are serialized as strings. Need to
// convert them back to Date instances.
(q.initialInfo as any).start = new Date(q.initialInfo.start);
if (q.completedQuery) {
// Again, need to explicitly set prototypes.
Object.setPrototypeOf(q.completedQuery, CompletedQueryInfo.prototype);
Object.setPrototypeOf(q.completedQuery.query, QueryEvaluationInfo.prototype);
// slurped queries do not need to be disposed
q.completedQuery.dispose = () => { /**/ };
}
return q;
});
} catch (e) {
void showAndLogErrorMessage('Error loading query history.', {
fullMessage: ['Error loading query history.', e.stack].join('\n'),
});
return [];
}
}
/**
* Save the query history to disk. It is not necessary that the parent directory
* exists, but if it does, it must be writable. An existing file will be overwritten.
*
* Any errors will be rethrown.
*
* @param queries the list of queries to save.
* @param fsPath the path to save the queries to.
*/
static async splat(queries: FullQueryInfo[], fsPath: string): Promise<void> {
try {
const data = JSON.stringify(queries, null, 2);
await fs.mkdirp(path.dirname(fsPath));
await fs.writeFile(fsPath, data);
} catch (e) {
throw new Error(`Error saving query history to ${fsPath}: ${e.message}`);
}
}
export class LocalQueryInfo {
readonly t = 'local';
public failureReason: string | undefined;
public completedQuery: CompletedQueryInfo | undefined;
private config: QueryHistoryConfig | undefined;
/**
* Note that in the {@link FullQueryInfo.slurp} method, we create a FullQueryInfo instance
* Note that in the {@link slurpQueryHistory} method, we create a LocalQueryInfo instance
* by explicitly setting the prototype in order to avoid calling this constructor.
*/
constructor(
public readonly initialInfo: InitialQueryInfo,
config: QueryHistoryConfig,
private readonly source?: CancellationTokenSource
private cancellationSource?: CancellationTokenSource // used to cancel in progress queries
) {
this.setConfig(config);
}
cancel() {
this.source?.cancel();
this.cancellationSource?.cancel();
// the query is no longer in progress, so we can dispose of the cancellation token source
this.cancellationSource?.dispose();
delete this.cancellationSource;
}
get startTime() {
@@ -356,12 +311,16 @@ export class FullQueryInfo {
}
}
isCompleted(): boolean {
get completed(): boolean {
return !!this.completedQuery;
}
completeThisQuery(info: QueryWithResults) {
this.completedQuery = new CompletedQueryInfo(info);
// dispose of the cancellation token source and also ensure the source is not serialized as JSON
this.cancellationSource?.dispose();
delete this.cancellationSource;
}
/**
@@ -389,7 +348,7 @@ export class FullQueryInfo {
*
* @param config the global query history config object
*/
private setConfig(config: QueryHistoryConfig) {
setConfig(config: QueryHistoryConfig) {
// avoid serializing config property
Object.defineProperty(this, 'config', {
enumerable: false,

View File

@@ -0,0 +1,99 @@
import * as fs from 'fs-extra';
import * as path from 'path';
import { QueryHistoryConfig } from './config';
import { showAndLogErrorMessage } from './helpers';
import { asyncFilter } from './pure/helpers-pure';
import { CompletedQueryInfo, LocalQueryInfo, QueryHistoryInfo } from './query-results';
import { QueryEvaluationInfo } from './run-queries';
export async function slurpQueryHistory(fsPath: string, config: QueryHistoryConfig): Promise<QueryHistoryInfo[]> {
try {
if (!(await fs.pathExists(fsPath))) {
return [];
}
const data = await fs.readFile(fsPath, 'utf8');
const obj = JSON.parse(data);
if (obj.version !== 1) {
void showAndLogErrorMessage(`Unsupported query history format: v${obj.version}. `);
return [];
}
const queries = obj.queries;
const parsedQueries = queries.map((q: QueryHistoryInfo) => {
// Need to explicitly set prototype since reading in from JSON will not
// do this automatically. Note that we can't call the constructor here since
// the constructor invokes extra logic that we don't want to run here.
if (q.t === 'local') {
Object.setPrototypeOf(q, LocalQueryInfo.prototype);
// The config object is a global, so we need to set it explicitly
// and ensure it is not serialized to JSON.
q.setConfig(config);
// Date instances are serialized as strings. Need to
// convert them back to Date instances.
(q.initialInfo as any).start = new Date(q.initialInfo.start);
if (q.completedQuery) {
// Again, need to explicitly set prototypes.
Object.setPrototypeOf(q.completedQuery, CompletedQueryInfo.prototype);
Object.setPrototypeOf(q.completedQuery.query, QueryEvaluationInfo.prototype);
// slurped queries do not need to be disposed
q.completedQuery.dispose = () => { /**/ };
}
} else if (q.t === 'remote') {
// noop
}
return q;
});
// Filter out queries whose results have been deleted on disk, most likely
// because another workspace purged them when they aged out.
return asyncFilter(parsedQueries, async (q) => {
if (q.t === 'remote') {
// The slurper doesn't know where remote query results are stored,
// so assume they exist here. Whether they still exist on disk is
// checked later.
return true;
}
const resultsPath = q.completedQuery?.query.resultsPaths.resultsPath;
return !!resultsPath && await fs.pathExists(resultsPath);
});
} catch (e) {
void showAndLogErrorMessage('Error loading query history.', {
fullMessage: ['Error loading query history.', e.stack].join('\n'),
});
// since the query history is invalid, it should be deleted so this error does not happen on next startup.
await fs.remove(fsPath);
return [];
}
}
/**
* Save the query history to disk. It is not necessary that the parent directory
* exists, but if it does, it must be writable. An existing file will be overwritten.
*
* Any errors will be rethrown.
*
* @param queries the list of queries to save.
* @param fsPath the path to save the queries to.
*/
export async function splatQueryHistory(queries: QueryHistoryInfo[], fsPath: string): Promise<void> {
try {
if (!(await fs.pathExists(fsPath))) {
await fs.mkdir(path.dirname(fsPath), { recursive: true });
}
// remove incomplete local queries since they cannot be recreated on restart
const filteredQueries = queries.filter(q => q.t === 'local' ? q.completedQuery !== undefined : true);
const data = JSON.stringify({
version: 1,
queries: filteredQueries
}, null, 2);
await fs.writeFile(fsPath, data);
} catch (e) {
throw new Error(`Error saving query history to ${fsPath}: ${e.message}`);
}
}
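
For orientation, here is a minimal usage sketch of the two new helpers. The import path for the serialization module is an assumption (the diff does not show the new file's name); the other imports mirror the ones in the file above.

```ts
import { QueryHistoryConfig } from './config';
import { QueryHistoryInfo } from './query-results';
// Assumed module path for the file added above.
import { slurpQueryHistory, splatQueryHistory } from './query-serialization';

async function roundTripHistory(
  fsPath: string,
  history: QueryHistoryInfo[],
  config: QueryHistoryConfig
): Promise<QueryHistoryInfo[]> {
  // Incomplete local queries are dropped on write; remote items are kept.
  await splatQueryHistory(history, fsPath);
  // On read, prototypes are restored and entries whose results are no
  // longer on disk are filtered out.
  return slurpQueryHistory(fsPath, config);
}
```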

View File

@@ -0,0 +1,5 @@
export enum QueryStatus {
InProgress = 'InProgress',
Completed = 'Completed',
Failed = 'Failed',
}

View File

@@ -171,6 +171,16 @@ export class QueryServerClient extends DisposableObject {
args.push('--old-eval-stats');
}
if (await this.cliServer.cliConstraints.supportsStructuredEvalLog()) {
args.push('--evaluator-log');
args.push(`${this.opts.contextStoragePath}/structured-evaluator-log.json`);
// We hard-code the verbosity level to 5 and minify to false.
// This will be the behavior of the per-query structured logging in the CLI after 2.8.3.
args.push('--evaluator-log-level');
args.push('5');
}
if (this.config.debug) {
args.push('--debug', '--tuple-counting');
}
@@ -234,7 +244,7 @@ export class QueryServerClient extends DisposableObject {
}
get serverProcessPid(): number {
return this.serverProcess!.child.pid;
return this.serverProcess!.child.pid || 0;
}
async sendRequest<P, R, E, RO>(type: RequestType<WithProgressId<P>, R, E, RO>, parameter: P, token?: CancellationToken, progress?: (res: ProgressMessage) => void): Promise<R> {

View File

@@ -1,31 +1,34 @@
import * as os from 'os';
import * as path from 'path';
import { CancellationToken, ExtensionContext } from 'vscode';
import { Credentials } from '../authentication';
import { Logger } from '../logging';
import { downloadArtifactFromLink } from './gh-actions-api-client';
import * as path from 'path';
import { AnalysisSummary } from './shared/remote-query-result';
import { AnalysisResults, QueryResult } from './shared/analysis-result';
import { UserCancellationException } from '../commandRunner';
import * as os from 'os';
import { sarifParser } from '../sarif-parser';
export class AnalysesResultsManager {
// Store for the results of various analyses for a single remote query.
private readonly analysesResults: AnalysisResults[];
// Store for the results of various analyses for each remote query.
// The key is the queryId and is also the name of the directory where results are stored.
private readonly analysesResults: Map<string, AnalysisResults[]>;
constructor(
private readonly ctx: ExtensionContext,
readonly storagePath: string,
private readonly logger: Logger,
) {
this.analysesResults = [];
this.analysesResults = new Map();
}
public async downloadAnalysisResults(
analysisSummary: AnalysisSummary,
publishResults: (analysesResults: AnalysisResults[]) => Promise<void>
): Promise<void> {
if (this.analysesResults.some(x => x.nwo === analysisSummary.nwo)) {
// We already have the results for this analysis, don't download again.
if (this.isAnalysisInMemory(analysisSummary)) {
// We already have the results for this analysis in memory, don't download again.
return;
}
@@ -37,10 +40,13 @@ export class AnalysesResultsManager {
}
public async downloadAnalysesResults(
analysesToDownload: AnalysisSummary[],
allAnalysesToDownload: AnalysisSummary[],
token: CancellationToken | undefined,
publishResults: (analysesResults: AnalysisResults[]) => Promise<void>
): Promise<void> {
// Filter out analyses that we have already in memory.
const analysesToDownload = allAnalysesToDownload.filter(x => !this.isAnalysisInMemory(x));
const credentials = await Credentials.initialize(this.ctx);
void this.logger.log('Downloading and processing analyses results');
@@ -74,8 +80,16 @@ export class AnalysesResultsManager {
}
}
public getAnalysesResults(): AnalysisResults[] {
return [...this.analysesResults];
public getAnalysesResults(queryId: string): AnalysisResults[] {
return [...this.internalGetAnalysesResults(queryId)];
}
private internalGetAnalysesResults(queryId: string): AnalysisResults[] {
return this.analysesResults.get(queryId) || [];
}
public removeAnalysesResults(queryId: string) {
this.analysesResults.delete(queryId);
}
private async downloadSingleAnalysisResults(
@@ -88,28 +102,38 @@ export class AnalysesResultsManager {
status: 'InProgress',
results: []
};
this.analysesResults.push(analysisResults);
void publishResults(this.analysesResults);
const queryId = analysis.downloadLink.queryId;
const resultsForQuery = this.internalGetAnalysesResults(queryId);
resultsForQuery.push(analysisResults);
this.analysesResults.set(queryId, resultsForQuery);
void publishResults([...resultsForQuery]);
const pos = resultsForQuery.length - 1;
let artifactPath;
try {
artifactPath = await downloadArtifactFromLink(credentials, analysis.downloadLink);
artifactPath = await downloadArtifactFromLink(credentials, this.storagePath, analysis.downloadLink);
}
catch (e) {
throw new Error(`Could not download the analysis results for ${analysis.nwo}: ${e.message}`);
}
let newAnalysisResults: AnalysisResults;
if (path.extname(artifactPath) === '.sarif') {
const queryResults = await this.readResults(artifactPath);
analysisResults.results = queryResults;
analysisResults.status = 'Completed';
newAnalysisResults = {
...analysisResults,
results: queryResults,
status: 'Completed'
};
} else {
void this.logger.log('Cannot download results. Only alert and path queries are fully supported.');
analysisResults.status = 'Failed';
newAnalysisResults = {
...analysisResults,
status: 'Failed'
};
}
void publishResults(this.analysesResults);
resultsForQuery[pos] = newAnalysisResults;
void publishResults([...resultsForQuery]);
}
private async readResults(filePath: string): Promise<QueryResult[]> {
@@ -119,7 +143,7 @@ export class AnalysesResultsManager {
// Read the sarif file and extract information that we want to display
// in the UI. For now we're only getting the message texts but we'll gradually
// extract more information based on the UX we want to build.
sarifLog.runs?.forEach(run => {
run?.results?.forEach(result => {
@@ -133,4 +157,8 @@ export class AnalysesResultsManager {
return queryResults;
}
private isAnalysisInMemory(analysis: AnalysisSummary): boolean {
return this.internalGetAnalysesResults(analysis.downloadLink.queryId).some(x => x.nwo === analysis.nwo);
}
}

View File

@@ -1,15 +1,15 @@
/**
* Represents a link to an artifact to be downloaded.
*/
export interface DownloadLink {
/**
* A unique id of the artifact being downloaded.
*/
id: string;
/**
* The URL path to use against the GitHub API to download the
* linked artifact.
*/
urlPath: string;
@@ -17,4 +17,9 @@ export interface DownloadLink {
* An optional path to follow inside the downloaded archive containing the artifact.
*/
innerFilePath?: string;
/**
* A unique id of the remote query run. This is used to determine where to store artifacts and data from the run.
*/
queryId: string;
}
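
To illustrate how the new `queryId` field ties an artifact to its on-disk location, here is a hypothetical link; all concrete values are invented. Given such a link, the changed download code below unpacks the artifact under `<storagePath>/<queryId>/<id>` and returns the path to the inner file.

```ts
// Hypothetical values for illustration only.
const link: DownloadLink = {
  id: '12345',
  urlPath: '/repos/octo-org/controller-repo/actions/artifacts/12345',
  innerFilePath: 'results.sarif',
  queryId: 'FindThings-V1StGXR8_Z5jdHi6',
};
```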

View File

@@ -1,16 +1,15 @@
import * as unzipper from 'unzipper';
import * as path from 'path';
import * as fs from 'fs-extra';
import { showAndLogWarningMessage } from '../helpers';
import { showAndLogWarningMessage, tmpDir } from '../helpers';
import { Credentials } from '../authentication';
import { logger } from '../logging';
import { tmpDir } from '../run-queries';
import { RemoteQueryWorkflowResult } from './remote-query-workflow-result';
import { DownloadLink } from './download-link';
import { RemoteQuery } from './remote-query';
import { RemoteQueryResultIndex, RemoteQueryResultIndexItem } from './remote-query-result-index';
import { RemoteQueryFailureIndexItem, RemoteQueryResultIndex, RemoteQuerySuccessIndexItem } from './remote-query-result-index';
interface ApiResultIndexItem {
interface ApiSuccessIndexItem {
nwo: string;
id: string;
results_count: number;
@@ -18,6 +17,17 @@ interface ApiResultIndexItem {
sarif_file_size?: number;
}
interface ApiFailureIndexItem {
nwo: string;
id: string;
error: string;
}
interface ApiResultIndex {
successes: ApiSuccessIndexItem[];
failures: ApiFailureIndexItem[];
}
export async function getRemoteQueryIndex(
credentials: Credentials,
remoteQuery: RemoteQuery
@@ -32,9 +42,9 @@ export async function getRemoteQueryIndex(
const artifactList = await listWorkflowRunArtifacts(credentials, owner, repoName, workflowRunId);
const resultIndexArtifactId = getArtifactIDfromName('result-index', workflowUri, artifactList);
const resultIndexItems = await getResultIndexItems(credentials, owner, repoName, resultIndexArtifactId);
const resultIndex = await getResultIndex(credentials, owner, repoName, resultIndexArtifactId);
const items = resultIndexItems.map(item => {
const successes = resultIndex?.successes.map(item => {
const artifactId = getArtifactIDfromName(item.id, workflowUri, artifactList);
return {
@@ -43,35 +53,47 @@ export async function getRemoteQueryIndex(
nwo: item.nwo,
resultCount: item.results_count,
bqrsFileSize: item.bqrs_file_size,
sarifFileSize: item.sarif_file_size,
} as RemoteQueryResultIndexItem;
sarifFileSize: item.sarif_file_size
} as RemoteQuerySuccessIndexItem;
});
const failures = resultIndex?.failures.map(item => {
return {
id: item.id.toString(),
nwo: item.nwo,
error: item.error
} as RemoteQueryFailureIndexItem;
});
return {
artifactsUrlPath,
items
successes: successes || [],
failures: failures || []
};
}
export async function downloadArtifactFromLink(
credentials: Credentials,
storagePath: string,
downloadLink: DownloadLink
): Promise<string> {
const octokit = await credentials.getOctokit();
// Download the zipped artifact.
const response = await octokit.request(`GET ${downloadLink.urlPath}/zip`, {});
const extractedPath = path.join(storagePath, downloadLink.queryId, downloadLink.id);
const zipFilePath = path.join(tmpDir.name, `${downloadLink.id}.zip`);
await saveFile(`${zipFilePath}`, response.data as ArrayBuffer);
// first check if we already have the artifact
if (!(await fs.pathExists(extractedPath))) {
// Download the zipped artifact.
const response = await octokit.request(`GET ${downloadLink.urlPath}/zip`, {});
// Extract the zipped artifact.
const extractedPath = path.join(tmpDir.name, downloadLink.id);
await unzipFile(zipFilePath, extractedPath);
const zipFilePath = path.join(storagePath, downloadLink.queryId, `${downloadLink.id}.zip`);
await saveFile(`${zipFilePath}`, response.data as ArrayBuffer);
return downloadLink.innerFilePath
? path.join(extractedPath, downloadLink.innerFilePath)
: extractedPath;
// Extract the zipped artifact.
await unzipFile(zipFilePath, extractedPath);
}
return path.join(extractedPath, downloadLink.innerFilePath || '');
}
/**
@@ -82,17 +104,17 @@ export async function downloadArtifactFromLink(
* @param workflowRunId The ID of the workflow run to get the result index for.
* @returns An object containing the result index.
*/
async function getResultIndexItems(
async function getResultIndex(
credentials: Credentials,
owner: string,
repo: string,
artifactId: number
): Promise<ApiResultIndexItem[]> {
): Promise<ApiResultIndex | undefined> {
const artifactPath = await downloadArtifact(credentials, owner, repo, artifactId);
const indexFilePath = path.join(artifactPath, 'index.json');
if (!(await fs.pathExists(indexFilePath))) {
void showAndLogWarningMessage('Could not find an `index.json` file in the result artifact.');
return [];
return undefined;
}
const resultIndex = await fs.readFile(path.join(artifactPath, 'index.json'), 'utf8');
@@ -106,8 +128,8 @@ async function getResultIndexItems(
/**
* Gets the status of a workflow run.
* @param credentials Credentials for authenticating to the GitHub API.
* @param owner
* @param repo
* @param workflowRunId The ID of the workflow run to get the result index for.
* @returns The workflow run status.
*/

View File

@@ -10,7 +10,6 @@ import {
} from 'vscode';
import * as path from 'path';
import { tmpDir } from '../run-queries';
import {
ToRemoteQueriesMessage,
FromRemoteQueriesMessage,
@@ -55,7 +54,7 @@ export class RemoteQueriesInterfaceManager {
queryResult: this.buildViewModel(query, queryResult)
});
await this.setAnalysisResults(this.analysesResultsManager.getAnalysesResults());
await this.setAnalysisResults(this.analysesResultsManager.getAnalysesResults(queryResult.queryId));
}
/**
@@ -83,7 +82,8 @@ export class RemoteQueriesInterfaceManager {
totalResultCount: totalResultCount,
executionTimestamp: this.formatDate(query.executionStartTime),
executionDuration: executionDuration,
analysisSummaries: analysisSummaries
analysisSummaries: analysisSummaries,
analysisFailures: queryResult.analysisFailures,
};
}
@@ -99,7 +99,7 @@ export class RemoteQueriesInterfaceManager {
enableFindWidget: true,
retainContextWhenHidden: true,
localResourceRoots: [
Uri.file(tmpDir.name),
Uri.file(this.analysesResultsManager.storagePath),
Uri.file(path.join(this.ctx.extensionPath, 'out')),
],
}
@@ -225,7 +225,7 @@ export class RemoteQueriesInterfaceManager {
private async viewAnalysisResults(msg: RemoteQueryViewAnalysisResultsMessage): Promise<void> {
const downloadLink = msg.analysisSummary.downloadLink;
const filePath = path.join(tmpDir.name, downloadLink.id, downloadLink.innerFilePath || '');
const filePath = path.join(this.analysesResultsManager.storagePath, downloadLink.queryId, downloadLink.id, downloadLink.innerFilePath || '');
const sarifViewerExtensionId = 'MS-SarifVSCode.sarif-viewer';
@@ -240,6 +240,9 @@ export class RemoteQueriesInterfaceManager {
await sarifExt.activate();
}
// Clear any previous results before showing new results
await sarifExt.exports.closeAllLogs();
await sarifExt.exports.openLogs([
Uri.file(filePath),
]);
@@ -258,8 +261,8 @@ export class RemoteQueriesInterfaceManager {
return this.getPanel().webview.postMessage(msg);
}
private getDuration(startTime: Date, endTime: Date): string {
const diffInMs = startTime.getTime() - endTime.getTime();
private getDuration(startTime: number, endTime: number): string {
const diffInMs = startTime - endTime;
return this.formatDuration(diffInMs);
}
@@ -279,7 +282,8 @@ export class RemoteQueriesInterfaceManager {
}
}
private formatDate = (d: Date): string => {
private formatDate = (millis: number): string => {
const d = new Date(millis);
const datePart = d.toLocaleDateString(undefined, { day: 'numeric', month: 'short' });
const timePart = d.toLocaleTimeString(undefined, { hour: 'numeric', minute: 'numeric', hour12: true });
return `${datePart} at ${timePart}`;

View File

@@ -1,8 +1,12 @@
import { CancellationToken, commands, ExtensionContext, Uri, window } from 'vscode';
import { nanoid } from 'nanoid';
import * as path from 'path';
import * as fs from 'fs-extra';
import { Credentials } from '../authentication';
import { CodeQLCliServer } from '../cli';
import { ProgressCallback } from '../commandRunner';
import { showAndLogErrorMessage, showInformationMessageWithAction } from '../helpers';
import { createTimestampFile, showAndLogErrorMessage, showInformationMessageWithAction } from '../helpers';
import { Logger } from '../logging';
import { runRemoteQuery } from './run-remote-query';
import { RemoteQueriesInterfaceManager } from './remote-queries-interface';
@@ -13,11 +17,18 @@ import { RemoteQueryResultIndex } from './remote-query-result-index';
import { RemoteQueryResult } from './remote-query-result';
import { DownloadLink } from './download-link';
import { AnalysesResultsManager } from './analyses-results-manager';
import { assertNever } from '../pure/helpers-pure';
import { RemoteQueryHistoryItem } from './remote-query-history-item';
import { QueryHistoryManager } from '../query-history';
import { QueryStatus } from '../query-status';
import { DisposableObject } from '../pure/disposable-object';
import { QueryHistoryInfo } from '../query-results';
const autoDownloadMaxSize = 300 * 1024;
const autoDownloadMaxCount = 100;
export class RemoteQueriesManager {
const noop = () => { /* do nothing */ };
export class RemoteQueriesManager extends DisposableObject {
private readonly remoteQueriesMonitor: RemoteQueriesMonitor;
private readonly analysesResultsManager: AnalysesResultsManager;
private readonly interfaceManager: RemoteQueriesInterfaceManager;
@@ -25,11 +36,55 @@ export class RemoteQueriesManager {
constructor(
private readonly ctx: ExtensionContext,
private readonly cliServer: CodeQLCliServer,
private readonly qhm: QueryHistoryManager,
private readonly storagePath: string,
logger: Logger,
) {
this.analysesResultsManager = new AnalysesResultsManager(ctx, logger);
super();
this.analysesResultsManager = new AnalysesResultsManager(ctx, storagePath, logger);
this.interfaceManager = new RemoteQueriesInterfaceManager(ctx, logger, this.analysesResultsManager);
this.remoteQueriesMonitor = new RemoteQueriesMonitor(ctx, logger);
// Handle events from the query history manager
this.push(this.qhm.onDidAddQueryItem(this.handleAddQueryItem.bind(this)));
this.push(this.qhm.onDidRemoveQueryItem(this.handleRemoveQueryItem.bind(this)));
this.push(this.qhm.onWillOpenQueryItem(this.handleOpenQueryItem.bind(this)));
}
private async handleAddQueryItem(queryItem: QueryHistoryInfo) {
if (queryItem?.t === 'remote') {
if (!(await this.queryHistoryItemExists(queryItem))) {
// In this case, the query was deleted from disk, most likely because it was purged
// by another workspace. We should remove it from the history manager.
await this.qhm.handleRemoveHistoryItem(queryItem);
} else if (queryItem.status === QueryStatus.InProgress) {
// In this case, last time we checked, the query was still in progress.
// We need to set up the monitor to check for completion.
await commands.executeCommand('codeQL.monitorRemoteQuery', queryItem);
}
}
}
private async handleRemoveQueryItem(queryItem: QueryHistoryInfo) {
if (queryItem?.t === 'remote') {
this.analysesResultsManager.removeAnalysesResults(queryItem.queryId);
await this.removeStorageDirectory(queryItem);
}
}
private async handleOpenQueryItem(queryItem: QueryHistoryInfo) {
if (queryItem?.t === 'remote') {
try {
const remoteQueryResult = await this.retrieveJsonFile(queryItem, 'query-result.json') as RemoteQueryResult;
// open results in the background
void this.openResults(queryItem.remoteQuery, remoteQueryResult).then(
noop,
err => void showAndLogErrorMessage(err)
);
} catch (e) {
void showAndLogErrorMessage(`Could not open query results. ${e}`);
}
}
}
public async runRemoteQuery(
@@ -46,47 +101,75 @@ export class RemoteQueriesManager {
progress,
token);
if (querySubmission && querySubmission.query) {
void commands.executeCommand('codeQL.monitorRemoteQuery', querySubmission.query);
if (querySubmission?.query) {
const query = querySubmission.query;
const queryId = this.createQueryId(query.queryName);
const queryHistoryItem: RemoteQueryHistoryItem = {
t: 'remote',
status: QueryStatus.InProgress,
completed: false,
queryId,
label: query.queryName,
remoteQuery: query,
};
await this.prepareStorageDirectory(queryHistoryItem);
await this.storeJsonFile(queryHistoryItem, 'query.json', query);
this.qhm.addQuery(queryHistoryItem);
await this.qhm.refreshTreeView();
}
}
public async monitorRemoteQuery(
query: RemoteQuery,
queryItem: RemoteQueryHistoryItem,
cancellationToken: CancellationToken
): Promise<void> {
const credentials = await Credentials.initialize(this.ctx);
const queryResult = await this.remoteQueriesMonitor.monitorQuery(query, cancellationToken);
const queryWorkflowResult = await this.remoteQueriesMonitor.monitorQuery(queryItem.remoteQuery, cancellationToken);
const executionEndTime = new Date();
const executionEndTime = Date.now();
if (queryResult.status === 'CompletedSuccessfully') {
const resultIndex = await getRemoteQueryIndex(credentials, query);
if (!resultIndex) {
void showAndLogErrorMessage(`There was an issue retrieving the result for the query ${query.queryName}`);
return;
if (queryWorkflowResult.status === 'CompletedSuccessfully') {
const resultIndex = await getRemoteQueryIndex(credentials, queryItem.remoteQuery);
queryItem.completed = true;
if (resultIndex) {
queryItem.status = QueryStatus.Completed;
const queryResult = this.mapQueryResult(executionEndTime, resultIndex, queryItem.queryId);
await this.storeJsonFile(queryItem, 'query-result.json', queryResult);
// Kick off auto-download of results in the background.
void commands.executeCommand('codeQL.autoDownloadRemoteQueryResults', queryResult);
// Ask if the user wants to open the results in the background.
void this.askToOpenResults(queryItem.remoteQuery, queryResult).then(
noop,
err => {
void showAndLogErrorMessage(err);
}
);
} else {
void showAndLogErrorMessage(`There was an issue retrieving the result for the query ${queryItem.label}`);
queryItem.status = QueryStatus.Failed;
}
const queryResult = this.mapQueryResult(executionEndTime, resultIndex);
// Kick off auto-download of results.
void commands.executeCommand('codeQL.autoDownloadRemoteQueryResults', queryResult);
const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
const message = `Query "${query.queryName}" run on ${query.repositories.length} repositories and returned ${totalResultCount} results`;
const shouldOpenView = await showInformationMessageWithAction(message, 'View');
if (shouldOpenView) {
await this.interfaceManager.showResults(query, queryResult);
}
} else if (queryResult.status === 'CompletedUnsuccessfully') {
await showAndLogErrorMessage(`Remote query execution failed. Error: ${queryResult.error}`);
return;
} else if (queryResult.status === 'Cancelled') {
await showAndLogErrorMessage('Remote query monitoring was cancelled');
} else if (queryWorkflowResult.status === 'CompletedUnsuccessfully') {
queryItem.failureReason = queryWorkflowResult.error;
queryItem.status = QueryStatus.Failed;
void showAndLogErrorMessage(`Remote query execution failed. Error: ${queryWorkflowResult.error}`);
} else if (queryWorkflowResult.status === 'Cancelled') {
queryItem.failureReason = 'Cancelled';
queryItem.status = QueryStatus.Failed;
void showAndLogErrorMessage('Remote query monitoring was cancelled');
} else if (queryWorkflowResult.status === 'InProgress') {
// Should not get here. Only including this to ensure `assertNever` uses proper type checking.
void showAndLogErrorMessage(`Unexpected status: ${queryWorkflowResult.status}`);
} else {
// Ensure all cases are covered
assertNever(queryWorkflowResult.status);
}
await this.qhm.refreshTreeView();
}
public async autoDownloadRemoteQueryResults(
@@ -109,21 +192,85 @@ export class RemoteQueriesManager {
results => this.interfaceManager.setAnalysisResults(results));
}
private mapQueryResult(executionEndTime: Date, resultIndex: RemoteQueryResultIndex): RemoteQueryResult {
const analysisSummaries = resultIndex.items.map(item => ({
private mapQueryResult(executionEndTime: number, resultIndex: RemoteQueryResultIndex, queryId: string): RemoteQueryResult {
const analysisSummaries = resultIndex.successes.map(item => ({
nwo: item.nwo,
resultCount: item.resultCount,
fileSizeInBytes: item.sarifFileSize ? item.sarifFileSize : item.bqrsFileSize,
downloadLink: {
id: item.artifactId.toString(),
urlPath: `${resultIndex.artifactsUrlPath}/${item.artifactId}`,
innerFilePath: item.sarifFileSize ? 'results.sarif' : 'results.bqrs'
innerFilePath: item.sarifFileSize ? 'results.sarif' : 'results.bqrs',
queryId,
} as DownloadLink
}));
const analysisFailures = resultIndex.failures.map(item => ({
nwo: item.nwo,
error: item.error
}));
return {
executionEndTime,
analysisSummaries
analysisSummaries,
analysisFailures,
queryId
};
}
public async openResults(query: RemoteQuery, queryResult: RemoteQueryResult) {
await this.interfaceManager.showResults(query, queryResult);
}
private async askToOpenResults(query: RemoteQuery, queryResult: RemoteQueryResult): Promise<void> {
const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
const message = `Query "${query.queryName}" run on ${query.repositories.length} repositories and returned ${totalResultCount} results`;
const shouldOpenView = await showInformationMessageWithAction(message, 'View');
if (shouldOpenView) {
await this.openResults(query, queryResult);
}
}
/**
* Generates a unique id for this query, suitable for determining the storage location for the downloaded query artifacts.
* @param queryName the name of the query that was run.
* @returns a unique id built from the query name and a random suffix.
*/
private createQueryId(queryName: string): string {
return `${queryName}-${nanoid()}`;
}
/**
* Prepares a directory for storing analysis results for a single query run.
* This directory contains a timestamp file, which will be
* used by the query history manager to determine when the directory
* should be deleted.
*
* @param queryHistoryItem the remote query history item whose storage directory should be created.
*/
private async prepareStorageDirectory(queryHistoryItem: RemoteQueryHistoryItem): Promise<void> {
await createTimestampFile(path.join(this.storagePath, queryHistoryItem.queryId));
}
private async storeJsonFile<T>(queryHistoryItem: RemoteQueryHistoryItem, fileName: string, obj: T): Promise<void> {
const filePath = path.join(this.storagePath, queryHistoryItem.queryId, fileName);
await fs.writeFile(filePath, JSON.stringify(obj, null, 2), 'utf8');
}
private async retrieveJsonFile<T>(queryHistoryItem: RemoteQueryHistoryItem, fileName: string): Promise<T> {
const filePath = path.join(this.storagePath, queryHistoryItem.queryId, fileName);
return JSON.parse(await fs.readFile(filePath, 'utf8'));
}
private async removeStorageDirectory(queryItem: RemoteQueryHistoryItem): Promise<void> {
const filePath = path.join(this.storagePath, queryItem.queryId);
await fs.remove(filePath);
}
private async queryHistoryItemExists(queryItem: RemoteQueryHistoryItem): Promise<boolean> {
const filePath = path.join(this.storagePath, queryItem.queryId);
return await fs.pathExists(filePath);
}
}

View File

@@ -0,0 +1,15 @@
import { QueryStatus } from '../query-status';
import { RemoteQuery } from './remote-query';
/**
* Information about a remote query.
*/
export interface RemoteQueryHistoryItem {
readonly t: 'remote';
failureReason?: string;
status: QueryStatus;
completed: boolean;
readonly queryId: string,
label: string, // TODO: the query label should support interpolation, like local queries
remoteQuery: RemoteQuery,
}
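
For reference, this interface is populated by `RemoteQueriesManager.runRemoteQuery` above; the following is a condensed sketch of that construction, where `query` is the submitted `RemoteQuery` and `queryId` comes from `createQueryId`.

```ts
const item: RemoteQueryHistoryItem = {
  t: 'remote',
  status: QueryStatus.InProgress,
  completed: false,
  queryId,                // e.g. `${query.queryName}-${nanoid()}`
  label: query.queryName, // no interpolation yet, see the TODO above
  remoteQuery: query,
};
```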

View File

@@ -1,9 +1,10 @@
export interface RemoteQueryResultIndex {
artifactsUrlPath: string;
items: RemoteQueryResultIndexItem[];
successes: RemoteQuerySuccessIndexItem[];
failures: RemoteQueryFailureIndexItem[];
}
export interface RemoteQueryResultIndexItem {
export interface RemoteQuerySuccessIndexItem {
id: string;
artifactId: number;
nwo: string;
@@ -11,3 +12,10 @@ export interface RemoteQueryResultIndexItem {
bqrsFileSize: number;
sarifFileSize?: number;
}
export interface RemoteQueryFailureIndexItem {
id: string;
artifactId: number;
nwo: string;
error: string;
}

View File

@@ -1,8 +1,11 @@
import { DownloadLink } from './download-link';
import { AnalysisFailure } from './shared/analysis-failure';
export interface RemoteQueryResult {
executionEndTime: Date;
executionEndTime: number; // Can't use a Date here since it needs to be serialized and deserialized.
analysisSummaries: AnalysisSummary[];
analysisFailures: AnalysisFailure[];
queryId: string;
}
export interface AnalysisSummary {

View File

@@ -6,6 +6,6 @@ export interface RemoteQuery {
queryText: string;
controllerRepository: Repository;
repositories: Repository[];
executionStartTime: Date;
executionStartTime: number; // Use number here since it needs to be serialized and deserialized.
actionsWorkflowRunId: number;
}
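The switch from `Date` to `number` for both `executionStartTime` (here) and `executionEndTime` (above) is about JSON round-tripping: `JSON.stringify` turns a `Date` into a string, and `JSON.parse` does not turn it back. A small sketch of the pattern the diff adopts, with `Date.now()` when recording and `new Date(n)` only at display time:

// Sketch: record epoch milliseconds, and rehydrate a Date only when formatting.
const executionStartTime = Date.now();                     // what RemoteQuery now stores
const serialized = JSON.stringify({ executionStartTime }); // survives a JSON round trip unchanged
const parsed = JSON.parse(serialized) as { executionStartTime: number };
const forDisplay = new Date(parsed.executionStartTime).toLocaleString();
console.log(forDisplay);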

View File

@@ -10,25 +10,19 @@ import {
showAndLogErrorMessage,
showAndLogInformationMessage,
showInformationMessageWithAction,
tryGetQueryMetadata
tryGetQueryMetadata,
tmpDir
} from '../helpers';
import { Credentials } from '../authentication';
import * as cli from '../cli';
import { logger } from '../logging';
import { getRemoteControllerRepo, getRemoteRepositoryLists, setRemoteControllerRepo } from '../config';
import { tmpDir } from '../run-queries';
import { ProgressCallback, UserCancellationException } from '../commandRunner';
import { OctokitResponse } from '@octokit/types/dist-types';
import { RemoteQuery } from './remote-query';
import { RemoteQuerySubmissionResult } from './remote-query-submission-result';
import { QueryMetadata } from '../pure/interface-types';
interface Config {
repositories: string[];
ref?: string;
language?: string;
}
export interface QlPack {
name: string;
version: string;
@@ -108,7 +102,7 @@ export async function getRepositories(): Promise<string[] | undefined> {
*
* @returns the entire qlpack as a base64 string.
*/
async function generateQueryPack(cliServer: cli.CodeQLCliServer, queryFile: string, queryPackDir: string, fallbackLanguage?: string): Promise<{
async function generateQueryPack(cliServer: cli.CodeQLCliServer, queryFile: string, queryPackDir: string): Promise<{
base64Pack: string,
language: string
}> {
@@ -150,7 +144,7 @@ async function generateQueryPack(cliServer: cli.CodeQLCliServer, queryFile: stri
} else {
// open popup to ask for language if not already hardcoded
language = fallbackLanguage || await askForLanguage(cliServer);
language = await askForLanguage(cliServer);
// copy only the query file to the query pack directory
// and generate a synthetic query pack
@@ -172,6 +166,9 @@ async function generateQueryPack(cliServer: cli.CodeQLCliServer, queryFile: stri
await ensureNameAndSuite(queryPackDir, packRelativePath);
// Clear the cliServer cache so that the previous qlpack text is purged from the CLI.
await cliServer.clearCache();
const bundlePath = await getPackedBundlePath(queryPackDir);
void logger.log(`Compiling and bundling query pack from ${queryPackDir} to ${bundlePath}. (This may take a while.)`);
await cliServer.packInstall(queryPackDir);
@@ -238,47 +235,22 @@ export async function runRemoteQuery(
throw new UserCancellationException('Not a CodeQL query file.');
}
progress({
maxStep: 5,
step: 1,
message: 'Determining project list'
});
const queryFile = uri.fsPath;
const repositoriesFile = queryFile.substring(0, queryFile.length - '.ql'.length) + '.repositories';
let ref: string | undefined;
// For the case of single file remote queries, use the language from the config in order to avoid the user having to select it.
let fallbackLanguage: string | undefined;
let repositories: string[] | undefined;
progress({
maxStep: 5,
step: 2,
maxStep: 4,
step: 1,
message: 'Determining query target language'
});
// If the user has an explicit `.repositories` file, use that.
// Otherwise, prompt user to select repositories from the `codeQL.remoteQueries.repositoryLists` setting.
if (await fs.pathExists(repositoriesFile)) {
void logger.log(`Found '${repositoriesFile}'. Using information from that file to run ${queryFile}.`);
const config = yaml.safeLoad(await fs.readFile(repositoriesFile, 'utf8')) as Config;
ref = config.ref || 'main';
fallbackLanguage = config.language;
repositories = config.repositories;
} else {
ref = 'main';
repositories = await getRepositories();
}
const repositories = await getRepositories();
if (!repositories || repositories.length === 0) {
throw new UserCancellationException('No repositories to query.');
}
progress({
maxStep: 5,
step: 3,
maxStep: 4,
step: 2,
message: 'Determining controller repo'
});
@@ -309,8 +281,8 @@ export async function runRemoteQuery(
const [owner, repo] = controllerRepo.split('/');
progress({
maxStep: 5,
step: 4,
maxStep: 4,
step: 3,
message: 'Bundling the query pack'
});
@@ -318,20 +290,21 @@ export async function runRemoteQuery(
throw new UserCancellationException('Cancelled');
}
const { base64Pack, language } = await generateQueryPack(cliServer, queryFile, queryPackDir, fallbackLanguage);
const { base64Pack, language } = await generateQueryPack(cliServer, queryFile, queryPackDir);
if (token.isCancellationRequested) {
throw new UserCancellationException('Cancelled');
}
progress({
maxStep: 5,
step: 5,
maxStep: 4,
step: 4,
message: 'Sending request'
});
const workflowRunId = await runRemoteQueriesApiRequest(credentials, ref, language, repositories, owner, repo, base64Pack, dryRun);
const queryStartTime = new Date();
// TODO When https://github.com/dsp-testing/qc-run2/pull/567 is merged, we can change the branch back to `main`.
const workflowRunId = await runRemoteQueriesApiRequest(credentials, 'better-errors', language, repositories, owner, repo, base64Pack, dryRun);
const queryStartTime = Date.now();
const queryMetadata = await tryGetQueryMetadata(cliServer, queryFile);
if (dryRun) {
@@ -410,16 +383,12 @@ export async function attemptRerun(
) {
if (typeof error.message === 'string' && error.message.includes('Some repositories were invalid')) {
const invalidRepos = error?.response?.data?.invalid_repos || [];
const reposWithoutDbUploads = error?.response?.data?.repos_without_db_uploads || [];
void logger.log('Unable to run query on some of the specified repositories');
if (invalidRepos.length > 0) {
void logger.log(`Invalid repos: ${invalidRepos.join(', ')}`);
}
if (reposWithoutDbUploads.length > 0) {
void logger.log(`Repos without DB uploads: ${reposWithoutDbUploads.join(', ')}`);
}
if (invalidRepos.length + reposWithoutDbUploads.length === repositories.length) {
if (invalidRepos.length === repositories.length) {
// Every repo is invalid in some way
void showAndLogErrorMessage('Unable to run query on any of the specified repositories.');
return;
@@ -428,7 +397,7 @@ export async function attemptRerun(
const popupMessage = 'Unable to run query on some of the specified repositories. [See logs for more details](command:codeQL.showLogs).';
const rerunQuery = await showInformationMessageWithAction(popupMessage, 'Rerun on the valid repositories only');
if (rerunQuery) {
const validRepositories = repositories.filter(r => !invalidRepos.includes(r) && !reposWithoutDbUploads.includes(r));
const validRepositories = repositories.filter(r => !invalidRepos.includes(r));
void logger.log(`Rerunning query on set of valid repositories: ${JSON.stringify(validRepositories)}`);
return await runRemoteQueriesApiRequest(credentials, ref, language, validRepositories, owner, repo, queryPackBase64, dryRun);
}
@@ -467,10 +436,10 @@ async function buildRemoteQueryEntity(
queryMetadata: QueryMetadata | undefined,
controllerRepoOwner: string,
controllerRepoName: string,
queryStartTime: Date,
queryStartTime: number,
workflowRunId: number
): Promise<RemoteQuery> {
// The query name is either the name as specified in the query metadata, or the file name.
// The query name is either the name as specified in the query metadata, or the file name.
const queryName = queryMetadata?.name ?? path.basename(queryFilePath);
const queryRepos = repositories.map(r => {

View File

@@ -32,12 +32,13 @@ export const sampleRemoteQuery: RemoteQuery = {
name: 'repo5'
}
],
executionStartTime: new Date('2022-01-06T17:02:15.026Z'),
executionStartTime: new Date('2022-01-06T17:02:15.026Z').getTime(),
actionsWorkflowRunId: 1662757118
};
export const sampleRemoteQueryResult: RemoteQueryResult = {
executionEndTime: new Date('2022-01-06T17:04:37.026Z'),
queryId: 'query123',
executionEndTime: new Date('2022-01-06T17:04:37.026Z').getTime(),
analysisSummaries: [
{
nwo: 'big-corp/repo1',
@@ -46,7 +47,8 @@ export const sampleRemoteQueryResult: RemoteQueryResult = {
downloadLink: {
id: '137697017',
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697017',
innerFilePath: 'results.sarif'
innerFilePath: 'results.sarif',
queryId: 'query.ql-123-xyz'
}
},
{
@@ -56,7 +58,8 @@ export const sampleRemoteQueryResult: RemoteQueryResult = {
downloadLink: {
id: '137697018',
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697018',
innerFilePath: 'results.sarif'
innerFilePath: 'results.sarif',
queryId: 'query.ql-123-xyz'
}
},
{
@@ -66,7 +69,8 @@ export const sampleRemoteQueryResult: RemoteQueryResult = {
downloadLink: {
id: '137697019',
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697019',
innerFilePath: 'results.sarif'
innerFilePath: 'results.sarif',
queryId: 'query.ql-123-xyz'
}
},
{
@@ -76,9 +80,20 @@ export const sampleRemoteQueryResult: RemoteQueryResult = {
downloadLink: {
id: '137697020',
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697020',
innerFilePath: 'results.sarif'
innerFilePath: 'results.sarif',
queryId: 'query.ql-123-xyz'
}
}
],
analysisFailures: [
{
nwo: 'big-corp/repo5',
error: 'Error message'
},
{
nwo: 'big-corp/repo6',
error: 'Error message'
},
]
};

View File

@@ -0,0 +1,4 @@
export interface AnalysisFailure {
nwo: string,
error: string
}

View File

@@ -1,4 +1,5 @@
import { DownloadLink } from '../download-link';
import { AnalysisFailure } from './analysis-failure';
export interface RemoteQueryResult {
queryTitle: string;
@@ -10,7 +11,8 @@ export interface RemoteQueryResult {
totalResultCount: number;
executionTimestamp: string;
executionDuration: string;
analysisSummaries: AnalysisSummary[]
analysisSummaries: AnalysisSummary[],
analysisFailures: AnalysisFailure[];
}
export interface AnalysisSummary {

View File

@@ -1,6 +1,6 @@
import * as React from 'react';
import * as octicons from '../../view/octicons';
import styled from 'styled-components';
import { DownloadIcon } from '@primer/octicons-react';
const ButtonLink = styled.a`
display: inline-block;
@@ -16,7 +16,7 @@ const ButtonLink = styled.a`
const DownloadButton = ({ text, onClick }: { text: string, onClick: () => void }) => (
<ButtonLink onClick={onClick}>
{octicons.download}{text}
<DownloadIcon size={16} />{text}
</ButtonLink>
);

View File

@@ -1,10 +1,9 @@
import * as React from 'react';
import { useEffect, useState } from 'react';
import * as Rdom from 'react-dom';
import { ThemeProvider } from '@primer/react';
import { Flash, ThemeProvider } from '@primer/react';
import { ToRemoteQueriesMessage } from '../../pure/interface-types';
import { AnalysisSummary, RemoteQueryResult } from '../shared/remote-query-result';
import * as octicons from '../../view/octicons';
import { vscode } from '../../view/vscode-api';
@@ -17,7 +16,7 @@ import DownloadButton from './DownloadButton';
import { AnalysisResults } from '../shared/analysis-result';
import DownloadSpinner from './DownloadSpinner';
import CollapsibleItem from './CollapsibleItem';
import { FileSymlinkFileIcon } from '@primer/octicons-react';
import { AlertIcon, CodeSquareIcon, FileCodeIcon, FileSymlinkFileIcon, RepoIcon } from '@primer/octicons-react';
const numOfReposInContractedMode = 10;
@@ -31,7 +30,8 @@ const emptyQueryResult: RemoteQueryResult = {
totalResultCount: 0,
executionTimestamp: '',
executionDuration: '',
analysisSummaries: []
analysisSummaries: [],
analysisFailures: [],
};
const downloadAnalysisResults = (analysisSummary: AnalysisSummary) => {
@@ -75,15 +75,17 @@ const sumAnalysesResults = (analysesResults: AnalysisResults[]) =>
const QueryInfo = (queryResult: RemoteQueryResult) => (
<>
<VerticalSpace size={1} />
{queryResult.totalResultCount} results in {queryResult.totalRepositoryCount} repositories
{queryResult.totalResultCount} results from running against {queryResult.totalRepositoryCount} repositories
({queryResult.executionDuration}), {queryResult.executionTimestamp}
<VerticalSpace size={1} />
<span className="vscode-codeql__query-file">{octicons.file}
<span className="vscode-codeql__query-file">
<FileCodeIcon size={16} />
<a className="vscode-codeql__query-file-link" href="#" onClick={() => openQueryFile(queryResult)}>
{queryResult.queryFileName}
</a>
</span>
<span>{octicons.codeSquare}
<span>
<CodeSquareIcon size={16} />
<a className="vscode-codeql__query-file-link" href="#" onClick={() => openQueryTextVirtualFile(queryResult)}>
query
</a>
@@ -91,6 +93,31 @@ const QueryInfo = (queryResult: RemoteQueryResult) => (
</>
);
const Failures = (queryResult: RemoteQueryResult) => {
if (queryResult.analysisFailures.length === 0) {
return <></>;
}
return (
<>
<VerticalSpace size={3} />
<Flash variant="danger">
{queryResult.analysisFailures.map((f, i) => (
<div key={i}>
<p className="vscode-codeql__analysis-failure">
<AlertIcon size={16} />
<b>{f.nwo}: </b>
{f.error}
</p>
{
i === queryResult.analysisFailures.length - 1 ? <></> : <VerticalSpace size={1} />
}
</div>
))}
</Flash>
</>
);
};
const SummaryTitleWithResults = ({
queryResult,
analysesResults
@@ -155,7 +182,7 @@ const SummaryItem = ({
analysisResults: AnalysisResults | undefined
}) => (
<span>
<span className="vscode-codeql__analysis-item">{octicons.repo}</span>
<span className="vscode-codeql__analysis-item"><RepoIcon size={16} /></span>
<span className="vscode-codeql__analysis-item">{analysisSummary.nwo}</span>
<span className="vscode-codeql__analysis-item"><Badge text={analysisSummary.resultCount.toString()} /></span>
<span className="vscode-codeql__analysis-item">
@@ -293,9 +320,10 @@ export function RemoteQueries(): JSX.Element {
try {
return <div>
<ThemeProvider>
<ThemeProvider colorMode="auto">
<ViewTitle>{queryResult.queryTitle}</ViewTitle>
<QueryInfo {...queryResult} />
<Failures {...queryResult} />
<Summary queryResult={queryResult} analysesResults={analysesResults} />
{showAnalysesResults && <AnalysesResults analysesResults={analysesResults} totalResults={queryResult.totalResultCount} />}
</ThemeProvider>

View File

@@ -1,15 +1,3 @@
.octicon {
fill: var(--vscode-editor-foreground);
height: 1.2em;
width: 1.2em;
vertical-align: middle;
display: inline-block;
}
.octicon-light {
opacity: 0.6;
}
.vscode-codeql__query-file {
padding-right: 1em;
}
@@ -64,3 +52,10 @@
.vscode-codeql__analysis-result-file-link {
vertical-align: middle;
}
.vscode-codeql__analysis-failure {
margin: 0;
font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas,
Liberation Mono, monospace;
color: var(--vscode-editor-foreground);
}

View File

@@ -1,7 +1,7 @@
import * as crypto from 'crypto';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as tmp from 'tmp-promise';
import * as path from 'path';
import { nanoid } from 'nanoid';
import {
CancellationToken,
@@ -10,15 +10,14 @@ import {
TextDocument,
TextEditor,
Uri,
window,
workspace
window
} from 'vscode';
import { ErrorCodes, ResponseError } from 'vscode-languageclient';
import * as cli from './cli';
import * as config from './config';
import { DatabaseItem, DatabaseManager } from './databases';
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage, tryGetQueryMetadata } from './helpers';
import { createTimestampFile, getOnDiskWorkspaceFolders, showAndLogErrorMessage, tryGetQueryMetadata, upgradesTmpDir } from './helpers';
import { ProgressCallback, UserCancellationException } from './commandRunner';
import { DatabaseInfo, QueryMetadata } from './pure/interface-types';
import { logger } from './logging';
@@ -33,22 +32,20 @@ import { DecodedBqrsChunk } from './pure/bqrs-cli-types';
/**
* run-queries.ts
* -------------
* --------------
*
* Compiling and running QL queries.
*/
export const tmpDir = tmp.dirSync({ prefix: 'queries_', keep: false, unsafeCleanup: true });
export const upgradesTmpDir = tmp.dirSync({ dir: tmpDir.name, prefix: 'upgrades_', keep: false, unsafeCleanup: true });
export const tmpDirDisposal = {
dispose: () => {
upgradesTmpDir.removeCallback();
tmpDir.removeCallback();
}
};
// exported for testing
export const queriesDir = path.join(tmpDir.name, 'queries');
/**
* Information about which query will be run. `quickEvalPosition` and `quickEvalText`
* are only filled in if the query is a quick query.
*/
interface SelectedQuery {
queryPath: string;
quickEvalPosition?: messages.Position;
quickEvalText?: string;
}
/**
* A collection of evaluation-time information about a query,
@@ -57,14 +54,13 @@ export const queriesDir = path.join(tmpDir.name, 'queries');
* output and results.
*/
export class QueryEvaluationInfo {
readonly querySaveDir: string;
/**
* Note that in the {@link FullQueryInfo.slurp} method, we create a QueryEvaluationInfo instance
* by explicitly setting the prototype in order to avoid calling this constructor.
*/
constructor(
public readonly id: string,
private readonly querySaveDir: string,
public readonly dbItemPath: string,
private readonly databaseHasMetadataFile: boolean,
public readonly queryDbscheme: string, // the dbscheme file the query expects, based on library path resolution
@@ -72,7 +68,7 @@ export class QueryEvaluationInfo {
public readonly metadata?: QueryMetadata,
public readonly templates?: messages.TemplateDefinitions
) {
this.querySaveDir = path.join(queriesDir, this.id);
/**/
}
get dilPath() {
@@ -98,6 +94,14 @@ export class QueryEvaluationInfo {
return path.join(this.querySaveDir, `sortedResults-${resultSetName}.bqrs`);
}
/**
* Creates a file in the query directory that indicates when this query was created.
* This is important for keeping track of when queries should be removed.
*/
async createTimestampFile() {
await createTimestampFile(this.querySaveDir);
}
async run(
qs: qsClient.QueryServerClient,
upgradeQlo: string | undefined,
@@ -126,6 +130,7 @@ export class QueryEvaluationInfo {
id: callbackId,
timeoutSecs: qs.config.timeoutSecs,
};
const dataset: messages.Dataset = {
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: 'default'
@@ -230,6 +235,10 @@ export class QueryEvaluationInfo {
return fs.pathExists(this.csvPath);
}
/**
* Returns the path to the DIL file produced by this query. If the query has not yet produced DIL,
* this will first create the DIL file and then return its path.
*/
async ensureDilPath(qs: qsClient.QueryServerClient): Promise<string> {
if (await this.hasDil()) {
return this.dilPath;
@@ -245,6 +254,10 @@ export class QueryEvaluationInfo {
return this.dilPath;
}
/**
* Creates the CSV file containing the results of this query. This will only be called if the query
* does not have interpreted results and the CSV file does not already exist.
*/
async exportCsvResults(qs: qsClient.QueryServerClient, csvPath: string, onFinish: () => void): Promise<void> {
let stopDecoding = false;
const out = fs.createWriteStream(csvPath);
@@ -261,14 +274,21 @@ export class QueryEvaluationInfo {
pageSize: 100,
offset: nextOffset,
});
for (const tuple of chunk.tuples)
for (const tuple of chunk.tuples) {
out.write(tuple.join(',') + '\n');
}
nextOffset = chunk.next;
}
out.end();
}
async ensureCsvProduced(qs: qsClient.QueryServerClient, dbm: DatabaseManager): Promise<string> {
/**
* Returns the path to the CSV alerts interpretation of this query's results. If CSV results have
* not yet been produced, this will first create the CSV results and then return the path.
*
* This method only works for queries with interpreted results.
*/
async ensureCsvAlerts(qs: qsClient.QueryServerClient, dbm: DatabaseManager): Promise<string> {
if (await this.hasCsv()) {
return this.csvPath;
}
@@ -291,6 +311,13 @@ export class QueryEvaluationInfo {
await qs.cliServer.generateResultsCsv(ensureMetadataIsComplete(this.metadata), this.resultsPaths.resultsPath, this.csvPath, sourceInfo);
return this.csvPath;
}
/**
* Cleans this query's results directory.
*/
async deleteQuery(): Promise<void> {
await fs.remove(this.querySaveDir);
}
}
export interface QueryWithResults {
@@ -324,9 +351,7 @@ export async function clearCacheInDatabase(
}
/**
*
* @param filePath This needs to be equivalent to java Path.toRealPath(NO_FOLLOW_LINKS)
*
* @param filePath This needs to be equivalent to Java's `Path.toRealPath(NO_FOLLOW_LINKS)`
*/
async function convertToQlPath(filePath: string): Promise<string> {
if (process.platform === 'win32') {
@@ -372,9 +397,9 @@ async function getSelectedPosition(editor: TextEditor, range?: Range): Promise<m
/**
* Compare the dbscheme implied by the query `query` and that of the current database.
* If they are compatible, do nothing.
* If they are incompatible but the database can be upgraded, suggest that upgrade.
* If they are incompatible and the database cannot be upgraded, throw an error.
* - If they are compatible, do nothing.
* - If they are incompatible but the database can be upgraded, suggest that upgrade.
* - If they are incompatible and the database cannot be upgraded, throw an error.
*/
async function checkDbschemeCompatibility(
cliServer: cli.CodeQLCliServer,
@@ -422,7 +447,9 @@ async function checkDbschemeCompatibility(
}
function reportNoUpgradePath(qlProgram: messages.QlProgram, query: QueryEvaluationInfo): void {
throw new Error(`Query ${qlProgram.queryPath} expects database scheme ${query.queryDbscheme}, but the current database has a different scheme, and no database upgrades are available. The current database scheme may be newer than the CodeQL query libraries in your workspace.\n\nPlease try using a newer version of the query libraries.`);
throw new Error(
`Query ${qlProgram.queryPath} expects database scheme ${query.queryDbscheme}, but the current database has a different scheme, and no database upgrades are available. The current database scheme may be newer than the CodeQL query libraries in your workspace.\n\nPlease try using a newer version of the query libraries.`
);
}
/**
@@ -467,7 +494,6 @@ async function compileNonDestructiveUpgrade(
qlProgram.dbschemePath = query.queryDbscheme;
// We are new enough that we will always support single file upgrades.
return result.compiledUpgrade;
}
/**
@@ -512,12 +538,6 @@ async function promptUserToSaveChanges(document: TextDocument): Promise<boolean>
return false;
}
type SelectedQuery = {
queryPath: string;
quickEvalPosition?: messages.Position;
quickEvalText?: string;
};
/**
* Determines which QL file to run during an invocation of `Run Query` or `Quick Evaluation`, as follows:
* - If the command was called by clicking on a file, then use that file.
@@ -528,12 +548,19 @@ type SelectedQuery = {
* @param selectedResourceUri The selected resource when the command was run.
* @param quickEval Whether the command being run is `Quick Evaluation`.
*/
export async function determineSelectedQuery(selectedResourceUri: Uri | undefined, quickEval: boolean, range?: Range): Promise<SelectedQuery> {
export async function determineSelectedQuery(
selectedResourceUri: Uri | undefined,
quickEval: boolean,
range?: Range
): Promise<SelectedQuery> {
const editor = window.activeTextEditor;
// Choose which QL file to use.
let queryUri: Uri;
if (selectedResourceUri === undefined) {
if (selectedResourceUri) {
// A resource was passed to the command handler, so use it.
queryUri = selectedResourceUri;
} else {
// No resource was passed to the command handler, so obtain it from the active editor.
// This usually happens when the command is called from the Command Palette.
if (editor === undefined) {
@@ -541,9 +568,6 @@ export async function determineSelectedQuery(selectedResourceUri: Uri | undefine
} else {
queryUri = editor.document.uri;
}
} else {
// A resource was passed to the command handler, so use it.
queryUri = selectedResourceUri;
}
if (queryUri.scheme !== 'file') {
@@ -599,6 +623,7 @@ export async function compileAndRunQueryAgainstDatabase(
qs: qsClient.QueryServerClient,
dbItem: DatabaseItem,
initialInfo: InitialQueryInfo,
queryStorageDir: string,
progress: ProgressCallback,
token: CancellationToken,
templates?: messages.TemplateDefinitions,
@@ -643,13 +668,7 @@ export async function compileAndRunQueryAgainstDatabase(
const metadata = await tryGetQueryMetadata(cliServer, qlProgram.queryPath);
let availableMlModels: cli.MlModelInfo[] = [];
// The `capabilities.untrustedWorkspaces.restrictedConfigurations` entry in package.json doesn't
// work with hidden settings, so we manually check that the workspace is trusted before looking at
// whether the `shouldInsecurelyLoadMlModelsFromPacks` setting is enabled.
if (workspace.isTrusted &&
config.isCanary() &&
config.shouldInsecurelyLoadMlModelsFromPacks() &&
await cliServer.cliConstraints.supportsResolveMlModels()) {
if (await cliServer.cliConstraints.supportsResolveMlModels()) {
try {
availableMlModels = (await cliServer.resolveMlModels(diskWorkspaceFolders)).models;
void logger.log(`Found available ML models at the following paths: ${availableMlModels.map(x => `'${x.path}'`).join(', ')}.`);
@@ -662,7 +681,7 @@ export async function compileAndRunQueryAgainstDatabase(
const hasMetadataFile = (await dbItem.hasMetadataFile());
const query = new QueryEvaluationInfo(
initialInfo.id,
path.join(queryStorageDir, initialInfo.id),
dbItem.databaseUri.fsPath,
hasMetadataFile,
packConfig.dbscheme,
@@ -670,11 +689,13 @@ export async function compileAndRunQueryAgainstDatabase(
metadata,
templates
);
await query.createTimestampFile();
const upgradeDir = await tmp.dir({ dir: upgradesTmpDir.name, unsafeCleanup: true });
let upgradeDir: tmp.DirectoryResult | undefined;
try {
let upgradeQlo;
if (await hasNondestructiveUpgradeCapabilities(qs)) {
upgradeDir = await tmp.dir({ dir: upgradesTmpDir, unsafeCleanup: true });
upgradeQlo = await compileNonDestructiveUpgrade(qs, upgradeDir, query, qlProgram, dbItem, progress, token);
} else {
await checkDbschemeCompatibility(cliServer, qs, query, qlProgram, dbItem, progress, token);
@@ -733,13 +754,23 @@ export async function compileAndRunQueryAgainstDatabase(
}
} finally {
try {
await upgradeDir.cleanup();
await upgradeDir?.cleanup();
} catch (e) {
void qs.logger.log(`Could not clean up the upgrades dir. Reason: ${e.message || e}`);
}
}
}
/**
* Determines the initial information for a query. This is everything of interest
* we know about this query that is available before it is run.
*
* @param selectedQueryUri The Uri of the document containing the query to be run.
* @param databaseInfo The database to run the query against.
* @param isQuickEval true if this is a quick evaluation.
* @param range the selection range of the query to be run. Only used if isQuickEval is true.
* @returns The initial information for the query to be run.
*/
export async function createInitialQueryInfo(
selectedQueryUri: Uri | undefined,
databaseInfo: DatabaseInfo,
@@ -770,12 +801,14 @@ const compilationFailedErrorTail = ' compilation failed. Please make sure there
' and the query and database use the same target language. For more details on the error, go to View > Output,' +
' and choose CodeQL Query Server from the dropdown.';
/**
* Create a synthetic result for a query that failed to compile.
*/
function createSyntheticResult(
query: QueryEvaluationInfo,
message: string,
resultType: number
): QueryWithResults {
return {
query,
result: {

View File

@@ -1,10 +1,9 @@
import * as vscode from 'vscode';
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage } from './helpers';
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage, tmpDir } from './helpers';
import { ProgressCallback, UserCancellationException } from './commandRunner';
import { logger } from './logging';
import * as messages from './pure/messages';
import * as qsClient from './queryserver-client';
import { upgradesTmpDir } from './run-queries';
import * as tmp from 'tmp-promise';
import * as path from 'path';
import * as semver from 'semver';
@@ -180,7 +179,7 @@ export async function upgradeDatabaseExplicit(
if (finalDbscheme === undefined) {
throw new Error('Could not determine target dbscheme to upgrade to.');
}
const currentUpgradeTmp = await tmp.dir({ dir: upgradesTmpDir.name, prefix: 'upgrade_', keep: false, unsafeCleanup: true });
const currentUpgradeTmp = await tmp.dir({ dir: tmpDir.name, prefix: 'upgrade_', keep: false, unsafeCleanup: true });
try {
let compileUpgradeResult: messages.CompileUpgradeResult;
try {

View File

@@ -20,23 +20,3 @@ export const listUnordered = <svg className="octicon octicon-light" width="16" h
export const info = <svg className="octicon octicon-light" width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg" >
<path fillRule="evenodd" clipRule="evenodd" d="M8.568 1.03a6.8 6.8 0 0 1 4.192 2.02 7.06 7.06 0 0 1 .46 9.39 6.85 6.85 0 0 1-8.58 1.74 7 7 0 0 1-3.12-3.5 7.12 7.12 0 0 1-.23-4.71 7 7 0 0 1 2.77-3.79 6.8 6.8 0 0 1 4.508-1.15zm.472 12.85a5.89 5.89 0 0 0 3.41-2.07 6.07 6.07 0 0 0-.4-8.06 5.82 5.82 0 0 0-7.43-.74 6.06 6.06 0 0 0 .5 10.29 5.81 5.81 0 0 0 3.92.58zM8.51 7h-1v4h1V7zm0-2h-1v1h1V5z" />
</svg>;
/**
* The icons below come from https://primer.style/octicons/
*/
export const file = <svg className="octicon octicon-light" width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fillRule="evenodd" d="M3.75 1.5a.25.25 0 00-.25.25v11.5c0 .138.112.25.25.25h8.5a.25.25 0 00.25-.25V6H9.75A1.75 1.75 0 018 4.25V1.5H3.75zm5.75.56v2.19c0 .138.112.25.25.25h2.19L9.5 2.06zM2 1.75C2 .784 2.784 0 3.75 0h5.086c.464 0 .909.184 1.237.513l3.414 3.414c.329.328.513.773.513 1.237v8.086A1.75 1.75 0 0112.25 15h-8.5A1.75 1.75 0 012 13.25V1.75z"></path>
</svg>;
export const codeSquare = <svg className="octicon octicon-light" width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fillRule="evenodd" d="M1.75 1.5a.25.25 0 00-.25.25v12.5c0 .138.112.25.25.25h12.5a.25.25 0 00.25-.25V1.75a.25.25 0 00-.25-.25H1.75zM0 1.75C0 .784.784 0 1.75 0h12.5C15.216 0 16 .784 16 1.75v12.5A1.75 1.75 0 0114.25 16H1.75A1.75 1.75 0 010 14.25V1.75zm9.22 3.72a.75.75 0 000 1.06L10.69 8 9.22 9.47a.75.75 0 101.06 1.06l2-2a.75.75 0 000-1.06l-2-2a.75.75 0 00-1.06 0zM6.78 6.53a.75.75 0 00-1.06-1.06l-2 2a.75.75 0 000 1.06l2 2a.75.75 0 101.06-1.06L5.31 8l1.47-1.47z"></path>
</svg>;
export const repo = <svg className="octicon octicon-light" width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fillRule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path>
</svg>;
export const download = <svg className="octicon octicon-light" width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fillRule="evenodd" d="M7.47 10.78a.75.75 0 001.06 0l3.75-3.75a.75.75 0 00-1.06-1.06L8.75 8.44V1.75a.75.75 0 00-1.5 0v6.69L4.78 5.97a.75.75 0 00-1.06 1.06l3.75 3.75zM3.75 13a.75.75 0 000 1.5h8.5a.75.75 0 000-1.5h-8.5z"></path>
</svg>;

View File

@@ -16,6 +16,7 @@ import { CodeQLCliServer } from '../../cli';
import { QueryServerClient } from '../../queryserver-client';
import { skipIfNoCodeQL } from '../ensureCli';
import { QueryResultType } from '../../pure/messages';
import { tmpDir } from '../../helpers';
/**
@@ -97,6 +98,7 @@ describe('Queries', function() {
qs,
dbItem,
await mockInitialQueryInfo(queryPath),
path.join(tmpDir.name, 'mock-storage-path'),
progress,
token
);
@@ -119,6 +121,7 @@ describe('Queries', function() {
qs,
dbItem,
await mockInitialQueryInfo(queryPath),
path.join(tmpDir.name, 'mock-storage-path'),
progress,
token
);

View File

@@ -96,12 +96,16 @@ describe('Remote queries', function() {
expect(fs.existsSync(path.join(queryPackDir, 'not-in-pack.ql'))).to.be.false;
// the compiled pack
const compiledPackDir = path.join(queryPackDir, '.codeql/pack/github/remote-query-pack/0.0.0/');
const compiledPackDir = path.join(queryPackDir, '.codeql/pack/codeql-remote/query/0.0.0/');
printDirectoryContents(compiledPackDir);
expect(fs.existsSync(path.join(compiledPackDir, 'in-pack.ql'))).to.be.true;
expect(fs.existsSync(path.join(compiledPackDir, 'lib.qll'))).to.be.true;
expect(fs.existsSync(path.join(compiledPackDir, 'qlpack.yml'))).to.be.true;
// should have generated a correct qlpack file
const qlpackContents: any = yaml.safeLoad(fs.readFileSync(path.join(compiledPackDir, 'qlpack.yml'), 'utf8'));
expect(qlpackContents.name).to.equal('codeql-remote/query');
// depending on the cli version, we should have one of these files
expect(
fs.existsSync(path.join(compiledPackDir, 'qlpack.lock.yml')) ||
@@ -211,7 +215,7 @@ describe('Remote queries', function() {
expect(fs.existsSync(path.join(queryPackDir, 'not-in-pack.ql'))).to.be.false;
// the compiled pack
const compiledPackDir = path.join(queryPackDir, '.codeql/pack/github/remote-query-pack/0.0.0/');
const compiledPackDir = path.join(queryPackDir, '.codeql/pack/codeql-remote/query/0.0.0/');
printDirectoryContents(compiledPackDir);
expect(fs.existsSync(path.join(compiledPackDir, 'otherfolder/lib.qll'))).to.be.true;
expect(fs.existsSync(path.join(compiledPackDir, 'subfolder/in-pack.ql'))).to.be.true;

View File

@@ -0,0 +1,23 @@
import * as path from 'path';
import * as fs from 'fs-extra';
/**
* Recursively walk a directory and return the full path to all files found.
* Note that this function uses synchronous fs calls, so it should only be used in tests.
*
* @param dir the directory to walk
*
* @return An iterator over the full paths of all files recursively found in the directory.
*/
export function* walk(dir: string): IterableIterator<string> {
const files = fs.readdirSync(dir);
for (const file of files) {
const filePath = path.join(dir, file);
const stat = fs.statSync(filePath);
if (stat.isDirectory()) {
yield* walk(filePath);
} else {
yield filePath;
}
}
}
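A small usage sketch, assuming `walk` is imported from this helper module and pointed at a hypothetical directory; because the generator is synchronous, a plain for...of loop is enough:

// Count the SARIF files under a (hypothetical) downloaded-artifacts directory.
let sarifCount = 0;
for (const filePath of walk('/tmp/downloaded-artifacts')) {
  if (filePath.endsWith('.sarif')) {
    sarifCount++;
  }
}
console.log(`Found ${sarifCount} SARIF file(s)`);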

View File

@@ -0,0 +1,16 @@
import { Disposable } from 'vscode';
import { DisposableObject } from '../pure/disposable-object';
/**
* A simple disposable object that does nothing other than contain a list of disposable objects.
* This is useful for implementing a `Disposable` that owns other disposable objects.
*/
export class DisposableBucket extends DisposableObject {
/**
* Add a disposable object to this bucket.
* @param obj The object to add.
*/
public push<T extends Disposable>(obj: T): T {
return super.push(obj);
}
}
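A usage sketch, under the assumption that the inherited `DisposableObject.dispose()` disposes everything previously passed to `push` (which is what the class doc implies); the `EventEmitter` is just a convenient stand-in for any `Disposable`:

import { EventEmitter } from 'vscode';
// DisposableBucket is assumed to be imported from the module above.

// Collect the disposables created while wiring something up, then tear them all
// down with a single dispose() call.
const bucket = new DisposableBucket();
const emitter = bucket.push(new EventEmitter<void>());          // push returns the object it was given
bucket.push(emitter.event(() => { /* react to the event */ })); // event registration is also a Disposable

// ... later, e.g. in a test's afterEach():
bucket.dispose();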

View File

@@ -44,7 +44,7 @@ const _10MB = _1MB * 10;
// CLI version to test. Hard code the latest as default. And be sure
// to update the env if it is not otherwise set.
const CLI_VERSION = process.env.CLI_VERSION || 'v2.8.0';
const CLI_VERSION = process.env.CLI_VERSION || 'v2.8.1';
process.env.CLI_VERSION = CLI_VERSION;
// Base dir where CLIs will be downloaded into

View File

@@ -15,6 +15,9 @@ describe('launching with a minimal workspace', async () => {
assert(ext);
});
// Note: this test will only pass in a pristine workspace. If you run it locally and reuse an
// existing workspace that starts with an open QL file, the test will fail. There is no need to
// make any changes, since it will still pass on CI.
it('should not activate the extension at first', () => {
assert(ext!.isActive === false);
});

View File

@@ -0,0 +1,11 @@
/**
* @name MRVA Integration test 1
* @kind problem
* @problem.severity warning
* @id javascript/integration-test-1
*/
import javascript
from MemberDeclaration md
where md.getName() = "dispose"
select md, "Dispose method"

View File

@@ -0,0 +1,11 @@
/**
* @name MRVA Integration test 2
* @kind problem
* @problem.severity warning
* @id javascript/integration-test-2
*/
import javascript
from MemberDeclaration md
where md.getName() = "refresh"
select md, "Refresh method"

View File

@@ -0,0 +1,16 @@
"md","col1"
"dispose ... ();\n }","Dispose method"
"readonl ... > void;","Dispose method"
"async d ... }\n }","Dispose method"
"dispose(): any;","Dispose method"
"public ... }\n }","Dispose method"
"dispose: () => void;","Dispose method"
"dispose ... ');\n }","Dispose method"
"dispose ... ();\n }","Dispose method"
"public ... ();\n }","Dispose method"
"readonl ... > void;","Dispose method"
"dispose(): unknown","Dispose method"
"dispose ... inonSpy","Dispose method"
"dispose ... inonSpy","Dispose method"
"dispose ... inonSpy","Dispose method"
"dispose ... inonSpy","Dispose method"

View File

@@ -0,0 +1,19 @@
## github/vscode-codeql
| - | Message |
| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------- |
| [dispose &#46;&#46;&#46; &#40;&#41;;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/cli.ts#L211) | Dispose method |
| [readonl &#46;&#46;&#46; &#62; void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/extension.ts#L166) | Dispose method |
| [async d &#46;&#46;&#46; &#125;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/logging.ts#L151) | Dispose method |
| [dispose&#40;&#41;: any;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/pure/disposable-object.ts#L5) | Dispose method |
| [public &#46;&#46;&#46; &#125;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/pure/disposable-object.ts#L65) | Dispose method |
| [dispose: &#40;&#41; =&#62; void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/query-results.ts#L54) | Dispose method |
| [dispose &#46;&#46;&#46; '&#41;;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/queryserver-client.ts#L32) | Dispose method |
| [dispose &#46;&#46;&#46; &#40;&#41;;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/telemetry.ts#L129) | Dispose method |
| [public &#46;&#46;&#46; &#40;&#41;;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/test-ui.ts#L54) | Dispose method |
| [readonl &#46;&#46;&#46; &#62; void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/run-queries.ts#L327) | Dispose method |
| [dispose&#40;&#41;: unknown](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/vscode-tests/no-workspace/helpers.test.ts#L150) | Dispose method |
| [dispose &#46;&#46;&#46; inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L12) | Dispose method |
| [dispose &#46;&#46;&#46; inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L13) | Dispose method |
| [dispose &#46;&#46;&#46; inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L14) | Dispose method |
| [dispose &#46;&#46;&#46; inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L15) | Dispose method |

View File

@@ -0,0 +1,16 @@
"md","col1"
"dispose ... ();\n }","Dispose method"
"readonl ... > void;","Dispose method"
"async d ... }\n }","Dispose method"
"dispose(): any;","Dispose method"
"public ... }\n }","Dispose method"
"dispose: () => void;","Dispose method"
"dispose ... ');\n }","Dispose method"
"dispose ... ();\n }","Dispose method"
"public ... ();\n }","Dispose method"
"readonl ... > void;","Dispose method"
"dispose(): unknown","Dispose method"
"dispose ... inonSpy","Dispose method"
"dispose ... inonSpy","Dispose method"
"dispose ... inonSpy","Dispose method"
"dispose ... inonSpy","Dispose method"

View File

@@ -0,0 +1,19 @@
## github/vscode-codeql
| - | Message |
| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------- |
| [dispose &#46;&#46;&#46; &#40;&#41;;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/cli.ts#L211) | Dispose method |
| [readonl &#46;&#46;&#46; &#62; void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/extension.ts#L166) | Dispose method |
| [async d &#46;&#46;&#46; &#125;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/logging.ts#L151) | Dispose method |
| [dispose&#40;&#41;: any;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/pure/disposable-object.ts#L5) | Dispose method |
| [public &#46;&#46;&#46; &#125;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/pure/disposable-object.ts#L65) | Dispose method |
| [dispose: &#40;&#41; =&#62; void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/query-results.ts#L54) | Dispose method |
| [dispose &#46;&#46;&#46; '&#41;;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/queryserver-client.ts#L32) | Dispose method |
| [dispose &#46;&#46;&#46; &#40;&#41;;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/telemetry.ts#L129) | Dispose method |
| [public &#46;&#46;&#46; &#40;&#41;;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/test-ui.ts#L54) | Dispose method |
| [readonl &#46;&#46;&#46; &#62; void;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/run-queries.ts#L327) | Dispose method |
| [dispose&#40;&#41;: unknown](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/vscode-tests/no-workspace/helpers.test.ts#L150) | Dispose method |
| [dispose &#46;&#46;&#46; inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L12) | Dispose method |
| [dispose &#46;&#46;&#46; inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L13) | Dispose method |
| [dispose &#46;&#46;&#46; inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L14) | Dispose method |
| [dispose &#46;&#46;&#46; inonSpy](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/test/pure-tests/disposable-object.test.ts#L15) | Dispose method |

View File

@@ -0,0 +1,29 @@
{
"executionEndTime": 1645645080281,
"analysisSummaries": [
{
"nwo": "github/vscode-codeql",
"resultCount": 15,
"fileSizeInBytes": 191025,
"downloadLink": {
"id": "171543249",
"urlPath": "/repos/avocado-corp/hucairz/actions/artifacts/171543249",
"innerFilePath": "results.sarif",
"queryId": "MRVA Integration test 1-6sBi6oaky_fxqXW2NA4bx"
}
},
{
"nwo": "other/hucairz",
"resultCount": 15,
"fileSizeInBytes": 191025,
"downloadLink": {
"id": "11111111",
"urlPath": "/repos/avocado-corp/hucairz/actions/artifacts/11111111",
"innerFilePath": "results.sarif",
"queryId": "MRVA Integration test 1-6sBi6oaky_fxqXW2NA4bx"
}
}
],
"analysisFailures": [],
"queryId": "MRVA Integration test 1-6sBi6oaky_fxqXW2NA4bx"
}

View File

@@ -0,0 +1,17 @@
{
"queryName": "MRVA Integration test 1",
"queryFilePath": "PLACEHOLDER/q0.ql",
"queryText": "/**\n * @name MRVA Integration test 1\n * @kind problem\n * @problem.severity warning\n * @id javascript/integration-test-1\n */\nimport javascript\n\nfrom MemberDeclaration md\nwhere md.getName() = \"dispose\"\nselect md, \"Dispose method\"\n",
"controllerRepository": {
"owner": "dsp-testing",
"name": "qc-run2"
},
"repositories": [
{
"owner": "github",
"name": "vscode-codeql"
}
],
"executionStartTime": 1645644967533,
"actionsWorkflowRunId": 1889315769
}

View File

@@ -0,0 +1,6 @@
"md","col1"
"refresh ... d);\n }","Refresh method"
"refresh ... <void>;","Refresh method"
"public ... }\n }","Refresh method"
"public ... }\n }","Refresh method"
"refresh ... d);\n }","Refresh method"

View File

@@ -0,0 +1,9 @@
## github/vscode-codeql
| - | Message |
| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -------------- |
| [refresh &#46;&#46;&#46; d&#41;;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/astViewer.ts#L58) | Refresh method |
| [refresh &#46;&#46;&#46; &#60;void&#62;;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/databases.ts#L234) | Refresh method |
| [public &#46;&#46;&#46; &#125;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/databases.ts#L354) | Refresh method |
| [public &#46;&#46;&#46; &#125;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/discovery.ts#L21) | Refresh method |
| [refresh &#46;&#46;&#46; d&#41;;&#92;n &#125;](https://github.com/github/vscode-codeql/blob/c943c89fc694a06e95845c0b7b7c4e71983dd8c4/extensions/ql-vscode/src/query-history.ts#L268) | Refresh method |

View File

@@ -0,0 +1,18 @@
{
"executionEndTime": 1645645150738,
"analysisSummaries": [
{
"nwo": "github/vscode-codeql",
"resultCount": 5,
"fileSizeInBytes": 81237,
"downloadLink": {
"id": "171544171",
"urlPath": "/repos/avocado-corp/hucairz/actions/artifacts/171544171",
"innerFilePath": "results.sarif",
"queryId": "MRVA Integration test 2-UL-vbKAjP8ffObxjsp7hN"
}
}
],
"analysisFailures": [],
"queryId": "MRVA Integration test 2-UL-vbKAjP8ffObxjsp7hN"
}

View File

@@ -0,0 +1,17 @@
{
"queryName": "MRVA Integration test 2",
"queryFilePath": "PLACEHOLDER/q1.ql",
"queryText": "/**\n * @name MRVA Integration test 2\n * @kind problem\n * @problem.severity warning\n * @id javascript/integration-test-2\n */\nimport javascript\n\nfrom MemberDeclaration md\nwhere md.getName() = \"refresh\"\nselect md, \"Refresh method\"\n",
"controllerRepository": {
"owner": "dsp-testing",
"name": "qc-run2"
},
"repositories": [
{
"owner": "github",
"name": "vscode-codeql"
}
],
"executionStartTime": 1645644973911,
"actionsWorkflowRunId": 1889316048
}

View File

@@ -0,0 +1,53 @@
{
"version": 1,
"queries": [
{
"t": "remote",
"status": "Completed",
"completed": true,
"queryId": "MRVA Integration test 1-6sBi6oaky_fxqXW2NA4bx",
"label": "MRVA Integration test 1",
"remoteQuery": {
"queryName": "MRVA Integration test 1",
"queryFilePath": "PLACEHOLDER/q0.ql",
"queryText": "/**\n * @name MRVA Integration test 1\n * @kind problem\n * @problem.severity warning\n * @id javascript/integration-test-1\n */\nimport javascript\n\nfrom MemberDeclaration md\nwhere md.getName() = \"dispose\"\nselect md, \"Dispose method\"\n",
"controllerRepository": {
"owner": "dsp-testing",
"name": "qc-run2"
},
"repositories": [
{
"owner": "github",
"name": "vscode-codeql"
}
],
"executionStartTime": 1645644967533,
"actionsWorkflowRunId": 1889315769
}
},
{
"t": "remote",
"status": "Completed",
"completed": true,
"queryId": "MRVA Integration test 2-UL-vbKAjP8ffObxjsp7hN",
"label": "MRVA Integration test 2",
"remoteQuery": {
"queryName": "MRVA Integration test 2",
"queryFilePath": "PLACEHOLDER/q1.ql",
"queryText": "/**\n * @name MRVA Integration test 2\n * @kind problem\n * @problem.severity warning\n * @id javascript/integration-test-2\n */\nimport javascript\n\nfrom MemberDeclaration md\nwhere md.getName() = \"refresh\"\nselect md, \"Refresh method\"\n",
"controllerRepository": {
"owner": "dsp-testing",
"name": "qc-run2"
},
"repositories": [
{
"owner": "github",
"name": "vscode-codeql"
}
],
"executionStartTime": 1645644973911,
"actionsWorkflowRunId": 1889316048
}
}
]
}

View File

@@ -1,24 +1,31 @@
import * as fs from 'fs-extra';
import * as path from 'path';
import * as chai from 'chai';
import 'mocha';
import 'sinon-chai';
import * as vscode from 'vscode';
import * as sinon from 'sinon';
import * as chaiAsPromised from 'chai-as-promised';
import { logger } from '../../logging';
import { registerQueryHistoryScubber } from '../../query-history-scrubber';
import { QueryHistoryManager, HistoryTreeDataProvider, SortOrder } from '../../query-history';
import { QueryEvaluationInfo, QueryWithResults } from '../../run-queries';
import { QueryHistoryConfigListener } from '../../config';
import * as messages from '../../pure/messages';
import { QueryServerClient } from '../../queryserver-client';
import { FullQueryInfo, InitialQueryInfo } from '../../query-results';
import { LocalQueryInfo, InitialQueryInfo } from '../../query-results';
import { DatabaseManager } from '../../databases';
import * as tmp from 'tmp-promise';
import { ONE_DAY_IN_MS, ONE_HOUR_IN_MS, TWO_HOURS_IN_MS, THREE_HOURS_IN_MS } from '../../pure/helpers-pure';
import { tmpDir } from '../../helpers';
chai.use(chaiAsPromised);
const expect = chai.expect;
const assert = chai.assert;
describe('query-history', () => {
const mockExtensionLocation = path.join(tmpDir.name, 'mock-extension-location');
let configListener: QueryHistoryConfigListener;
let showTextDocumentSpy: sinon.SinonStub;
let showInformationMessageSpy: sinon.SinonStub;
@@ -100,7 +107,7 @@ describe('query-history', () => {
});
});
let allHistory: FullQueryInfo[];
let allHistory: LocalQueryInfo[];
beforeEach(() => {
allHistory = [
@@ -200,15 +207,11 @@ describe('query-history', () => {
expect(queryHistoryManager.treeDataProvider.getCurrent()).to.be.undefined;
});
it('should throw if there is no selection', async () => {
it('should do nothing if there is no selection', async () => {
queryHistoryManager = await createMockQueryHistory(allHistory);
try {
await queryHistoryManager.handleItemClicked(undefined!, []);
expect(true).to.be.false;
} catch (e) {
expect(selectedCallback).not.to.have.been.called;
expect(e.message).to.contain('No query selected');
}
await queryHistoryManager.handleItemClicked(undefined!, []);
expect(selectedCallback).not.to.have.been.called;
expect(queryHistoryManager.treeDataProvider.getCurrent()).to.be.undefined;
});
});
@@ -312,7 +315,7 @@ describe('query-history', () => {
describe('HistoryTreeDataProvider', () => {
let historyTreeDataProvider: HistoryTreeDataProvider;
beforeEach(() => {
historyTreeDataProvider = new HistoryTreeDataProvider(vscode.Uri.file('/a/b/c').fsPath);
historyTreeDataProvider = new HistoryTreeDataProvider(vscode.Uri.file(mockExtensionLocation).fsPath);
});
afterEach(() => {
@@ -327,29 +330,30 @@ describe('query-history', () => {
title: 'Query History Item',
command: 'codeQLQueryHistory.itemClicked',
arguments: [mockQuery],
tooltip: mockQuery.label,
});
expect(treeItem.label).to.contain('hucairz');
expect(treeItem.contextValue).to.eq('rawResultsItem');
expect(treeItem.iconPath).to.deep.eq(vscode.Uri.file('/a/b/c/media/drive.svg').fsPath);
expect(treeItem.iconPath).to.deep.eq(vscode.Uri.file(mockExtensionLocation + '/media/drive.svg').fsPath);
});
it('should get a tree item with interpreted results', async () => {
const mockQuery = createMockFullQueryInfo('a', createMockQueryWithResults(true, /* interpreted results */ true));
const treeItem = await historyTreeDataProvider.getTreeItem(mockQuery);
expect(treeItem.contextValue).to.eq('interpretedResultsItem');
expect(treeItem.iconPath).to.deep.eq(vscode.Uri.file('/a/b/c/media/drive.svg').fsPath);
expect(treeItem.iconPath).to.deep.eq(vscode.Uri.file(mockExtensionLocation + '/media/drive.svg').fsPath);
});
it('should get a tree item that did not complete successfully', async () => {
const mockQuery = createMockFullQueryInfo('a', createMockQueryWithResults(false), false);
const treeItem = await historyTreeDataProvider.getTreeItem(mockQuery);
expect(treeItem.iconPath).to.eq(vscode.Uri.file('/a/b/c/media/red-x.svg').fsPath);
expect(treeItem.iconPath).to.eq(vscode.Uri.file(mockExtensionLocation + '/media/red-x.svg').fsPath);
});
it('should get a tree item that failed before creating any results', async () => {
const mockQuery = createMockFullQueryInfo('a', undefined, true);
const treeItem = await historyTreeDataProvider.getTreeItem(mockQuery);
expect(treeItem.iconPath).to.eq(vscode.Uri.file('/a/b/c/media/red-x.svg').fsPath);
expect(treeItem.iconPath).to.eq(vscode.Uri.file(mockExtensionLocation + '/media/red-x.svg').fsPath);
});
it('should get a tree item that is in progress', async () => {
@@ -512,20 +516,23 @@ describe('query-history', () => {
},
completedQuery: {
resultCount,
}
},
t: 'local'
};
}
});
function createMockFullQueryInfo(dbName = 'a', queryWitbResults?: QueryWithResults, isFail = false): FullQueryInfo {
const fqi = new FullQueryInfo(
function createMockFullQueryInfo(dbName = 'a', queryWitbResults?: QueryWithResults, isFail = false): LocalQueryInfo {
const fqi = new LocalQueryInfo(
{
databaseInfo: { name: dbName },
start: new Date(),
queryPath: 'hucairz'
} as InitialQueryInfo,
configListener,
{} as vscode.CancellationTokenSource
{
dispose: () => { /**/ },
} as vscode.CancellationTokenSource
);
if (queryWitbResults) {
@@ -537,32 +544,211 @@ describe('query-history', () => {
return fqi;
}
describe('query history scrubber', () => {
let clock: sinon.SinonFakeTimers;
let deregister: vscode.Disposable | undefined;
let mockCtx: vscode.ExtensionContext;
let runCount = 0;
// We don't want our times to align exactly with the hour,
// so we can better mimic real life
const LESS_THAN_ONE_DAY = ONE_DAY_IN_MS - 1000;
const tmpDir = tmp.dirSync({
unsafeCleanup: true
});
beforeEach(() => {
clock = sandbox.useFakeTimers({
toFake: ['setInterval', 'Date']
});
mockCtx = {
globalState: {
lastScrubTime: Date.now(),
get(key: string) {
if (key !== 'lastScrubTime') {
throw new Error(`Unexpected key: ${key}`);
}
return this.lastScrubTime;
},
async update(key: string, value: any) {
if (key !== 'lastScrubTime') {
throw new Error(`Unexpected key: ${key}`);
}
this.lastScrubTime = value;
}
}
} as any as vscode.ExtensionContext;
});
afterEach(() => {
clock.restore();
if (deregister) {
deregister.dispose();
deregister = undefined;
}
});
it('should not throw an error when the query directory does not exist', async function() {
// because of the waits, we need to have a higher timeout on this test.
this.timeout(5000);
registerScrubber('idontexist');
clock.tick(ONE_HOUR_IN_MS);
await wait();
expect(runCount, 'Should not have called the scrubber').to.eq(0);
clock.tick(ONE_HOUR_IN_MS - 1);
await wait();
expect(runCount, 'Should not have called the scrubber').to.eq(0);
clock.tick(1);
await wait();
expect(runCount, 'Should have called the scrubber once').to.eq(1);
clock.tick(TWO_HOURS_IN_MS);
await wait();
expect(runCount, 'Should have called the scrubber a second time').to.eq(2);
expect((mockCtx.globalState as any).lastScrubTime).to.eq(TWO_HOURS_IN_MS * 2, 'Should have scrubbed the last time at 4 hours.');
});
it('should scrub directories', async function() {
this.timeout(5000);
// create three query directories whose timestamps are right around the cut-off time
const queryDir = createMockQueryDir(ONE_HOUR_IN_MS, TWO_HOURS_IN_MS, THREE_HOURS_IN_MS);
registerScrubber(queryDir);
clock.tick(TWO_HOURS_IN_MS);
await wait();
// should have deleted only the invalid locations
expectDirectories(
queryDir,
toQueryDirName(ONE_HOUR_IN_MS),
toQueryDirName(TWO_HOURS_IN_MS),
toQueryDirName(THREE_HOURS_IN_MS),
);
clock.tick(LESS_THAN_ONE_DAY);
await wait();
// nothing should have happened...yet
expectDirectories(
queryDir,
toQueryDirName(ONE_HOUR_IN_MS),
toQueryDirName(TWO_HOURS_IN_MS),
toQueryDirName(THREE_HOURS_IN_MS),
);
clock.tick(1000);
await wait();
// should have deleted the two older directories:
// even though they have different timestamps,
// they both expire during the same scrubbing period
expectDirectories(
queryDir,
toQueryDirName(THREE_HOURS_IN_MS),
);
// Wait until the next scrub time, when the final directory is deleted
clock.tick(TWO_HOURS_IN_MS);
await wait();
// should have deleted everything
expectDirectories(
queryDir
);
});
function expectDirectories(queryDir: string, ...dirNames: string[]) {
const files = fs.readdirSync(queryDir);
expect(files.sort()).to.deep.eq(dirNames.sort());
}
function createMockQueryDir(...timestamps: number[]) {
const dir = tmpDir.name;
const queryDir = path.join(dir, 'query');
// create the query directory and fill it with some query directories
fs.mkdirSync(queryDir);
// create an invalid file
const invalidFile = path.join(queryDir, 'invalid.txt');
fs.writeFileSync(invalidFile, 'invalid');
// create a directory without a timestamp file
const noTimestampDir = path.join(queryDir, 'noTimestampDir');
fs.mkdirSync(noTimestampDir);
fs.writeFileSync(path.join(noTimestampDir, 'invalid.txt'), 'invalid');
// create a directory with a timestamp file whose contents are invalid
const invalidTimestampDir = path.join(queryDir, 'invalidTimestampDir');
fs.mkdirSync(invalidTimestampDir);
fs.writeFileSync(path.join(invalidTimestampDir, 'timestamp'), 'invalid');
// create directories with valid timestamp files from the args
timestamps.forEach((timestamp) => {
const dir = path.join(queryDir, toQueryDirName(timestamp));
fs.mkdirSync(dir);
fs.writeFileSync(path.join(dir, 'timestamp'), `${Date.now() + timestamp}`);
});
return queryDir;
}
function toQueryDirName(timestamp: number) {
return `query-${timestamp}`;
}
function registerScrubber(dir: string) {
deregister = registerQueryHistoryScubber(
ONE_HOUR_IN_MS,
TWO_HOURS_IN_MS,
LESS_THAN_ONE_DAY,
dir,
mockCtx,
{
increment: () => runCount++
}
);
}
async function wait(ms = 500) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
});
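For orientation, here is a minimal sketch of the wake/scrub pattern these fake-timer assertions exercise. It is an inference from the test expectations, not the actual registerQueryHistoryScubber implementation: the function name and parameters below are illustrative, and the only assumed behaviour is "wake every wakeIntervalMs, scrub once scrubIntervalMs has elapsed since the lastScrubTime persisted in globalState".
// Illustrative sketch only; see registerQueryHistoryScubber for the real logic.
function sketchRegisterScrubber(
  wakeIntervalMs: number,
  scrubIntervalMs: number,
  ctx: vscode.ExtensionContext,
  scrub: () => Promise<void>
): vscode.Disposable {
  const handle = setInterval(async () => {
    const last = ctx.globalState.get<number>('lastScrubTime') ?? Date.now();
    if (Date.now() - last >= scrubIntervalMs) {
      // persist the scrub time so later wake-ups measure elapsed time against it
      await ctx.globalState.update('lastScrubTime', Date.now());
      await scrub();
    }
  }, wakeIntervalMs);
  return new vscode.Disposable(() => clearInterval(handle));
}
With the fake clock above, this pattern reproduces the expectations: no scrub at one hour, a first scrub at two hours, and a second at four hours, with lastScrubTime ending up at TWO_HOURS_IN_MS * 2.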
function createMockQueryWithResults(didRunSuccessfully = true, hasInterpretedResults = true): QueryWithResults {
return {
query: {
hasInterpretedResults: () => Promise.resolve(hasInterpretedResults)
} as QueryEvaluationInfo,
hasInterpretedResults: () => Promise.resolve(hasInterpretedResults),
deleteQuery: sandbox.stub(),
} as unknown as QueryEvaluationInfo,
result: {
resultType: didRunSuccessfully
? messages.QueryResultType.SUCCESS
: messages.QueryResultType.OTHER_ERROR
} as messages.EvaluationResult,
dispose: sandbox.spy(),
dispose: sandbox.spy()
};
}
async function createMockQueryHistory(allHistory: FullQueryInfo[]) {
async function createMockQueryHistory(allHistory: LocalQueryInfo[]) {
const qhm = new QueryHistoryManager(
{} as QueryServerClient,
{} as DatabaseManager,
'xxx',
{
globalStorageUri: vscode.Uri.file(mockExtensionLocation),
extensionPath: vscode.Uri.file('/x/y/z').fsPath,
} as vscode.ExtensionContext,
configListener,
selectedCallback,
doCompareCallback
);
qhm.onWillOpenQueryItem(selectedCallback);
(qhm.treeDataProvider as any).history = [...allHistory];
await vscode.workspace.saveAll();
qhm.refreshTreeView();
await qhm.refreshTreeView();
return qhm;
}
});

View File

@@ -5,14 +5,15 @@ import 'mocha';
import 'sinon-chai';
import * as sinon from 'sinon';
import * as chaiAsPromised from 'chai-as-promised';
import { FullQueryInfo, InitialQueryInfo, interpretResultsSarif } from '../../query-results';
import { queriesDir, QueryEvaluationInfo, QueryWithResults, tmpDir } from '../../run-queries';
import { LocalQueryInfo, InitialQueryInfo, interpretResultsSarif } from '../../query-results';
import { QueryEvaluationInfo, QueryWithResults } from '../../run-queries';
import { QueryHistoryConfig } from '../../config';
import { EvaluationResult, QueryResultType } from '../../pure/messages';
import { DatabaseInfo, SortDirection, SortedResultSetInfo } from '../../pure/interface-types';
import { CodeQLCliServer, SourceInfo } from '../../cli';
import { env } from 'process';
import { CancellationTokenSource, Uri } from 'vscode';
import { CancellationTokenSource, Uri, env } from 'vscode';
import { tmpDir } from '../../helpers';
import { slurpQueryHistory, splatQueryHistory } from '../../query-serialization';
chai.use(chaiAsPromised);
const expect = chai.expect;
@@ -22,12 +23,15 @@ describe('query-results', () => {
let onDidChangeQueryHistoryConfigurationSpy: sinon.SinonSpy;
let mockConfig: QueryHistoryConfig;
let sandbox: sinon.SinonSandbox;
let queryPath: string;
let cnt = 0;
beforeEach(() => {
sandbox = sinon.createSandbox();
disposeSpy = sandbox.spy();
onDidChangeQueryHistoryConfigurationSpy = sandbox.spy();
mockConfig = mockQueryHistoryConfig();
queryPath = path.join(Uri.file(tmpDir.name).fsPath, `query-${cnt++}`);
});
afterEach(() => {
@@ -40,6 +44,7 @@ describe('query-results', () => {
const date = new Date('2022-01-01T00:00:00.000Z');
const dateStr = date.toLocaleString(env.language);
(fqi.initialInfo as any).start = date;
expect(fqi.interpolate('xxx')).to.eq('xxx');
expect(fqi.interpolate('%t %q %d %s %%')).to.eq(`${dateStr} hucairz a in progress %`);
expect(fqi.interpolate('%t %q %d %s %%::%t %q %d %s %%')).to.eq(`${dateStr} hucairz a in progress %::${dateStr} hucairz a in progress %`);
@@ -51,7 +56,7 @@ describe('query-results', () => {
// from the query path
expect(fqi.getQueryName()).to.eq('hucairz');
fqi.completeThisQuery(createMockQueryWithResults());
fqi.completeThisQuery(createMockQueryWithResults(queryPath));
// from the metadata
expect(fqi.getQueryName()).to.eq('vwx');
@@ -92,7 +97,7 @@ describe('query-results', () => {
// the %q from the config is now replaced by the name of the query
// in the metadata
fqi.completeThisQuery(createMockQueryWithResults());
fqi.completeThisQuery(createMockQueryWithResults(queryPath));
expect(fqi.label).to.eq('from config vwx');
// replace the config with a user specified label
@@ -102,9 +107,10 @@ describe('query-results', () => {
});
it('should get the getResultsPath', () => {
const fqi = createMockFullQueryInfo('a', createMockQueryWithResults());
const query = createMockQueryWithResults(queryPath);
const fqi = createMockFullQueryInfo('a', query);
const completedQuery = fqi.completedQuery!;
const expectedResultsPath = path.join(queriesDir, 'some-id/results.bqrs');
const expectedResultsPath = path.join(queryPath, 'results.bqrs');
// from results path
expect(completedQuery.getResultsPath('zxa', false)).to.eq(expectedResultsPath);
@@ -121,7 +127,7 @@ describe('query-results', () => {
});
it('should get the statusString', () => {
const fqi = createMockFullQueryInfo('a', createMockQueryWithResults(false));
const fqi = createMockFullQueryInfo('a', createMockQueryWithResults(queryPath, false));
const completedQuery = fqi.completedQuery!;
completedQuery.result.message = 'Tremendously';
@@ -146,7 +152,7 @@ describe('query-results', () => {
it('should updateSortState', async () => {
// setup
const fqi = createMockFullQueryInfo('a', createMockQueryWithResults());
const fqi = createMockFullQueryInfo('a', createMockQueryWithResults(queryPath));
const completedQuery = fqi.completedQuery!;
const spy = sandbox.spy();
@@ -162,8 +168,8 @@ describe('query-results', () => {
await completedQuery.updateSortState(mockServer, 'a-result-set-name', sortState);
// verify
const expectedResultsPath = path.join(queriesDir, 'some-id/results.bqrs');
const expectedSortedResultsPath = path.join(queriesDir, 'some-id/sortedResults-a-result-set-name.bqrs');
const expectedResultsPath = path.join(queryPath, 'results.bqrs');
const expectedSortedResultsPath = path.join(queryPath, 'sortedResults-a-result-set-name.bqrs');
expect(spy).to.have.been.calledWith(
expectedResultsPath,
expectedSortedResultsPath,
@@ -215,7 +221,7 @@ describe('query-results', () => {
// Try again, but with no id
spy.reset();
spy.returns({a: '1234'});
spy.returns({ a: '1234' });
delete metadata.id;
const results2 = await interpretResultsSarif(
mockServer,
@@ -248,28 +254,46 @@ describe('query-results', () => {
});
describe('splat and slurp', () => {
// TODO also add a test for round trip starting from file
it('should splat and slurp query history', async () => {
const infoSuccessRaw = createMockFullQueryInfo('a', createMockQueryWithResults(false, false, '/a/b/c/a', false));
const infoSuccessInterpreted = createMockFullQueryInfo('b', createMockQueryWithResults(true, true, '/a/b/c/b', false));
const infoEarlyFailure = createMockFullQueryInfo('c', undefined, true);
const infoLateFailure = createMockFullQueryInfo('d', createMockQueryWithResults(false, false, '/a/b/c/d', false));
const infoInprogress = createMockFullQueryInfo('e');
const allHistory = [
let infoSuccessRaw: LocalQueryInfo;
let infoSuccessInterpreted: LocalQueryInfo;
let infoEarlyFailure: LocalQueryInfo;
let infoLateFailure: LocalQueryInfo;
let infoInprogress: LocalQueryInfo;
let allHistory: LocalQueryInfo[];
beforeEach(() => {
infoSuccessRaw = createMockFullQueryInfo('a', createMockQueryWithResults(`${queryPath}-a`, false, false, '/a/b/c/a', false));
infoSuccessInterpreted = createMockFullQueryInfo('b', createMockQueryWithResults(`${queryPath}-b`, true, true, '/a/b/c/b', false));
infoEarlyFailure = createMockFullQueryInfo('c', undefined, true);
infoLateFailure = createMockFullQueryInfo('d', createMockQueryWithResults(`${queryPath}-c`, false, false, '/a/b/c/d', false));
infoInprogress = createMockFullQueryInfo('e');
allHistory = [
infoSuccessRaw,
infoSuccessInterpreted,
infoEarlyFailure,
infoLateFailure,
infoInprogress
];
});
const allHistoryPath = path.join(queriesDir, 'all-history.json');
await FullQueryInfo.splat(allHistory, allHistoryPath);
const allHistoryActual = await FullQueryInfo.slurp(allHistoryPath, mockConfig);
it('should splat and slurp query history', async () => {
// the expected results contain only the history items with completed queries
const expectedHistory = [
infoSuccessRaw,
infoSuccessInterpreted,
infoLateFailure,
];
const allHistoryPath = path.join(tmpDir.name, 'workspace-query-history.json');
// splat and slurp
await splatQueryHistory(allHistory, allHistoryPath);
const allHistoryActual = await slurpQueryHistory(allHistoryPath, mockConfig);
// the dispose methods will be different. Ignore them.
allHistoryActual.forEach(info => {
if (info.completedQuery) {
if (info.t === 'local' && info.completedQuery) {
const completedQuery = info.completedQuery;
(completedQuery as any).dispose = undefined;
@@ -287,7 +311,7 @@ describe('query-results', () => {
}
}
});
allHistory.forEach(info => {
expectedHistory.forEach(info => {
if (info.completedQuery) {
(info.completedQuery as any).dispose = undefined;
}
@@ -295,16 +319,39 @@ describe('query-results', () => {
// make the diffs somewhat sane by comparing each element directly
for (let i = 0; i < allHistoryActual.length; i++) {
expect(allHistoryActual[i]).to.deep.eq(allHistory[i]);
expect(allHistoryActual[i]).to.deep.eq(expectedHistory[i]);
}
expect(allHistoryActual.length).to.deep.eq(allHistory.length);
expect(allHistoryActual.length).to.deep.eq(expectedHistory.length);
});
it('should handle an invalid query history version', async () => {
const badPath = path.join(tmpDir.name, 'bad-query-history.json');
fs.writeFileSync(badPath, JSON.stringify({
version: 2,
queries: allHistory
}), 'utf8');
const allHistoryActual = await slurpQueryHistory(badPath, mockConfig);
// version number is invalid. Should return an empty array.
expect(allHistoryActual).to.deep.eq([]);
});
});
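As a reading aid, the on-disk shape implied by the splat/slurp and invalid-version tests above is roughly the following. This is a sketch inferred from the assertions (and from the workspace-query-history.json file used elsewhere in these tests), not a specification of query-serialization.ts:
// Inferred shape of the persisted query history file.
interface SerializedQueryHistory {
  version: number;     // 1 today; slurpQueryHistory resolves to [] for any version it does not recognize
  queries: unknown[];  // serialized history items; only queries that completed survive a round trip
}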
function createMockQueryWithResults(
queryPath: string,
didRunSuccessfully = true,
hasInterpretedResults = true,
dbPath = '/a/b/c',
includeSpies = true
): QueryWithResults {
// pretend that the results path exists
const resultsPath = path.join(queryPath, 'results.bqrs');
fs.mkdirpSync(queryPath);
fs.writeFileSync(resultsPath, '', 'utf8');
function createMockQueryWithResults(didRunSuccessfully = true, hasInterpretedResults = true, dbPath = '/a/b/c', includeSpies = true): QueryWithResults {
const query = new QueryEvaluationInfo('some-id',
Uri.file(dbPath).fsPath, // parse the Uri to make sure it is platform-independent
const query = new QueryEvaluationInfo(
queryPath,
Uri.file(dbPath).fsPath,
true,
'queryDbscheme',
undefined,
@@ -331,8 +378,8 @@ describe('query-results', () => {
return result;
}
function createMockFullQueryInfo(dbName = 'a', queryWitbResults?: QueryWithResults, isFail = false): FullQueryInfo {
const fqi = new FullQueryInfo(
function createMockFullQueryInfo(dbName = 'a', queryWitbResults?: QueryWithResults, isFail = false): LocalQueryInfo {
const fqi = new LocalQueryInfo(
{
databaseInfo: {
name: dbName,
@@ -346,7 +393,9 @@ describe('query-results', () => {
id: `some-id-${dbName}`,
} as InitialQueryInfo,
mockQueryHistoryConfig(),
{} as CancellationTokenSource
{
dispose: () => { /**/ },
} as CancellationTokenSource
);
if (queryWitbResults) {
@@ -361,6 +410,7 @@ describe('query-results', () => {
function mockQueryHistoryConfig(): QueryHistoryConfig {
return {
onDidChangeConfiguration: onDidChangeQueryHistoryConfigurationSpy,
ttlInMillis: 999999,
format: 'from config %q'
};
}

View File

@@ -0,0 +1,344 @@
import * as fs from 'fs-extra';
import * as path from 'path';
import * as sinon from 'sinon';
import * as chai from 'chai';
import 'mocha';
import 'sinon-chai';
import * as chaiAsPromised from 'chai-as-promised';
import { CancellationToken, ExtensionContext, Uri, window, workspace } from 'vscode';
import { QueryHistoryConfig } from '../../config';
import { DatabaseManager } from '../../databases';
import { tmpDir } from '../../helpers';
import { QueryHistoryManager } from '../../query-history';
import { QueryServerClient } from '../../queryserver-client';
import { Credentials } from '../../authentication';
import { AnalysesResultsManager } from '../../remote-queries/analyses-results-manager';
import { RemoteQueryResult } from '../../remote-queries/shared/remote-query-result';
import { DisposableBucket } from '../disposable-bucket';
import { testDisposeHandler } from '../test-dispose-handler';
import { walk } from '../directory-walker';
chai.use(chaiAsPromised);
const expect = chai.expect;
/**
* Tests for remote queries and how they interact with the query history manager.
*/
describe('Remote queries and query history manager', function() {
const EXTENSION_PATH = path.join(__dirname, '../../../');
const STORAGE_DIR = Uri.file(path.join(tmpDir.name, 'remote-queries')).fsPath;
const asyncNoop = async () => { /** noop */ };
let sandbox: sinon.SinonSandbox;
let qhm: QueryHistoryManager;
let rawQueryHistory: any;
let remoteQueryResult0: RemoteQueryResult;
let remoteQueryResult1: RemoteQueryResult;
let disposables: DisposableBucket;
let showTextDocumentSpy: sinon.SinonSpy;
let openTextDocumentSpy: sinon.SinonSpy;
beforeEach(() => {
// Since these tests change the state of the query history manager, we need to copy the original
// to a temporary folder where we can manipulate it for tests
copyHistoryState();
});
afterEach(() => {
deleteHistoryState();
});
beforeEach(() => {
sandbox = sinon.createSandbox();
disposables = new DisposableBucket();
rawQueryHistory = fs.readJSONSync(path.join(STORAGE_DIR, 'workspace-query-history.json')).queries;
remoteQueryResult0 = fs.readJSONSync(path.join(STORAGE_DIR, 'queries', rawQueryHistory[0].queryId, 'query-result.json'));
remoteQueryResult1 = fs.readJSONSync(path.join(STORAGE_DIR, 'queries', rawQueryHistory[1].queryId, 'query-result.json'));
qhm = new QueryHistoryManager(
{} as QueryServerClient,
{} as DatabaseManager,
STORAGE_DIR,
{
globalStorageUri: Uri.file(STORAGE_DIR),
extensionPath: EXTENSION_PATH
} as ExtensionContext,
{
onDidChangeConfiguration: () => new DisposableBucket(),
} as unknown as QueryHistoryConfig,
asyncNoop
);
disposables.push(qhm);
showTextDocumentSpy = sandbox.spy(window, 'showTextDocument');
openTextDocumentSpy = sandbox.spy(workspace, 'openTextDocument');
});
afterEach(() => {
disposables.dispose(testDisposeHandler);
sandbox.restore();
});
it('should read query history', async () => {
const spy = sandbox.spy();
disposables.push(qhm.onDidAddQueryItem(spy));
await qhm.readQueryHistory();
// Should have added the query history. Contents are directly from the file
expect(spy.getCall(0).args[0]).to.deep.eq(rawQueryHistory[0]);
expect(spy.getCall(1).args[0]).to.deep.eq(rawQueryHistory[1]);
expect(spy.callCount).to.eq(2);
expect(qhm.treeDataProvider.allHistory[0]).to.deep.eq(rawQueryHistory[0]);
expect(qhm.treeDataProvider.allHistory[1]).to.deep.eq(rawQueryHistory[1]);
expect(qhm.treeDataProvider.allHistory.length).to.eq(2);
});
it('should remove and then add query from history', async () => {
await qhm.readQueryHistory();
const addSpy = sandbox.spy();
disposables.push(qhm.onDidAddQueryItem(addSpy));
const removeSpy = sandbox.spy();
disposables.push(qhm.onDidRemoveQueryItem(removeSpy));
// Remove the first query
await qhm.handleRemoveHistoryItem(qhm.treeDataProvider.allHistory[0]);
expect(removeSpy.getCall(0).args[0]).to.deep.eq(rawQueryHistory[0]);
expect(removeSpy.callCount).to.eq(1);
expect(addSpy.callCount).to.eq(0);
expect(qhm.treeDataProvider.allHistory).to.deep.eq(rawQueryHistory.slice(1));
// Add it back
qhm.addQuery(rawQueryHistory[0]);
expect(removeSpy.callCount).to.eq(1);
expect(addSpy.getCall(0).args[0]).to.deep.eq(rawQueryHistory[0]);
expect(addSpy.callCount).to.eq(1);
expect(qhm.treeDataProvider.allHistory).to.deep.eq([rawQueryHistory[1], rawQueryHistory[0]]);
});
it('should remove two queries from history', async () => {
await qhm.readQueryHistory();
const addSpy = sandbox.spy();
disposables.push(qhm.onDidAddQueryItem(addSpy));
const removeSpy = sandbox.spy();
disposables.push(qhm.onDidRemoveQueryItem(removeSpy));
// Remove both queries
// Just for fun, let's do it in reverse order
await qhm.handleRemoveHistoryItem(undefined!, [qhm.treeDataProvider.allHistory[1], qhm.treeDataProvider.allHistory[0]]);
expect(removeSpy.getCall(0).args[0]).to.deep.eq(rawQueryHistory[1]);
expect(removeSpy.getCall(1).args[0]).to.deep.eq(rawQueryHistory[0]);
expect(qhm.treeDataProvider.allHistory).to.deep.eq([]);
expect(removeSpy.callCount).to.eq(2);
// also, both queries should be removed from on-disk storage
expect(fs.readJSONSync(path.join(STORAGE_DIR, 'workspace-query-history.json'))).to.deep.eq({
version: 1,
queries: []
});
});
it('should handle a click', async () => {
await qhm.readQueryHistory();
const openSpy = sandbox.spy();
disposables.push(qhm.onWillOpenQueryItem(openSpy));
await qhm.handleItemClicked(qhm.treeDataProvider.allHistory[0], []);
expect(openSpy.getCall(0).args[0]).to.deep.eq(rawQueryHistory[0]);
});
it('should get the query text', async () => {
await qhm.readQueryHistory();
await qhm.handleShowQueryText(qhm.treeDataProvider.allHistory[0], []);
expect(showTextDocumentSpy).to.have.been.calledOnce;
expect(openTextDocumentSpy).to.have.been.calledOnce;
const uri: Uri = openTextDocumentSpy.getCall(0).args[0];
expect(uri.scheme).to.eq('codeql');
const params = new URLSearchParams(uri.query);
expect(params.get('isQuickEval')).to.eq('false');
expect(params.get('queryText')).to.eq(rawQueryHistory[0].remoteQuery.queryText);
});
describe('AnalysisResultsManager', () => {
let mockCredentials: any;
let mockOctokit: any;
let mockLogger: any;
let arm: AnalysesResultsManager;
beforeEach(() => {
mockOctokit = {
request: sandbox.stub()
};
mockCredentials = {
getOctokit: () => mockOctokit
};
mockLogger = {
log: sandbox.spy()
};
sandbox.stub(Credentials, 'initialize').resolves(mockCredentials);
arm = new AnalysesResultsManager(
{} as ExtensionContext,
path.join(STORAGE_DIR, 'queries'),
mockLogger
);
});
it('should avoid re-downloading an analysis result', async () => {
// because the analysis result is already on disk, it should not be downloaded
const publisher = sandbox.spy();
const analysisSummary = remoteQueryResult0.analysisSummaries[0];
await arm.downloadAnalysisResults(analysisSummary, publisher);
// Should not have made the request since the analysis result is already on disk
expect(mockOctokit.request).to.not.have.been.called;
// result should have been published twice
// first time, it is in progress
expect(publisher.getCall(0).args[0][0]).to.include({
nwo: 'github/vscode-codeql',
status: 'InProgress',
// results: ... avoid checking the results object since it is complex
});
// second time, it has the path to the sarif file.
expect(publisher.getCall(1).args[0][0]).to.include({
nwo: 'github/vscode-codeql',
status: 'Completed',
// results: ... avoid checking the results object since it is complex
});
expect(publisher).to.have.been.calledTwice;
// result should be stored in the manager
expect(arm.getAnalysesResults(rawQueryHistory[0].queryId)[0]).to.include({
nwo: 'github/vscode-codeql',
status: 'Completed',
// results: ... avoid checking the results object since it is complex
});
publisher.resetHistory();
// now, let's try to download it again. This time, since it's already in memory,
// it should not even be re-published
await arm.downloadAnalysisResults(analysisSummary, publisher);
expect(publisher).to.not.have.been.called;
});
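The assertions in this block depend on the shape of the values handed to the publisher callback. A hedged sketch of that shape follows; only nwo and status are visible in these tests, so the type name and the results field are assumptions:
// Illustrative only; the real types live in the remote-queries code.
interface PublishedAnalysisResult {
  nwo: string;                         // e.g. 'github/vscode-codeql'
  status: 'InProgress' | 'Completed';  // the statuses observed in these tests
  results?: unknown;                   // present on at least the completed entries (the tests delete it before comparing)
}
// Each invocation receives the array of analyses downloaded (or in flight) so far.
type Publisher = (analysesResults: PublishedAnalysisResult[]) => void;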
it('should download two artifacts at once', async () => {
const publisher = sandbox.spy();
const analysisSummaries = [...remoteQueryResult0.analysisSummaries];
await arm.downloadAnalysesResults(analysisSummaries, undefined, publisher);
const trimmed = publisher.getCalls().map(call => call.args[0]).map(args => {
args.forEach((analysisResult: any) => delete analysisResult.results);
return args;
});
// As before, but now both summaries should have been published
expect(trimmed[0]).to.deep.eq([{
nwo: 'github/vscode-codeql',
status: 'InProgress',
}]);
expect(trimmed[1]).to.deep.eq([{
nwo: 'github/vscode-codeql',
status: 'InProgress',
}, {
nwo: 'other/hucairz',
status: 'InProgress',
}]);
// There is also a third call, but it is non-deterministic whether
// github/vscode-codeql or other/hucairz completes first, so there is
// little point in asserting on it as long as the other calls are correct.
expect(trimmed[3]).to.deep.eq([{
nwo: 'github/vscode-codeql',
status: 'Completed',
}, {
nwo: 'other/hucairz',
status: 'Completed',
}]);
expect(publisher).to.have.callCount(4);
});
it('should avoid publishing when the request is cancelled', async () => {
const publisher = sandbox.spy();
const analysisSummaries = [...remoteQueryResult0.analysisSummaries];
try {
await arm.downloadAnalysesResults(analysisSummaries, {
isCancellationRequested: true
} as CancellationToken, publisher);
expect.fail('Should have thrown');
} catch (e) {
expect(e.message).to.contain('cancelled');
}
expect(publisher).not.to.have.been.called;
});
it('should get the analysis results', async () => {
const publisher = sandbox.spy();
const analysisSummaries0 = [...remoteQueryResult0.analysisSummaries];
const analysisSummaries1 = [...remoteQueryResult1.analysisSummaries];
await arm.downloadAnalysesResults(analysisSummaries0, undefined, publisher);
await arm.downloadAnalysesResults(analysisSummaries1, undefined, publisher);
const result0 = arm.getAnalysesResults(rawQueryHistory[0].queryId);
const result0Again = arm.getAnalysesResults(rawQueryHistory[0].queryId);
// Should be deep-equal, but not the same object
expect(result0).to.deep.eq(result0Again);
expect(result0).not.to.eq(result0Again);
const result1 = arm.getAnalysesResults(rawQueryHistory[1].queryId);
const result1Again = arm.getAnalysesResults(rawQueryHistory[1].queryId);
expect(result1).to.deep.eq(result1Again);
expect(result1).not.to.eq(result1Again);
});
// This test is failing on Windows in CI.
it.skip('should read sarif', async () => {
const publisher = sandbox.spy();
const analysisSummaries0 = [remoteQueryResult0.analysisSummaries[0]];
await arm.downloadAnalysesResults(analysisSummaries0, undefined, publisher);
const sarif = fs.readJSONSync(path.join(STORAGE_DIR, 'queries', rawQueryHistory[0].queryId, '171543249', 'results.sarif'));
const queryResults = sarif.runs
.flatMap((run: any) => run.results)
.map((result: any) => ({ message: result.message.text }));
expect(publisher.getCall(1).args[0][0].results).to.deep.eq(queryResults);
});
});
function copyHistoryState() {
fs.ensureDirSync(STORAGE_DIR);
fs.copySync(path.join(__dirname, 'data/remote-queries/'), path.join(tmpDir.name, 'remote-queries'));
// also, replace the "PLACEHOLDER" strings in the copied files so that they point at the correct directory
for (const p of walk(STORAGE_DIR)) {
replacePlaceholder(path.join(p));
}
}
function deleteHistoryState() {
fs.removeSync(STORAGE_DIR);
}
function replacePlaceholder(filePath: string) {
if (filePath.endsWith('.json')) {
const newContents = fs.readFileSync(filePath, 'utf8').replaceAll('PLACEHOLDER', STORAGE_DIR.replaceAll('\\', '/'));
fs.writeFileSync(filePath, newContents, 'utf8');
}
}
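For concreteness, the placeholder rewrite amounts to the following (the exact JSON keys and paths inside the fixture files are not shown in this diff, so the example below is illustrative):
// Before (checked-in fixture):   "PLACEHOLDER/queries/<queryId>/results.sarif"
// After replacePlaceholder runs: "<STORAGE_DIR>/queries/<queryId>/results.sarif"
// where <STORAGE_DIR> is this test run's temp directory, written with forward
// slashes so the stored paths also resolve on Windows.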
});

View File

@@ -5,7 +5,7 @@ import 'sinon-chai';
import * as sinon from 'sinon';
import * as chaiAsPromised from 'chai-as-promised';
import { QueryEvaluationInfo, queriesDir } from '../../run-queries';
import { QueryEvaluationInfo } from '../../run-queries';
import { Severity, compileQuery } from '../../pure/messages';
import { Uri } from 'vscode';
@@ -14,13 +14,13 @@ const expect = chai.expect;
describe('run-queries', () => {
it('should create a QueryEvaluationInfo', () => {
const info = createMockQueryInfo();
const saveDir = 'query-save-dir';
const info = createMockQueryInfo(true, saveDir);
const queryId = info.id;
expect(info.compiledQueryPath).to.eq(path.join(queriesDir, queryId, 'compiledQuery.qlo'));
expect(info.dilPath).to.eq(path.join(queriesDir, queryId, 'results.dil'));
expect(info.resultsPaths.resultsPath).to.eq(path.join(queriesDir, queryId, 'results.bqrs'));
expect(info.resultsPaths.interpretedResultsPath).to.eq(path.join(queriesDir, queryId, 'interpretedResults.sarif'));
expect(info.compiledQueryPath).to.eq(path.join(saveDir, 'compiledQuery.qlo'));
expect(info.dilPath).to.eq(path.join(saveDir, 'results.dil'));
expect(info.resultsPaths.resultsPath).to.eq(path.join(saveDir, 'results.bqrs'));
expect(info.resultsPaths.interpretedResultsPath).to.eq(path.join(saveDir, 'interpretedResults.sarif'));
expect(info.dbItemPath).to.eq(Uri.file('/abc').fsPath);
});
@@ -90,9 +90,9 @@ describe('run-queries', () => {
});
let queryNum = 0;
function createMockQueryInfo(databaseHasMetadataFile = true) {
function createMockQueryInfo(databaseHasMetadataFile = true, saveDir = `save-dir${queryNum++}`) {
return new QueryEvaluationInfo(
`save-dir${queryNum++}`,
saveDir,
Uri.parse('file:///abc').fsPath,
databaseHasMetadataFile,
'my-scheme', // queryDbscheme,

View File

@@ -114,11 +114,10 @@ describe('run-remote-query', function() {
let mod: any;
const error = {
message: 'Unable to run query on the specified repositories. Some repositories were invalid or don\'t have database uploads enabled.',
message: 'Unable to run query on the specified repositories. Some repositories were invalid.',
response: {
data: {
invalid_repos: ['abc/def', 'ghi/jkl'],
repos_without_db_uploads: ['mno/pqr', 'stu/vwx']
invalid_repos: ['abc/def', 'ghi/jkl']
}
}
};
@@ -151,7 +150,7 @@ describe('run-remote-query', function() {
});
it('should return and log error if it can\'t run on any repos', async () => {
const repositories = ['abc/def', 'ghi/jkl', 'mno/pqr', 'stu/vwx'];
const repositories = ['abc/def', 'ghi/jkl'];
// make the function call
await mod.attemptRerun(error, credentials, ref, language, repositories, query, owner, repo);
@@ -159,12 +158,11 @@ describe('run-remote-query', function() {
// check logging output
expect(logSpy.firstCall.args[0]).to.contain('Unable to run query');
expect(logSpy.secondCall.args[0]).to.contain('Invalid repos: abc/def, ghi/jkl');
expect(logSpy.thirdCall.args[0]).to.contain('Repos without DB uploads: mno/pqr, stu/vwx');
expect(showAndLogErrorMessageSpy.firstCall.args[0]).to.contain('Unable to run query on any');
});
it('should list invalid repos and repos without DB uploads, and rerun on valid ones', async () => {
const repositories = ['foo/bar', 'abc/def', 'ghi/jkl', 'mno/pqr', 'foo/baz'];
it('should list invalid repos and rerun on valid ones', async () => {
const repositories = ['foo/bar', 'abc/def', 'ghi/jkl', 'foo/baz'];
// fake return values
showInformationMessageWithActionSpy.resolves(true);
@@ -175,7 +173,6 @@ describe('run-remote-query', function() {
// check logging output
expect(logSpy.firstCall.args[0]).to.contain('Unable to run query');
expect(logSpy.secondCall.args[0]).to.contain('Invalid repos: abc/def, ghi/jkl');
expect(logSpy.thirdCall.args[0]).to.contain('Repos without DB uploads: mno/pqr');
// check that the correct information message is displayed
expect(showInformationMessageWithActionSpy.firstCall.args[0]).to.contain('Unable to run query on some');
@@ -198,8 +195,4 @@ describe('run-remote-query', function() {
};
}
});
describe('runRemoteQuery', () => {
// TODO
});
});

View File

@@ -6,7 +6,7 @@
"module": "commonjs",
"target": "es2017",
"outDir": "out",
"lib": ["ES2020"],
"lib": ["ES2021"],
"moduleResolution": "node",
"sourceMap": true,
"rootDir": "src",