Merge pull request #1111 from github/aeisenberg/graph-viewer
More work on the graph viewer
.github/workflows/main.yml (6 changes, vendored)
@@ -22,7 +22,7 @@ jobs:
- uses: actions/setup-node@v1
with:
node-version: '14.14.0'
node-version: '16.13.0'
- name: Install dependencies
working-directory: extensions/ql-vscode
@@ -82,7 +82,7 @@ jobs:
- uses: actions/setup-node@v1
with:
node-version: '14.14.0'
node-version: '16.13.0'
- name: Install dependencies
working-directory: extensions/ql-vscode
@@ -147,7 +147,7 @@ jobs:
- uses: actions/setup-node@v1
with:
node-version: '14.14.0'
node-version: '16.13.0'
- name: Install dependencies
working-directory: extensions/ql-vscode
.github/workflows/release.yml (2 changes, vendored)
@@ -22,7 +22,7 @@ jobs:
- uses: actions/setup-node@v1
with:
node-version: '10.18.1'
node-version: '16.13.0'
- name: Install dependencies
run: |
extensions/ql-vscode/package-lock.json (2199 changes, generated): file diff suppressed because it is too large
@@ -48,6 +48,7 @@
"onCommand:codeQLDatabases.chooseDatabaseLgtm",
"onCommand:codeQL.setCurrentDatabase",
"onCommand:codeQL.viewAst",
"onCommand:codeQL.viewCfg",
"onCommand:codeQL.openReferencedFile",
"onCommand:codeQL.previewQueryHelp",
"onCommand:codeQL.chooseDatabaseFolder",
@@ -374,6 +375,10 @@
"command": "codeQL.viewAst",
"title": "CodeQL: View AST"
},
{
"command": "codeQL.viewCfg",
"title": "CodeQL: View CFG"
},
{
"command": "codeQL.upgradeCurrentDatabase",
"title": "CodeQL: Upgrade Current Database"
@@ -743,6 +748,11 @@
"group": "9_qlCommands",
"when": "resourceScheme == codeql-zip-archive && !explorerResourceIsFolder && !listMultiSelection"
},
{
"command": "codeQL.viewCfg",
"group": "9_qlCommands",
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
},
{
"command": "codeQL.runQueries",
"group": "9_qlCommands",
@@ -804,6 +814,10 @@
"command": "codeQL.viewAst",
"when": "resourceScheme == codeql-zip-archive"
},
{
"command": "codeQL.viewCfg",
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
},
{
"command": "codeQLDatabases.setCurrentDatabase",
"when": "false"
@@ -950,6 +964,10 @@
"command": "codeQL.viewAst",
"when": "resourceScheme == codeql-zip-archive"
},
{
"command": "codeQL.viewCfg",
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
},
{
"command": "codeQL.quickEval",
"when": "editorLangId == ql"
@@ -1023,6 +1041,8 @@
"@primer/react": "^34.3.0",
"child-process-promise": "^2.2.1",
"classnames": "~2.2.6",
"d3": "^6.3.1",
"d3-graphviz": "^2.6.1",
"fs-extra": "^9.0.1",
"glob-promise": "^3.4.0",
"js-yaml": "^3.14.0",
@@ -1054,6 +1074,8 @@
"@types/child-process-promise": "^2.2.1",
"@types/classnames": "~2.2.9",
"@types/del": "^4.0.0",
"@types/d3": "^6.2.0",
"@types/d3-graphviz": "^2.6.6",
"@types/fs-extra": "^9.0.6",
"@types/glob": "^7.1.1",
"@types/google-protobuf": "^3.2.7",
extensions/ql-vscode/src/additional-typings.d.ts (new file, 15 lines, vendored)
@@ -0,0 +1,15 @@
/**
* The d3 library is designed to work in both the browser and
* node. Consequently their typings files refer to both node
* types like `Buffer` (which don't exist in the browser), and browser
* types like `Blob` (which don't exist in node). Instead of sticking
* all of `dom` in `compilerOptions.lib`, it suffices just to put in a
* stub definition of the affected types so that compilation
* succeeds.
*/
declare type RequestInit = Record<string, unknown>;
declare type ElementTagNameMap = any;
declare type NodeListOf<T> = Record<string, T>;
declare type Node = Record<string, unknown>;
declare type XMLDocument = Record<string, unknown>;
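Note: the stub pattern above generalizes to any other browser-only global that the d3 typings reference but that node lacks; the file comment mentions `Blob`, which is not stubbed here. A minimal, hedged sketch of adding one more stub in the same style, only if compilation were ever to fail on that type:

// Hypothetical additional stub, following the same pattern as the declarations above.
// Only needed if the TypeScript compiler actually reports a missing 'Blob' type.
declare type Blob = Record<string, unknown>;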
@@ -1,5 +1,6 @@
import * as cpp from 'child-process-promise';
import * as child_process from 'child_process';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as sarif from 'sarif';
import { SemVer } from 'semver';
@@ -17,7 +18,7 @@ import { QueryMetadata, SortDirection } from './pure/interface-types';
import { Logger, ProgressReporter } from './logging';
import { CompilationMessage } from './pure/messages';
import { sarifParser } from './sarif-parser';
import { dbSchemeToLanguage } from './helpers';
import { dbSchemeToLanguage, walkDirectory } from './helpers';
/**
* The version of the SARIF format that we are using.
@@ -687,20 +688,13 @@ export class CodeQLCliServer implements Disposable {
return await this.runJsonCodeQlCliCommand<DecodedBqrsChunk>(['bqrs', 'decode'], subcommandArgs, 'Reading bqrs data');
}
async runInterpretCommand(format: string, metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo) {
async runInterpretCommand(format: string, additonalArgs: string[], metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo) {
const args = [
'--output', interpretedResultsPath,
'--format', format,
// Forward all of the query metadata.
...Object.entries(metadata).map(([key, value]) => `-t=${key}=${value}`)
];
if (format == SARIF_FORMAT) {
// TODO: This flag means that we don't group interpreted results
// by primary location. We may want to revisit whether we call
// interpretation with and without this flag, or do some
// grouping client-side.
args.push('--no-group-results');
}
].concat(additonalArgs);
if (sourceInfo !== undefined) {
args.push(
'--source-archive', sourceInfo.sourceArchive,
@@ -722,13 +716,47 @@ export class CodeQLCliServer implements Disposable {
await this.runCodeQlCliCommand(['bqrs', 'interpret'], args, 'Interpreting query results');
}
async interpretBqrs(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<sarif.Log> {
await this.runInterpretCommand(SARIF_FORMAT, metadata, resultsPath, interpretedResultsPath, sourceInfo);
async interpretBqrsSarif(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<sarif.Log> {
const additionalArgs = [
// TODO: This flag means that we don't group interpreted results
// by primary location. We may want to revisit whether we call
// interpretation with and without this flag, or do some
// grouping client-side.
'--no-group-results'
];
await this.runInterpretCommand(SARIF_FORMAT, additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
return await sarifParser(interpretedResultsPath);
}
// Warning: this function is untenable for large dot files,
async readDotFiles(dir: string): Promise<string[]> {
const dotFiles: Promise<string>[] = [];
for await (const file of walkDirectory(dir)) {
if (file.endsWith('.dot')) {
dotFiles.push(fs.readFile(file, 'utf8'));
}
}
return Promise.all(dotFiles);
}
async interpretBqrsGraph(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<string[]> {
const additionalArgs = sourceInfo
? ['--dot-location-url-format', 'file://' + sourceInfo.sourceLocationPrefix + '{path}:{start:line}:{start:column}:{end:line}:{end:column}']
: [];
await this.runInterpretCommand('dot', additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
try {
const dot = await this.readDotFiles(interpretedResultsPath);
return dot;
} catch (err) {
throw new Error(`Reading output of interpretation failed: ${err.stderr || err}`);
}
}
async generateResultsCsv(metadata: QueryMetadata, resultsPath: string, csvPath: string, sourceInfo?: SourceInfo): Promise<void> {
await this.runInterpretCommand(CSV_FORMAT, metadata, resultsPath, csvPath, sourceInfo);
await this.runInterpretCommand(CSV_FORMAT, [], metadata, resultsPath, csvPath, sourceInfo);
}
async sortBqrs(resultsPath: string, sortedResultsPath: string, resultSet: string, sortKeys: number[], sortDirections: SortDirection[]): Promise<void> {
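For orientation, a minimal sketch of how the new graph-interpretation entry point might be driven from calling code. The method names and the SourceInfo fields come from the hunk above; the wrapper function, its paths, and the assumption about how the CLI expands the `--dot-location-url-format` template are hypothetical and not part of this change.

// Hedged usage sketch only (types assumed to come from './cli' and './pure/interface-types').
async function renderGraphForQuery(cliServer: CodeQLCliServer, metadata: QueryMetadata): Promise<string[]> {
  const sourceInfo: SourceInfo = {
    sourceArchive: '/path/to/src.zip',          // hypothetical path
    sourceLocationPrefix: '/opt/checkout/src'   // hypothetical path
  };
  // Interprets BQRS results as DOT and reads the generated .dot files back as strings.
  // Location links inside the DOT output are assumed to be file:// URLs built from
  // sourceLocationPrefix and the {path}/{start}/{end} placeholders shown above.
  return cliServer.interpretBqrsGraph(
    metadata,
    '/tmp/results.bqrs',   // hypothetical results path
    '/tmp/graphResults',   // hypothetical output directory for .dot files
    sourceInfo
  );
}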
@@ -1224,9 +1252,9 @@ export class CliVersionConstraint {
/**
* CLI version where the `--evaluator-log` and related options to the query server were introduced,
* on a per-query server basis.
* on a per-query server basis.
*/
public static CLI_VERSION_WITH_STRUCTURED_EVAL_LOG = new SemVer('2.8.2');
public static CLI_VERSION_WITH_STRUCTURED_EVAL_LOG = new SemVer('2.8.2');
constructor(private readonly cli: CodeQLCliServer) {
/**/
@@ -95,8 +95,8 @@ const CUSTOM_LOG_DIRECTORY_SETTING = new Setting('customLogDirectory', RUNNING_Q
/** When these settings change, the running query server should be restarted. */
const QUERY_SERVER_RESTARTING_SETTINGS = [
NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING,
DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING,
NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING,
DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING,
];
export interface QueryServerConfig {
@@ -2,6 +2,7 @@ export enum KeyType {
DefinitionQuery = 'DefinitionQuery',
ReferenceQuery = 'ReferenceQuery',
PrintAstQuery = 'PrintAstQuery',
PrintCfgQuery = 'PrintCfgQuery',
}
export function tagOfKeyType(keyType: KeyType): string {
@@ -12,6 +13,8 @@ export function tagOfKeyType(keyType: KeyType): string {
return 'ide-contextual-queries/local-references';
case KeyType.PrintAstQuery:
return 'ide-contextual-queries/print-ast';
case KeyType.PrintCfgQuery:
return 'ide-contextual-queries/print-cfg';
}
}
@@ -23,6 +26,8 @@ export function nameOfKeyType(keyType: KeyType): string {
return 'references';
case KeyType.PrintAstQuery:
return 'print AST';
case KeyType.PrintCfgQuery:
return 'print CFG';
}
}
@@ -32,6 +37,7 @@ export function kindOfKeyType(keyType: KeyType): string {
case KeyType.ReferenceQuery:
return 'definitions';
case KeyType.PrintAstQuery:
case KeyType.PrintCfgQuery:
return 'graph';
}
}
@@ -230,3 +230,62 @@ export class TemplatePrintAstProvider {
};
}
}
export class TemplatePrintCfgProvider {
private cache: CachedOperation<[Uri, messages.TemplateDefinitions] | undefined>;
constructor(
private cli: CodeQLCliServer,
private dbm: DatabaseManager,
) {
this.cache = new CachedOperation<[Uri, messages.TemplateDefinitions] | undefined>(this.getCfgUri.bind(this));
}
async provideCfgUri(document?: TextDocument): Promise<[Uri, messages.TemplateDefinitions] | undefined> {
if (!document) {
return;
}
return await this.cache.get(document.uri.toString());
}
private async getCfgUri(uriString: string): Promise<[Uri, messages.TemplateDefinitions]> {
const uri = Uri.parse(uriString, true);
if (uri.scheme !== zipArchiveScheme) {
throw new Error('CFG Viewing is only available for databases with zipped source archives.');
}
const zippedArchive = decodeSourceArchiveUri(uri);
const sourceArchiveUri = encodeArchiveBasePath(zippedArchive.sourceArchiveZipPath);
const db = this.dbm.findDatabaseItemBySourceArchive(sourceArchiveUri);
if (!db) {
throw new Error('Can\'t infer database from the provided source.');
}
const qlpack = await qlpackOfDatabase(this.cli, db);
if (!qlpack) {
throw new Error('Can\'t infer qlpack from database source archive.');
}
const queries = await resolveQueries(this.cli, qlpack, KeyType.PrintCfgQuery);
if (queries.length > 1) {
throw new Error(`Found multiple Print CFG queries. Can't continue. Make sure there is exacly one query with the tag ${KeyType.PrintCfgQuery}`);
}
if (queries.length === 0) {
throw new Error(`Did not find any Print CFG queries. Can't continue. Make sure there is exacly one query with the tag ${KeyType.PrintCfgQuery}`);
}
const queryUri = Uri.file(queries[0]);
const templates: messages.TemplateDefinitions = {
[TEMPLATE_NAME]: {
values: {
tuples: [[{
stringValue: zippedArchive.pathWithinSourceArchive
}]]
}
}
};
return [queryUri, templates];
}
}
@@ -42,7 +42,8 @@ import { DatabaseUI } from './databases-ui';
import {
TemplateQueryDefinitionProvider,
TemplateQueryReferenceProvider,
TemplatePrintAstProvider
TemplatePrintAstProvider,
TemplatePrintCfgProvider
} from './contextual/templateProvider';
import {
DEFAULT_DISTRIBUTION_VERSION_RANGE,
@@ -1047,7 +1048,8 @@ async function activateWithInstalledDistribution(
);
const astViewer = new AstViewer();
const templateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm, contextualQueryStorageDir);
const printAstTemplateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm, contextualQueryStorageDir);
const cfgTemplateProvider = new TemplatePrintCfgProvider(cliServer, dbm);
ctx.subscriptions.push(astViewer);
ctx.subscriptions.push(commandRunnerWithProgress('codeQL.viewAst', async (
@@ -1055,7 +1057,7 @@ async function activateWithInstalledDistribution(
token: CancellationToken,
selectedFile: Uri
) => {
const ast = await templateProvider.provideAst(
const ast = await printAstTemplateProvider.provideAst(
progress,
token,
selectedFile ?? window.activeTextEditor?.document.uri,
@@ -1068,6 +1070,25 @@ async function activateWithInstalledDistribution(
title: 'Calculate AST'
}));
ctx.subscriptions.push(
commandRunnerWithProgress(
'codeQL.viewCfg',
async (
progress: ProgressCallback,
token: CancellationToken
) => {
const res = await cfgTemplateProvider.provideCfgUri(window.activeTextEditor?.document);
if (res) {
await compileAndRunQuery(false, res[0], progress, token, undefined);
}
},
{
title: 'Calculating Control Flow Graph',
cancellable: true
}
)
);
await commands.executeCommand('codeQLDatabases.removeOrphanedDatabases');
void logger.log('Successfully finished extension initialization.');
@@ -558,3 +558,25 @@ export async function createTimestampFile(storagePath: string) {
await fs.ensureDir(storagePath);
await fs.writeFile(timestampPath, Date.now().toString(), 'utf8');
}
/**
* Recursively walk a directory and return the full path to all files found.
* Symbolic links are ignored.
*
* @param dir the directory to walk
*
* @return An iterator of the full path to all files recursively found in the directory.
*/
export async function* walkDirectory(dir: string): AsyncIterableIterator<string> {
const seenFiles = new Set<string>();
for await (const d of await fs.opendir(dir)) {
const entry = path.join(dir, d.name);
seenFiles.add(entry);
if (d.isDirectory()) {
yield* walkDirectory(entry);
} else if (d.isFile()) {
yield entry;
}
}
}
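For orientation, a small hedged sketch of consuming the async generator above; the wrapper function and the directory argument are illustrative only, and the pattern mirrors how readDotFiles in cli.ts uses the same helper.

// Hedged usage sketch: collect all '.dot' files under a directory
// using the walkDirectory async generator defined above (imported from helpers.ts).
async function findDotFiles(dir: string): Promise<string[]> {
  const found: string[] = [];
  for await (const file of walkDirectory(dir)) {
    if (file.endsWith('.dot')) {
      found.push(file);
    }
  }
  return found;
}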
@@ -27,12 +27,13 @@ import {
InterpretedResultsSortState,
SortDirection,
ALERTS_TABLE_NAME,
GRAPH_TABLE_NAME,
RawResultsSortState,
} from './pure/interface-types';
import { Logger } from './logging';
import * as messages from './pure/messages';
import { commandRunner } from './commandRunner';
import { CompletedQueryInfo, interpretResults } from './query-results';
import { CompletedQueryInfo, interpretResultsSarif, interpretGraphResults } from './query-results';
import { QueryEvaluationInfo } from './run-queries';
import { parseSarifLocation, parseSarifPlainTextMessage } from './pure/sarif-utils';
import {
@@ -88,12 +89,36 @@ function sortInterpretedResults(
}
}
function numPagesOfResultSet(resultSet: RawResultSet): number {
return Math.ceil(resultSet.schema.rows / PAGE_SIZE.getValue<number>());
function interpretedPageSize(interpretation: Interpretation | undefined): number {
if (interpretation?.data.t == 'GraphInterpretationData') {
// Graph views always have one result per page.
return 1;
}
return PAGE_SIZE.getValue<number>();
}
function numPagesOfResultSet(resultSet: RawResultSet, interpretation?: Interpretation): number {
const pageSize = interpretedPageSize(interpretation);
const n = interpretation?.data.t == 'GraphInterpretationData'
? interpretation.data.dot.length
: resultSet.schema.rows;
return Math.ceil(n / pageSize);
}
function numInterpretedPages(interpretation: Interpretation | undefined): number {
return Math.ceil((interpretation?.sarif.runs[0].results?.length || 0) / PAGE_SIZE.getValue<number>());
if (!interpretation) {
return 0;
}
const pageSize = interpretedPageSize(interpretation);
const n = interpretation.data.t == 'GraphInterpretationData'
? interpretation.data.dot.length
: interpretation.data.runs[0].results?.length || 0;
return Math.ceil(n / pageSize);
}
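As a hedged worked example of the paging arithmetic above (the concrete PAGE_SIZE value of 200 is assumed purely for illustration):

// Illustrative only: how the helpers above page the two kinds of interpretation.
// Graph data: one DOT graph per page.
//   interpretation.data = { t: 'GraphInterpretationData', dot: [g0, g1, g2] }
//   interpretedPageSize(interpretation) === 1
//   numInterpretedPages(interpretation) === Math.ceil(3 / 1)    // 3 pages, one graph each
// SARIF data: PAGE_SIZE results per page (assume PAGE_SIZE === 200 for the example).
//   450 results => Math.ceil(450 / 200) === 3 pages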
export class InterfaceManager extends DisposableObject {
@@ -181,6 +206,7 @@ export class InterfaceManager extends DisposableObject {
() => {
this._panel = undefined;
this._displayedQuery = undefined;
this._panelLoaded = false;
},
null,
ctx.subscriptions
@@ -305,7 +331,7 @@ export class InterfaceManager extends DisposableObject {
await this.changeInterpretedSortState(msg.sortState);
break;
case 'changePage':
if (msg.selectedTable === ALERTS_TABLE_NAME) {
if (msg.selectedTable === ALERTS_TABLE_NAME || msg.selectedTable === GRAPH_TABLE_NAME) {
await this.showPageOfInterpretedResults(msg.pageNumber);
}
else {
@@ -438,7 +464,7 @@ export class InterfaceManager extends DisposableObject {
const parsedResultSets: ParsedResultSets = {
pageNumber: 0,
pageSize,
numPages: numPagesOfResultSet(resultSet),
numPages: numPagesOfResultSet(resultSet, this._interpretation),
numInterpretedPages: numInterpretedPages(this._interpretation),
resultSet: { ...resultSet, t: 'RawResultSet' },
selectedTable: undefined,
@@ -474,7 +500,7 @@ export class InterfaceManager extends DisposableObject {
if (this._interpretation === undefined) {
throw new Error('Trying to show interpreted results but interpretation was undefined');
}
if (this._interpretation.sarif.runs[0].results === undefined) {
if (this._interpretation.data.t === 'SarifInterpretationData' && this._interpretation.data.runs[0].results === undefined) {
throw new Error('Trying to show interpreted results but results were undefined');
}
@@ -488,7 +514,7 @@ export class InterfaceManager extends DisposableObject {
metadata: this._displayedQuery.completedQuery.query.metadata,
pageNumber,
resultSetNames,
pageSize: PAGE_SIZE.getValue(),
pageSize: interpretedPageSize(this._interpretation),
numPages: numInterpretedPages(this._interpretation),
queryName: this._displayedQuery.label,
queryPath: this._displayedQuery.initialInfo.queryPath
@@ -591,28 +617,45 @@ export class InterfaceManager extends DisposableObject {
void this.logger.log('No results path. Cannot display interpreted results.');
return undefined;
}
let data;
let numTotalResults;
if (metadata?.kind === GRAPH_TABLE_NAME) {
data = await interpretGraphResults(
this.cliServer,
metadata,
resultsPaths,
sourceInfo
);
numTotalResults = data.dot.length;
} else {
const sarif = await interpretResultsSarif(
this.cliServer,
metadata,
resultsPaths,
sourceInfo
);
const sarif = await interpretResults(
this.cliServer,
metadata,
resultsPaths,
sourceInfo
);
sarif.runs.forEach(run => {
if (run.results) {
sortInterpretedResults(run.results, sortState);
}
});
sarif.runs.forEach(run => {
if (run.results !== undefined) {
sortInterpretedResults(run.results, sortState);
}
});
sarif.sortState = sortState;
data = sarif;
const numTotalResults = sarif.runs[0]?.results?.length || 0;
numTotalResults = (() => {
return sarif.runs?.[0]?.results
? sarif.runs[0].results.length
: 0;
})();
}
const interpretation: Interpretation = {
sarif,
data,
sourceLocationPrefix,
numTruncatedResults: 0,
numTotalResults,
sortState,
numTotalResults
};
this._interpretation = interpretation;
return interpretation;
@@ -621,7 +664,6 @@ export class InterfaceManager extends DisposableObject {
private getPageOfInterpretedResults(
pageNumber: number
): Interpretation {
function getPageOfRun(run: Sarif.Run): Sarif.Run {
return {
...run, results: run.results?.slice(
@@ -631,16 +673,24 @@ export class InterfaceManager extends DisposableObject {
};
}
if (this._interpretation === undefined) {
const interp = this._interpretation;
if (interp === undefined) {
throw new Error('Tried to get interpreted results before interpretation finished');
}
if (this._interpretation.sarif.runs.length !== 1) {
void this.logger.log(`Warning: SARIF file had ${this._interpretation.sarif.runs.length} runs, expected 1`);
if (interp.data.t !== 'SarifInterpretationData')
return interp;
if (interp.data.runs.length !== 1) {
void this.logger.log(`Warning: SARIF file had ${interp.data.runs.length} runs, expected 1`);
}
const interp = this._interpretation;
return {
...interp,
sarif: { ...interp.sarif, runs: [getPageOfRun(interp.sarif.runs[0])] },
data: {
...interp.data,
runs: [getPageOfRun(interp.data.runs[0])]
}
};
}
@@ -730,9 +780,12 @@ export class InterfaceManager extends DisposableObject {
interpretation: Interpretation,
databaseItem: DatabaseItem
): Promise<void> {
const { sarif, sourceLocationPrefix } = interpretation;
const { data, sourceLocationPrefix } = interpretation;
if (!sarif.runs || !sarif.runs[0].results) {
if (data.t !== 'SarifInterpretationData')
return;
if (!data.runs || !data.runs[0].results) {
void this.logger.log(
'Didn\'t find a run in the sarif results. Error processing sarif?'
);
@@ -741,7 +794,7 @@ export class InterfaceManager extends DisposableObject {
const diagnostics: [Uri, ReadonlyArray<Diagnostic>][] = [];
for (const result of sarif.runs[0].results) {
for (const result of data.runs[0].results) {
const message = result.message.text;
if (message === undefined) {
void this.logger.log('Sarif had result without plaintext message');
@@ -1,3 +1,4 @@
/**
* helpers-pure.ts
* ------------
@@ -10,15 +10,17 @@ import { RawResultSet, ResultRow, ResultSetSchema, Column, ResolvableLocationVal
export const SELECT_TABLE_NAME = '#select';
export const ALERTS_TABLE_NAME = 'alerts';
export const GRAPH_TABLE_NAME = 'graph';
export type RawTableResultSet = { t: 'RawResultSet' } & RawResultSet;
export type PathTableResultSet = {
t: 'SarifResultSet';
export type InterpretedResultSet<T> = {
t: 'InterpretedResultSet';
readonly schema: ResultSetSchema;
name: string;
} & Interpretation;
interpretation: InterpretationT<T>;
};
export type ResultSet = RawTableResultSet | PathTableResultSet;
export type ResultSet = RawTableResultSet | InterpretedResultSet<InterpretationData>;
/**
* Only ever show this many rows in a raw result table.
@@ -46,18 +48,31 @@ export interface PreviousExecution {
durationSeconds: number;
}
export interface Interpretation {
sourceLocationPrefix: string;
numTruncatedResults: number;
numTotalResults: number;
export type SarifInterpretationData = {
t: 'SarifInterpretationData';
/**
* sortState being undefined means don't sort, just present results in the order
* they appear in the sarif file.
*/
sortState?: InterpretedResultsSortState;
sarif: sarif.Log;
} & sarif.Log;
export type GraphInterpretationData = {
t: 'GraphInterpretationData';
dot: string[];
};
export type InterpretationData = SarifInterpretationData | GraphInterpretationData;
export interface InterpretationT<T> {
sourceLocationPrefix: string;
numTruncatedResults: number;
numTotalResults: number;
data: T;
}
export type Interpretation = InterpretationT<InterpretationData>;
export interface ResultsPaths {
resultsPath: string;
interpretedResultsPath: string;
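A hedged sketch of how the discriminated union introduced above is meant to be consumed; the function itself is illustrative and not part of the change, but it mirrors the narrowing on the `t` discriminant already used in interface.ts earlier in this diff.

// Illustrative narrowing on InterpretationData (not part of the change set).
function countInterpretedResults(interpretation: Interpretation): number {
  const data = interpretation.data;
  if (data.t === 'GraphInterpretationData') {
    // Narrowed to GraphInterpretationData: one result per DOT graph.
    return data.dot.length;
  }
  // Narrowed to SarifInterpretationData, which extends sarif.Log, so runs is available.
  return data.runs[0]?.results?.length ?? 0;
}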
@@ -357,8 +372,9 @@ export function getDefaultResultSetName(
// Choose first available result set from the array
return [
ALERTS_TABLE_NAME,
GRAPH_TABLE_NAME,
SELECT_TABLE_NAME,
resultSetNames[0],
resultSetNames[0]
].filter((resultSetName) => resultSetNames.includes(resultSetName))[0];
}
@@ -3,7 +3,6 @@ import { CancellationTokenSource, env } from 'vscode';
import { QueryWithResults, QueryEvaluationInfo } from './run-queries';
import * as messages from './pure/messages';
import * as cli from './cli';
import * as sarif from 'sarif';
import * as fs from 'fs-extra';
import * as path from 'path';
import {
@@ -11,7 +10,9 @@ import {
SortedResultSetInfo,
QueryMetadata,
InterpretedResultsSortState,
ResultsPaths
ResultsPaths,
SarifInterpretationData,
GraphInterpretationData
} from './pure/interface-types';
import { QueryHistoryConfig } from './config';
import { DatabaseInfo } from './pure/interface-types';
@@ -151,19 +152,39 @@ export class CompletedQueryInfo implements QueryWithResults {
/**
* Call cli command to interpret results.
* Call cli command to interpret SARIF results.
*/
export async function interpretResults(
server: cli.CodeQLCliServer,
export async function interpretResultsSarif(
cli: cli.CodeQLCliServer,
metadata: QueryMetadata | undefined,
resultsPaths: ResultsPaths,
sourceInfo?: cli.SourceInfo
): Promise<sarif.Log> {
): Promise<SarifInterpretationData> {
const { resultsPath, interpretedResultsPath } = resultsPaths;
if (await fs.pathExists(interpretedResultsPath)) {
return JSON.parse(await fs.readFile(interpretedResultsPath, 'utf8'));
return { ...JSON.parse(await fs.readFile(interpretedResultsPath, 'utf8')), t: 'SarifInterpretationData' };
}
return await server.interpretBqrs(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
const res = await cli.interpretBqrsSarif(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
return { ...res, t: 'SarifInterpretationData' };
}
/**
* Call cli command to interpret graph results.
*/
export async function interpretGraphResults(
cli: cli.CodeQLCliServer,
metadata: QueryMetadata | undefined,
resultsPaths: ResultsPaths,
sourceInfo?: cli.SourceInfo
): Promise<GraphInterpretationData> {
const { resultsPath, interpretedResultsPath } = resultsPaths;
if (await fs.pathExists(interpretedResultsPath)) {
const dot = await cli.readDotFiles(interpretedResultsPath);
return { dot, t: 'GraphInterpretationData' };
}
const dot = await cli.interpretBqrsGraph(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
return { dot, t: 'GraphInterpretationData' };
}
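For orientation, a hedged sketch of how a caller might choose between the two interpretation helpers above, mirroring the `metadata?.kind === 'graph'` dispatch seen in interface.ts earlier in this diff. The wrapper function and its name are hypothetical; the signatures it calls are taken from the hunk above.

// Hypothetical wrapper, not part of the change set.
async function interpretForDisplay(
  cliServer: cli.CodeQLCliServer,
  metadata: QueryMetadata | undefined,
  resultsPaths: ResultsPaths,
  sourceInfo?: cli.SourceInfo
): Promise<SarifInterpretationData | GraphInterpretationData> {
  // Graph queries get DOT output; everything else is interpreted as SARIF.
  return metadata?.kind === 'graph'
    ? interpretGraphResults(cliServer, metadata, resultsPaths, sourceInfo)
    : interpretResultsSarif(cliServer, metadata, resultsPaths, sourceInfo);
}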
export function ensureMetadataIsComplete(metadata: QueryMetadata | undefined) {
@@ -181,7 +202,6 @@ export function ensureMetadataIsComplete(metadata: QueryMetadata | undefined) {
return metadata;
}
/**
* Used in Interface and Compare-Interface for queries that we know have been complated.
*/
@@ -174,7 +174,7 @@ export class QueryServerClient extends DisposableObject {
if (await this.cliServer.cliConstraints.supportsStructuredEvalLog()) {
args.push('--evaluator-log');
args.push(`${this.opts.contextStoragePath}/structured-evaluator-log.json`);
// We hard-code the verbosity level to 5 and minify to false.
// This will be the behavior of the per-query structured logging in the CLI after 2.8.3.
args.push('--evaluator-log-level');
@@ -86,7 +86,11 @@
get resultsPaths() {
return {
resultsPath: path.join(this.querySaveDir, 'results.bqrs'),
interpretedResultsPath: path.join(this.querySaveDir, 'interpretedResults.sarif'),
interpretedResultsPath: path.join(this.querySaveDir,
this.metadata?.kind === 'graph'
? 'graphResults'
: 'interpretedResults.sarif'
),
};
}
@@ -202,16 +206,21 @@ export class QueryEvaluationInfo {
return false;
}
const hasKind = !!this.metadata?.kind;
const kind = this.metadata?.kind;
const hasKind = !!kind;
if (!hasKind) {
void logger.log('Cannot produce interpreted results since the query does not have @kind metadata.');
return false;
}
// Graph queries only return interpreted results if we are in canary mode.
if (kind === 'graph') {
return config.isCanary();
}
// table is the default query kind. It does not produce interpreted results.
// any query kind that is not table can, in principle, produce interpreted results.
const isTable = hasKind && this.metadata?.kind === 'table';
return !isTable;
return kind !== 'table';
}
/**
@@ -5,7 +5,7 @@ import * as Keys from '../pure/result-keys';
import * as octicons from './octicons';
import { className, renderLocation, ResultTableProps, zebraStripe, selectableZebraStripe, jumpToLocation, nextSortDirection, emptyQueryResultsMessage } from './result-table-utils';
import { onNavigation, NavigationEvent } from './results';
import { PathTableResultSet } from '../pure/interface-types';
import { InterpretedResultSet, SarifInterpretationData } from '../pure/interface-types';
import {
parseSarifPlainTextMessage,
parseSarifLocation,
@@ -15,7 +15,7 @@ import { InterpretedResultsSortColumn, SortDirection, InterpretedResultsSortStat
import { vscode } from './vscode-api';
import { isWholeFileLoc, isLineColumnLoc } from '../pure/bqrs-utils';
export type PathTableProps = ResultTableProps & { resultSet: PathTableResultSet };
export type PathTableProps = ResultTableProps & { resultSet: InterpretedResultSet<SarifInterpretationData> };
export interface PathTableState {
expanded: { [k: string]: boolean };
selectedPathNode: undefined | Keys.PathNode;
@@ -51,7 +51,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
}
sortClass(column: InterpretedResultsSortColumn): string {
const sortState = this.props.resultSet.sortState;
const sortState = this.props.resultSet.interpretation.data.sortState;
if (sortState !== undefined && sortState.sortBy === column) {
return sortState.sortDirection === SortDirection.asc ? 'sort-asc' : 'sort-desc';
}
@@ -61,7 +61,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
}
getNextSortState(column: InterpretedResultsSortColumn): InterpretedResultsSortState | undefined {
const oldSortState = this.props.resultSet.sortState;
const oldSortState = this.props.resultSet.interpretation.data.sortState;
const prevDirection = oldSortState && oldSortState.sortBy === column ? oldSortState.sortDirection : undefined;
const nextDirection = nextSortDirection(prevDirection, true);
return nextDirection === undefined ? undefined :
@@ -94,7 +94,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
</thead>;
const rows: JSX.Element[] = [];
const { numTruncatedResults, sourceLocationPrefix } = resultSet;
const { numTruncatedResults, sourceLocationPrefix } = resultSet.interpretation;
function renderRelatedLocations(msg: string, relatedLocations: Sarif.Location[]): JSX.Element[] {
const relatedLocationsById: { [k: string]: Sarif.Location } = {};
@@ -188,13 +188,13 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
return (e) => this.toggle(e, indices);
};
if (!resultSet.sarif.runs?.[0]?.results?.length) {
if (!resultSet.interpretation.data.runs?.[0]?.results?.length) {
return this.renderNoResults();
}
let expansionIndex = 0;
resultSet.sarif.runs[0].results.forEach((result, resultIndex) => {
resultSet.interpretation.data.runs[0].results.forEach((result, resultIndex) => {
const text = result.message.text || '[no text]';
const msg: JSX.Element[] =
result.relatedLocations === undefined ?
@@ -307,7 +307,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
const { selectedPathNode } = prevState;
if (selectedPathNode === undefined) return prevState;
const path = Keys.getPath(this.props.resultSet.sarif, selectedPathNode);
const path = Keys.getPath(this.props.resultSet.interpretation.data, selectedPathNode);
if (path === undefined) return prevState;
const nextIndex = selectedPathNode.pathNodeIndex + event.direction;
@@ -318,7 +318,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
return prevState;
}
const loc = parseSarifLocation(sarifLoc, this.props.resultSet.sourceLocationPrefix);
const loc = parseSarifLocation(sarifLoc, this.props.resultSet.interpretation.sourceLocationPrefix);
if (isNoLocation(loc)) {
return prevState;
}
extensions/ql-vscode/src/view/graph.tsx (new file, 100 lines)
@@ -0,0 +1,100 @@
import * as React from 'react';
import * as d3 from 'd3';
import { ResultTableProps } from './result-table-utils';
import { InterpretedResultSet, GraphInterpretationData } from '../pure/interface-types';
import { graphviz } from 'd3-graphviz';
import { jumpToLocation } from './result-table-utils';
import { tryGetLocationFromString } from '../pure/bqrs-utils';
export type GraphProps = ResultTableProps & { resultSet: InterpretedResultSet<GraphInterpretationData> };
const graphClassName = 'vscode-codeql__result-tables-graph';
const graphId = 'graph-results';
export class Graph extends React.Component<GraphProps> {
constructor(props: GraphProps) {
super(props);
}
public render = (): JSX.Element => {
const { resultSet, offset } = this.props;
const graphData = resultSet.interpretation?.data?.dot[offset];
if (!graphData) {
return <>
<div className={graphClassName}>Graph is not available.</div>
</>;
}
return <>
<div className={graphClassName}>
<strong>Warning:</strong> The Graph Viewer is not a publicly released feature and will crash on large graphs.
</div>
<div id={graphId} className={graphClassName}><span>Rendering graph...</span></div>
</>;
};
public componentDidMount = () => {
this.renderGraph();
};
public componentDidUpdate = () => {
this.renderGraph();
};
private renderGraph = () => {
const { databaseUri, resultSet, offset } = this.props;
const graphData = resultSet.interpretation?.data?.dot[offset];
if (!graphData) {
return;
}
const options = {
fit: true,
fade: false,
growEnteringEdges: false,
zoom: true,
};
const element = document.querySelector(`#${graphId}`);
if (!element) {
return;
}
element.firstChild?.remove();
const color = getComputedStyle(element).color;
const backgroundColor = getComputedStyle(element).backgroundColor;
const borderColor = getComputedStyle(element).borderColor;
let firstPolygon = true;
graphviz(`#${graphId}`)
.options(options)
.attributer(function(d) {
if (d.tag == 'a') {
const url = d.attributes['xlink:href'] || d.attributes['href'];
const loc = tryGetLocationFromString(url);
if (loc !== undefined) {
d.attributes['xlink:href'] = '#';
d.attributes['href'] = '#';
loc.uri = 'file://' + loc.uri;
d3.select(this).on('click', function(e) { jumpToLocation(loc, databaseUri); });
}
}
if ('fill' in d.attributes) {
d.attributes.fill = d.tag == 'text' ? color : backgroundColor;
}
if ('stroke' in d.attributes) {
// There is no proper way to identify the element containing the graph (which we
// don't want a border around), as it is just has tag 'polygon'. Instead we assume
// that the first polygon we see is that element
if (d.tag != 'polygon' || !firstPolygon) {
d.attributes.stroke = borderColor;
} else {
firstPolygon = false;
}
}
})
.renderDot(graphData);
};
}
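The component above leans on the d3-graphviz renderer. For orientation, a minimal hedged sketch of that library call outside React; the container id and DOT string are placeholders, and only the `graphviz(...).options(...).renderDot(...)` chain already used in the component is assumed to exist in the library.

import { graphviz } from 'd3-graphviz';

// Hedged sketch: render a DOT string into an existing container element.
const dotSource = 'digraph { a -> b; b -> c; }';   // placeholder graph
graphviz('#graph-results')              // same container id the component uses
  .options({ fit: true, zoom: true })   // subset of the options passed above
  .renderDot(dotSource);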
@@ -8,12 +8,14 @@ import {
InterpretedResultsSortState,
ResultSet,
ALERTS_TABLE_NAME,
GRAPH_TABLE_NAME,
SELECT_TABLE_NAME,
getDefaultResultSetName,
ParsedResultSets,
IntoResultsViewMsg,
} from '../pure/interface-types';
import { PathTable } from './alert-table';
import { Graph } from './graph';
import { RawTable } from './raw-results-table';
import {
ResultTableProps,
@@ -61,8 +63,8 @@ function getResultCount(resultSet: ResultSet): number {
switch (resultSet.t) {
case 'RawResultSet':
return resultSet.schema.rows;
case 'SarifResultSet':
return resultSet.numTotalResults;
case 'InterpretedResultSet':
return resultSet.interpretation.numTotalResults;
}
}
@@ -87,27 +89,32 @@ export class ResultTables
this.props.rawResultSets.map((rs) => ({ t: 'RawResultSet', ...rs }));
if (this.props.interpretation != undefined) {
const tableName = this.getInterpretedTableName();
resultSets.push({
t: 'SarifResultSet',
t: 'InterpretedResultSet',
// FIXME: The values of version, columns, tupleCount are
// unused stubs because a SarifResultSet schema isn't used the
// unused stubs because a InterpretedResultSet schema isn't used the
// same way as a RawResultSet. Probably should pull `name` field
// out.
schema: {
name: ALERTS_TABLE_NAME,
name: tableName,
rows: 1,
columns: []
},
name: ALERTS_TABLE_NAME,
...this.props.interpretation,
name: tableName,
interpretation: this.props.interpretation,
});
}
return resultSets;
}
private getInterpretedTableName(): string {
return this.props.interpretation?.data.t === 'GraphInterpretationData' ? GRAPH_TABLE_NAME : ALERTS_TABLE_NAME;
}
private getResultSetNames(): string[] {
return this.props.interpretation
? this.props.parsedResultSets.resultSetNames.concat([ALERTS_TABLE_NAME])
? this.props.parsedResultSets.resultSetNames.concat([this.getInterpretedTableName()])
: this.props.parsedResultSets.resultSetNames;
}
@@ -349,8 +356,19 @@ class ResultTable extends React.Component<ResultTableProps, Record<string, never
switch (resultSet.t) {
case 'RawResultSet': return <RawTable
{...this.props} resultSet={resultSet} />;
case 'SarifResultSet': return <PathTable
{...this.props} resultSet={resultSet} />;
case 'InterpretedResultSet': {
const data = resultSet.interpretation.data;
switch (data.t) {
case 'SarifInterpretationData': {
const sarifResultSet = { ...resultSet, interpretation: { ...resultSet.interpretation, data } };
return <PathTable {...this.props} resultSet={sarifResultSet} />;
}
case 'GraphInterpretationData': {
const grapResultSet = { ...resultSet, interpretation: { ...resultSet.interpretation, data } };
return <Graph {...this.props} resultSet={grapResultSet} />;
}
}
}
}
}
}
@@ -11,6 +11,7 @@ import {
QueryMetadata,
ResultsPaths,
ALERTS_TABLE_NAME,
GRAPH_TABLE_NAME,
ParsedResultSets,
} from '../pure/interface-types';
import { EventHandlers as EventHandlerList } from './event-handler-list';
@@ -104,7 +105,9 @@ class App extends React.Component<Record<string, never>, ResultsViewState> {
void this.loadResults();
break;
case 'showInterpretedPage':
case 'showInterpretedPage': {
const tableName = msg.interpretation.data.t === 'GraphInterpretationData' ? GRAPH_TABLE_NAME : ALERTS_TABLE_NAME;
this.updateStateWithNewResultsInfo({
resultsPath: '', // FIXME: Not used for interpreted, refactor so this is not needed
parsedResultSets: {
@@ -114,16 +117,16 @@ class App extends React.Component<Record<string, never>, ResultsViewState> {
resultSetNames: msg.resultSetNames,
pageNumber: msg.pageNumber,
resultSet: {
t: 'SarifResultSet',
name: ALERTS_TABLE_NAME,
t: 'InterpretedResultSet',
name: tableName,
schema: {
name: ALERTS_TABLE_NAME,
name: tableName,
rows: 1,
columns: []
},
...msg.interpretation,
interpretation: msg.interpretation,
},
selectedTable: ALERTS_TABLE_NAME,
selectedTable: tableName,
},
origResultsPaths: undefined as any, // FIXME: Not used for interpreted, refactor so this is not needed
sortedResultsMap: new Map(), // FIXME: Not used for interpreted, refactor so this is not needed
@@ -136,6 +139,7 @@ class App extends React.Component<Record<string, never>, ResultsViewState> {
});
void this.loadResults();
break;
}
case 'resultsUpdating':
this.setState({
isExpectingResultsUpdate: true,
@@ -191,7 +195,7 @@ class App extends React.Component<Record<string, never>, ResultsViewState> {
const resultSet = parsedResultSets.resultSet;
if (!resultSet.t) {
throw new Error(
'Missing result set type. Should be either "SarifResultSet" or "RawResultSet".'
'Missing result set type. Should be either "InterpretedResultSet" or "RawResultSet".'
);
}
return [resultSet];
@@ -260,6 +264,8 @@ class App extends React.Component<Record<string, never>, ResultsViewState> {
) {
const parsedResultSets = displayedResults.resultsInfo.parsedResultSets;
const key = (parsedResultSets.selectedTable || '') + parsedResultSets.pageNumber;
const data = displayedResults.resultsInfo.interpretation?.data;
return (
<ResultTables
key={key}
@@ -279,9 +285,7 @@ class App extends React.Component<Record<string, never>, ResultsViewState> {
: undefined
}
sortStates={displayedResults.results.sortStates}
interpretedSortState={
displayedResults.resultsInfo.interpretation?.sortState
}
interpretedSortState={data?.t == 'SarifInterpretationData' ? data.sortState : undefined}
isLoadingNewResults={
this.state.isExpectingResultsUpdate ||
this.state.nextResultsInfo !== null
@@ -298,6 +302,7 @@ class App extends React.Component<Record<string, never>, ResultsViewState> {
componentDidMount(): void {
this.vscodeMessageHandler = this.vscodeMessageHandler.bind(this);
window.addEventListener('message', this.vscodeMessageHandler);
vscode.postMessage({ t: 'resultViewLoaded' });
}
componentWillUnmount(): void {
@@ -316,5 +321,3 @@ class App extends React.Component<Record<string, never>, ResultsViewState> {
}
Rdom.render(<App />, document.getElementById('root'));
vscode.postMessage({ t: 'resultViewLoaded' });
@@ -134,6 +134,14 @@ select {
font-size: inherit;
}
.vscode-codeql__result-tables-graph {
background-color: transparent;
border-color: var(--vscode-dropdown-border);
color: var(--vscode-editor-foreground);
text-align: center;
width: 100%;
}
.vscode-codeql__result-tables-updating-text {
margin-left: 1em;
}
@@ -1,23 +0,0 @@
import * as path from 'path';
import * as fs from 'fs-extra';
/**
* Recursively walk a directory and return the full path to all files found.
* Note that this function uses synchronous fs calls, so it should only be used in tests.
*
* @param dir the directory to walk
*
* @return An iterator of the full path to all files recursively found in the directory.
*/
export function* walk(dir: string): IterableIterator<string> {
const files = fs.readdirSync(dir);
for (const file of files) {
const filePath = path.join(dir, file);
const stat = fs.statSync(filePath);
if (stat.isDirectory()) {
yield* walk(filePath);
} else {
yield filePath;
}
}
}
@@ -65,8 +65,6 @@ describe('queryResolver', () => {
it('should throw an error when there are no queries found', async () => {
mockCli.resolveQueriesInSuite.returns([]);
// TODO: Figure out why chai-as-promised isn't failing the test on an
// unhandled rejection.
try {
await module.resolveQueries(mockCli, { dbschemePack: 'my-qlpack' }, KeyType.DefinitionQuery);
// should reject
@@ -1,11 +1,23 @@
import { expect } from 'chai';
import 'mocha';
import { EnvironmentVariableCollection, EnvironmentVariableMutator, Event, ExtensionContext, ExtensionMode, Memento, SecretStorage, SecretStorageChangeEvent, Uri, window } from 'vscode';
import {
EnvironmentVariableCollection,
EnvironmentVariableMutator,
Event,
ExtensionContext,
ExtensionMode,
Memento,
SecretStorage,
SecretStorageChangeEvent,
Uri,
window
} from 'vscode';
import * as yaml from 'js-yaml';
import * as tmp from 'tmp';
import * as path from 'path';
import * as fs from 'fs-extra';
import * as sinon from 'sinon';
import { DirResult } from 'tmp';
import {
getInitialQueryContents,
@@ -13,7 +25,8 @@ import {
isLikelyDbLanguageFolder,
showBinaryChoiceDialog,
showBinaryChoiceWithUrlDialog,
showInformationMessageWithAction
showInformationMessageWithAction,
walkDirectory
} from '../../helpers';
import { reportStreamProgress } from '../../commandRunner';
import Sinon = require('sinon');
@@ -377,3 +390,68 @@ describe('helpers', () => {
});
});
});
describe('walkDirectory', () => {
let tmpDir: DirResult;
let dir: string;
let dir2: string;
beforeEach(() => {
tmpDir = tmp.dirSync({ unsafeCleanup: true });
dir = path.join(tmpDir.name, 'dir');
fs.ensureDirSync(dir);
dir2 = path.join(tmpDir.name, 'dir2');
});
afterEach(() => {
tmpDir.removeCallback();
});
it('should walk a directory', async () => {
const file1 = path.join(dir, 'file1');
const file2 = path.join(dir, 'file2');
const file3 = path.join(dir, 'file3');
const dir3 = path.join(dir, 'dir3');
const file4 = path.join(dir, 'file4');
const file5 = path.join(dir, 'file5');
const file6 = path.join(dir, 'file6');
// These symlinks link back to paths that are already existing, so ignore.
const symLinkFile7 = path.join(dir, 'symlink0');
const symlinkDir = path.join(dir2, 'symlink1');
// some symlinks that point outside of the base dir.
const file8 = path.join(tmpDir.name, 'file8');
const file9 = path.join(dir2, 'file8');
const symlinkDir2 = path.join(dir2, 'symlink2');
const symlinkFile2 = path.join(dir2, 'symlinkFile3');
fs.ensureDirSync(dir2);
fs.ensureDirSync(dir3);
fs.writeFileSync(file1, 'file1');
fs.writeFileSync(file2, 'file2');
fs.writeFileSync(file3, 'file3');
fs.writeFileSync(file4, 'file4');
fs.writeFileSync(file5, 'file5');
fs.writeFileSync(file6, 'file6');
fs.writeFileSync(file8, 'file8');
fs.writeFileSync(file9, 'file9');
// We don't really need to be testing all of these variants of symlinks,
// but it doesn't hurt, and will help us if we ever do decide to support them.
fs.symlinkSync(file6, symLinkFile7, 'file');
fs.symlinkSync(dir3, symlinkDir, 'dir');
fs.symlinkSync(file8, symlinkFile2, 'file');
fs.symlinkSync(dir2, symlinkDir2, 'dir');
const files = [];
for await (const file of walkDirectory(dir)) {
files.push(file);
}
// Only real files should be returned.
expect(files.sort()).to.deep.eq([file1, file2, file3, file4, file5, file6]);
});
});
@@ -5,7 +5,7 @@ import 'mocha';
import 'sinon-chai';
import * as sinon from 'sinon';
import * as chaiAsPromised from 'chai-as-promised';
import { LocalQueryInfo, InitialQueryInfo, interpretResults } from '../../query-results';
import { LocalQueryInfo, InitialQueryInfo, interpretResultsSarif } from '../../query-results';
import { QueryEvaluationInfo, QueryWithResults } from '../../run-queries';
import { QueryHistoryConfig } from '../../config';
import { EvaluationResult, QueryResultType } from '../../pure/messages';
@@ -189,11 +189,11 @@ describe('query-results', () => {
});
});
it('should interpretResults', async () => {
it('should interpretResultsSarif', async () => {
const spy = sandbox.mock();
spy.returns('1234');
spy.returns({ a: '1234' });
const mockServer = {
interpretBqrs: spy
interpretBqrsSarif: spy
} as unknown as CodeQLCliServer;
const interpretedResultsPath = path.join(tmpDir.name, 'interpreted.json');
@@ -204,7 +204,7 @@ describe('query-results', () => {
id: 'my-id' as string | undefined,
scored: undefined
};
const results1 = await interpretResults(
const results1 = await interpretResultsSarif(
mockServer,
metadata,
{
@@ -213,7 +213,7 @@ describe('query-results', () => {
sourceInfo as SourceInfo
);
expect(results1).to.eq('1234');
expect(results1).to.deep.eq({ a: '1234', t: 'SarifInterpretationData' });
expect(spy).to.have.been.calledWith(
metadata,
resultsPath, interpretedResultsPath, sourceInfo
@@ -221,9 +221,9 @@ describe('query-results', () => {
// Try again, but with no id
spy.reset();
spy.returns('1234');
spy.returns({ a: '1234' });
delete metadata.id;
const results2 = await interpretResults(
const results2 = await interpretResultsSarif(
mockServer,
metadata,
{
@@ -231,7 +231,7 @@ describe('query-results', () => {
},
sourceInfo as SourceInfo
);
expect(results2).to.eq('1234');
expect(results2).to.deep.eq({ a: '1234', t: 'SarifInterpretationData' });
expect(spy).to.have.been.calledWith(
{ kind: 'my-kind', id: 'dummy-id', scored: undefined },
resultsPath, interpretedResultsPath, sourceInfo
@@ -242,7 +242,7 @@ describe('query-results', () => {
fs.writeFileSync(interpretedResultsPath, JSON.stringify({
a: 6
}), 'utf8');
const results3 = await interpretResults(
const results3 = await interpretResultsSarif(
mockServer,
metadata,
{
@@ -250,7 +250,7 @@ describe('query-results', () => {
},
sourceInfo as SourceInfo
);
expect(results3).to.deep.eq({ a: 6 });
expect(results3).to.deep.eq({ a: 6, t: 'SarifInterpretationData' });
});
describe('splat and slurp', () => {
@@ -17,7 +17,7 @@ import { AnalysesResultsManager } from '../../remote-queries/analyses-results-ma
import { RemoteQueryResult } from '../../remote-queries/shared/remote-query-result';
import { DisposableBucket } from '../disposable-bucket';
import { testDisposeHandler } from '../test-dispose-handler';
import { walk } from '../directory-walker';
import { walkDirectory } from '../../helpers';
chai.use(chaiAsPromised);
const expect = chai.expect;
@@ -41,13 +41,19 @@ describe('Remote queries and query history manager', function() {
let showTextDocumentSpy: sinon.SinonSpy;
let openTextDocumentSpy: sinon.SinonSpy;
beforeEach(() => {
beforeEach(async function() {
// set a higher timeout since recursive delete below may take a while, expecially on Windows.
this.timeout(120000);
// Since these tests change the state of the query history manager, we need to copy the original
// to a temporary folder where we can manipulate it for tests
copyHistoryState();
await copyHistoryState();
});
afterEach(() => {
afterEach(function() {
// set a higher timeout since recursive delete below may take a while, expecially on Windows.
this.timeout(120000);
deleteHistoryState();
});
@@ -321,18 +327,23 @@ describe('Remote queries and query history manager', function() {
});
});
function copyHistoryState() {
async function copyHistoryState() {
fs.ensureDirSync(STORAGE_DIR);
fs.copySync(path.join(__dirname, 'data/remote-queries/'), path.join(tmpDir.name, 'remote-queries'));
// also, replace the files with "PLACEHOLDER" so that they have the correct directory
for (const p of walk(STORAGE_DIR)) {
for await (const p of walkDirectory(STORAGE_DIR)) {
replacePlaceholder(path.join(p));
}
}
function deleteHistoryState() {
fs.removeSync(STORAGE_DIR);
fs.rmSync(STORAGE_DIR, {
recursive: true,
force: true,
maxRetries: 10,
retryDelay: 100
});
}
function replacePlaceholder(filePath: string) {
@@ -4,15 +4,27 @@ import 'mocha';
import 'sinon-chai';
import * as sinon from 'sinon';
import * as chaiAsPromised from 'chai-as-promised';
import { Uri } from 'vscode';
import { QueryEvaluationInfo } from '../../run-queries';
import { Severity, compileQuery } from '../../pure/messages';
import { Uri } from 'vscode';
import * as config from '../../config';
chai.use(chaiAsPromised);
const expect = chai.expect;
describe('run-queries', () => {
let sandbox: sinon.SinonSandbox;
beforeEach(() => {
sandbox = sinon.createSandbox();
sandbox.stub(config, 'isCanary').returns(false);
});
afterEach(() => {
sandbox.restore();
});
it('should create a QueryEvaluationInfo', () => {
const saveDir = 'query-save-dir';
const info = createMockQueryInfo(true, saveDir);
@@ -38,6 +50,13 @@ describe('run-queries', () => {
info.metadata!.kind = 'table';
expect(info.canHaveInterpretedResults()).to.eq(false);
// Graphs are not interpreted unless canary is set
info.metadata!.kind = 'graph';
expect(info.canHaveInterpretedResults()).to.eq(false);
(config.isCanary as sinon.SinonStub).returns(true);
expect(info.canHaveInterpretedResults()).to.eq(true);
});
describe('compile', () => {
@@ -108,7 +127,7 @@ describe('run-queries', () => {
config: {
timeoutSecs: 5
},
sendRequest: sinon.stub().returns(new Promise(resolve => {
sendRequest: sandbox.stub().returns(new Promise(resolve => {
resolve({
messages: [
{ message: 'err', severity: Severity.ERROR },
@@ -117,7 +136,7 @@ describe('run-queries', () => {
});
})),
logger: {
log: sinon.spy()
log: sandbox.spy()
}
};
}
@@ -1,5 +1,6 @@
import { fail } from 'assert';
import { expect } from 'chai';
import { asyncFilter } from '../../src/pure/helpers-pure';
describe('helpers-pure', () => {