Merge remote-tracking branch 'origin/main' into koesie10/resolve-data-extensions-editor-queries
.github/codeql/queries/assert-pure.ql (vendored, 33 changed lines)

@@ -1,21 +1,40 @@
 /**
  * @name Unwanted dependency on vscode API
- * @kind problem
+ * @kind path-problem
  * @problem.severity error
  * @id vscode-codeql/assert-pure
  * @description The modules stored under `pure` and tested in the `pure-tests`
  * are intended to be "pure".
  */

 import javascript

-class VSCodeImport extends ASTNode {
-  VSCodeImport() {
-    this.(Import).getImportedPath().getValue() = "vscode"
+class VSCodeImport extends ImportDeclaration {
+  VSCodeImport() { this.getImportedPath().getValue() = "vscode" }
+}
+
+class PureFile extends File {
+  PureFile() {
+    (
+      this.getRelativePath().regexpMatch(".*/src/pure/.*") or
+      this.getRelativePath().regexpMatch(".*/src/common/.*")
+    ) and
+    not this.getRelativePath().regexpMatch(".*/vscode/.*")
   }
 }

+Import getANonTypeOnlyImport(Module m) {
+  result = m.getAnImport() and not result.(ImportDeclaration).isTypeOnly()
+}
+
+query predicate edges(AstNode a, AstNode b) {
+  getANonTypeOnlyImport(a) = b or
+  a.(Import).getImportedModule() = b
+}
+
 from Module m, VSCodeImport v
 where
-  m.getFile().getRelativePath().regexpMatch(".*src/pure/.*") and
-  m.getAnImportedModule*().getAnImport() = v
-select m, "This module is not pure: it has a transitive dependency on the vscode API imported $@", v, "here"
+  m.getFile() instanceof PureFile and
+  edges+(m, v)
+select m, m, v,
+  "This module is not pure: it has a transitive dependency on the vscode API imported $@", v, "here"
@@ -3,6 +3,10 @@
 ## [UNRELEASED]

 - Add settings `codeQL.variantAnalysis.defaultResultsFilter` and `codeQL.variantAnalysis.defaultResultsSort` for configuring how variant analysis results are filtered and sorted in the results view. The default is to show all repositories, and to sort by the number of results. [#2392](https://github.com/github/vscode-codeql/pull/2392)
+- Fix bug to ensure error messages have complete stack trace in message logs. [#2425](https://github.com/github/vscode-codeql/pull/2425)
+- Fix bug where the `CodeQL: Compare Query` command did not work for comparing quick-eval queries. [#2422](https://github.com/github/vscode-codeql/pull/2422)
+- Update text of copy and export buttons in variant analysis results view to clarify that they only copy/export the selected/filtered results. [#2427](https://github.com/github/vscode-codeql/pull/2427)
+- Add warning when using unsupported CodeQL CLI version. [#2428](https://github.com/github/vscode-codeql/pull/2428)

 ## 1.8.4 - 3 May 2023
@@ -516,6 +516,10 @@
       "title": "Add new list",
       "icon": "$(new-folder)"
     },
+    {
+      "command": "codeQLVariantAnalysisRepositories.importFromCodeSearch",
+      "title": "Add repositories with GitHub Code Search"
+    },
     {
       "command": "codeQLVariantAnalysisRepositories.setSelectedItem",
       "title": "Select"
@@ -961,6 +965,11 @@
       "when": "view == codeQLVariantAnalysisRepositories && viewItem =~ /canBeOpenedOnGitHub/",
       "group": "2_qlContextMenu@1"
     },
+    {
+      "command": "codeQLVariantAnalysisRepositories.importFromCodeSearch",
+      "when": "view == codeQLVariantAnalysisRepositories && viewItem =~ /canImportCodeSearch/",
+      "group": "2_qlContextMenu@1"
+    },
     {
       "command": "codeQLDatabases.setCurrentDatabase",
       "group": "inline",
@@ -1297,6 +1306,10 @@
       "command": "codeQLVariantAnalysisRepositories.removeItemContextMenu",
       "when": "false"
     },
+    {
+      "command": "codeQLVariantAnalysisRepositories.importFromCodeSearch",
+      "when": "false"
+    },
     {
       "command": "codeQLDatabases.setCurrentDatabase",
       "when": "false"
@@ -1593,6 +1606,10 @@
       "view": "codeQLQueryHistory",
       "contents": "You have no query history items at the moment.\n\nSelect a database to run a CodeQL query and get your first results."
     },
+    {
+      "view": "codeQLQueries",
+      "contents": "This workspace doesn't contain any CodeQL queries at the moment."
+    },
     {
       "view": "codeQLDatabases",
       "contents": "Add a CodeQL database:\n[From a folder](command:codeQLDatabases.chooseDatabaseFolder)\n[From an archive](command:codeQLDatabases.chooseDatabaseArchive)\n[From a URL (as a zip file)](command:codeQLDatabases.chooseDatabaseInternet)\n[From GitHub](command:codeQLDatabases.chooseDatabaseGithub)"
@@ -134,6 +134,11 @@ export interface SourceInfo {
   sourceLocationPrefix: string;
 }

+/**
+ * The expected output of `codeql resolve queries`.
+ */
+export type ResolvedQueries = string[];
+
 /**
  * The expected output of `codeql resolve tests`.
  */
@@ -213,7 +218,7 @@ export class CodeQLCliServer implements Disposable {
     private readonly app: App,
     private distributionProvider: DistributionProvider,
     private cliConfig: CliConfig,
-    private logger: Logger,
+    public readonly logger: Logger,
   ) {
     this.commandQueue = [];
     this.commandInProcess = false;
@@ -325,6 +330,7 @@ export class CodeQLCliServer implements Disposable {
     commandArgs: string[],
     description: string,
     onLine?: OnLineCallback,
+    silent?: boolean,
   ): Promise<string> {
     const stderrBuffers: Buffer[] = [];
     if (this.commandInProcess) {
@@ -344,7 +350,12 @@ export class CodeQLCliServer implements Disposable {
     // Compute the full args array
     const args = command.concat(LOGGING_FLAGS).concat(commandArgs);
     const argsString = args.join(" ");
-    void this.logger.log(`${description} using CodeQL CLI: ${argsString}...`);
+    // If we are running silently, we don't want to print anything to the console.
+    if (!silent) {
+      void this.logger.log(
+        `${description} using CodeQL CLI: ${argsString}...`,
+      );
+    }
     try {
       await new Promise<void>((resolve, reject) => {
         // Start listening to stdout
@@ -390,24 +401,30 @@ export class CodeQLCliServer implements Disposable {
       const fullBuffer = Buffer.concat(stdoutBuffers);
       // Make sure we remove the terminator;
       const data = fullBuffer.toString("utf8", 0, fullBuffer.length - 1);
-      void this.logger.log("CLI command succeeded.");
+      if (!silent) {
+        void this.logger.log("CLI command succeeded.");
+      }
       return data;
     } catch (err) {
       // Kill the process if it isn't already dead.
       this.killProcessIfRunning();
-      // Report the error (if there is a stderr then use that otherwise just report the error cod or nodejs error)
+      // Report the error (if there is a stderr then use that otherwise just report the error code or nodejs error)
       const newError =
         stderrBuffers.length === 0
-          ? new Error(`${description} failed: ${err}`)
+          ? new Error(
+              `${description} failed with args:${EOL} ${argsString}${EOL}${err}`,
+            )
          : new Error(
-              `${description} failed: ${Buffer.concat(stderrBuffers).toString(
-                "utf8",
-              )}`,
+              `${description} failed with args:${EOL} ${argsString}${EOL}${Buffer.concat(
+                stderrBuffers,
+              ).toString("utf8")}`,
            );
       newError.stack += getErrorStack(err);
       throw newError;
     } finally {
-      void this.logger.log(Buffer.concat(stderrBuffers).toString("utf8"));
+      if (!silent) {
+        void this.logger.log(Buffer.concat(stderrBuffers).toString("utf8"));
+      }
       // Remove the listeners we set up.
       process.stdout.removeAllListeners("data");
       process.stderr.removeAllListeners("data");
@@ -544,9 +561,11 @@ export class CodeQLCliServer implements Disposable {
     {
       progressReporter,
       onLine,
+      silent = false,
     }: {
       progressReporter?: ProgressReporter;
       onLine?: OnLineCallback;
+      silent?: boolean;
     } = {},
   ): Promise<string> {
     if (progressReporter) {
@@ -562,6 +581,7 @@ export class CodeQLCliServer implements Disposable {
           commandArgs,
           description,
           onLine,
+          silent,
         ).then(resolve, reject);
       } catch (err) {
         reject(err);
@@ -595,10 +615,12 @@ export class CodeQLCliServer implements Disposable {
       addFormat = true,
       progressReporter,
       onLine,
+      silent = false,
     }: {
       addFormat?: boolean;
       progressReporter?: ProgressReporter;
       onLine?: OnLineCallback;
+      silent?: boolean;
     } = {},
   ): Promise<OutputType> {
     let args: string[] = [];
@@ -609,6 +631,7 @@ export class CodeQLCliServer implements Disposable {
     const result = await this.runCodeQlCliCommand(command, args, description, {
       progressReporter,
       onLine,
+      silent,
     });
     try {
       return JSON.parse(result) as OutputType;
@@ -731,6 +754,25 @@ export class CodeQLCliServer implements Disposable {
     );
   }

+  /**
+   * Finds all available queries in a given directory.
+   * @param queryDir Root of directory tree to search for queries.
+   * @param silent If true, don't print logs to the CodeQL extension log.
+   * @returns The list of queries that were found.
+   */
+  public async resolveQueries(
+    queryDir: string,
+    silent?: boolean,
+  ): Promise<ResolvedQueries> {
+    const subcommandArgs = [queryDir];
+    return await this.runJsonCodeQlCliCommand<ResolvedQueries>(
+      ["resolve", "queries"],
+      subcommandArgs,
+      "Resolving queries",
+      { silent },
+    );
+  }
+
   /**
    * Finds all available QL tests in a given directory.
    * @param testPath Root of directory tree to search for tests.
@@ -1031,6 +1073,7 @@ export class CodeQLCliServer implements Disposable {
     resultsPath: string,
     interpretedResultsPath: string,
     sourceInfo?: SourceInfo,
+    args?: string[],
   ): Promise<sarif.Log> {
     const additionalArgs = [
       // TODO: This flag means that we don't group interpreted results
@@ -1038,6 +1081,7 @@ export class CodeQLCliServer implements Disposable {
       // interpretation with and without this flag, or do some
       // grouping client-side.
       "--no-group-results",
+      ...(args ?? []),
     ];

     await this.runInterpretCommand(
@@ -1737,6 +1781,10 @@ export function shouldDebugCliServer() {
 }

 export class CliVersionConstraint {
+  // The oldest version of the CLI that we support. This is used to determine
+  // whether to show a warning about the CLI being too old on startup.
+  public static OLDEST_SUPPORTED_CLI_VERSION = new SemVer("2.7.6");
+
   /**
    * CLI version where building QLX packs for remote queries is supported.
    * (The options were _accepted_ by a few earlier versions, but only from
@@ -1795,6 +1843,8 @@ export class CliVersionConstraint {
     "2.12.4",
   );

+  public static CLI_VERSION_GLOBAL_CACHE = new SemVer("2.12.4");
+
   constructor(private readonly cli: CodeQLCliServer) {
     /**/
   }
@@ -1864,4 +1914,8 @@ export class CliVersionConstraint {
       CliVersionConstraint.CLI_VERSION_WITH_ADDITIONAL_PACKS_INSTALL,
     );
   }
+
+  async usesGlobalCompilationCache() {
+    return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_GLOBAL_CACHE);
+  }
 }
@@ -6,12 +6,7 @@ import * as semver from "semver";
 import { URL } from "url";
 import { ExtensionContext, Event } from "vscode";
 import { DistributionConfig } from "../config";
-import {
-  InvocationRateLimiter,
-  InvocationRateLimiterResultKind,
-  showAndLogErrorMessage,
-  showAndLogWarningMessage,
-} from "../helpers";
+import { showAndLogErrorMessage, showAndLogWarningMessage } from "../helpers";
 import { extLogger } from "../common";
 import { getCodeQlCliVersion } from "./cli-version";
 import {
@@ -24,6 +19,10 @@ import {
   extractZipArchive,
   getRequiredAssetName,
 } from "../pure/distribution";
+import {
+  InvocationRateLimiter,
+  InvocationRateLimiterResultKind,
+} from "../common/invocation-rate-limiter";

 /**
  * distribution.ts
@@ -76,7 +75,7 @@ export class DistributionManager implements DistributionProvider {
       extensionContext,
     );
     this.updateCheckRateLimiter = new InvocationRateLimiter(
-      extensionContext,
+      extensionContext.globalState,
       "extensionSpecificDistributionUpdateCheck",
       () =>
         this.extensionSpecificDistributionManager.checkForUpdatesToDistribution(),
@@ -4,6 +4,11 @@ import { AppEventEmitter } from "./events";
 import { Logger } from "./logging";
 import { Memento } from "./memento";
 import { AppCommandManager } from "./commands";
+import type {
+  WorkspaceFolder,
+  Event,
+  WorkspaceFoldersChangeEvent,
+} from "vscode";

 export interface App {
   createEventEmitter<T>(): AppEventEmitter<T>;
@@ -14,6 +19,8 @@ export interface App {
   readonly globalStoragePath: string;
   readonly workspaceStoragePath?: string;
   readonly workspaceState: Memento;
+  readonly workspaceFolders: readonly WorkspaceFolder[] | undefined;
+  readonly onDidChangeWorkspaceFolders: Event<WorkspaceFoldersChangeEvent>;
   readonly credentials: Credentials;
   readonly commands: AppCommandManager;
 }
@@ -251,6 +251,9 @@ export type VariantAnalysisCommands = {
   "codeQL.monitorRehydratedVariantAnalysis": (
     variantAnalysis: VariantAnalysis,
   ) => Promise<void>;
+  "codeQL.monitorReauthenticatedVariantAnalysis": (
+    variantAnalysis: VariantAnalysis,
+  ) => Promise<void>;
   "codeQL.openVariantAnalysisLogs": (
     variantAnalysisId: number,
   ) => Promise<void>;
@@ -272,6 +275,7 @@ export type DatabasePanelCommands = {
   "codeQLVariantAnalysisRepositories.openOnGitHubContextMenu": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
   "codeQLVariantAnalysisRepositories.renameItemContextMenu": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
   "codeQLVariantAnalysisRepositories.removeItemContextMenu": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
+  "codeQLVariantAnalysisRepositories.importFromCodeSearch": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
 };

 export type AstCfgCommands = {
@@ -1,6 +1,6 @@
 import { DisposableObject } from "../pure/disposable-object";
-import { extLogger } from "./logging/vscode/loggers";
 import { getErrorMessage } from "../pure/helpers-pure";
+import { Logger } from "./logging";

 /**
  * Base class for "discovery" operations, which scan the file system to find specific kinds of
@@ -8,18 +8,28 @@ import { getErrorMessage } from "../pure/helpers-pure";
  * same time.
  */
 export abstract class Discovery<T> extends DisposableObject {
-  private retry = false;
-  private discoveryInProgress = false;
+  private restartWhenFinished = false;
+  private currentDiscoveryPromise: Promise<void> | undefined;

-  constructor(private readonly name: string) {
+  constructor(private readonly name: string, private readonly logger: Logger) {
     super();
   }

+  /**
+   * Returns the promise of the currently running refresh operation, if one is in progress.
+   * Otherwise returns a promise that resolves immediately.
+   */
+  public waitForCurrentRefresh(): Promise<void> {
+    return this.currentDiscoveryPromise ?? Promise.resolve();
+  }
+
   /**
    * Force the discovery process to run. Normally invoked by the derived class when a relevant file
    * system change is detected.
+   *
+   * Returns a promise that resolves when the refresh is complete, including any retries.
    */
-  public refresh(): void {
+  public refresh(): Promise<void> {
     // We avoid having multiple discovery operations in progress at the same time. Otherwise, if we
     // got a storm of refresh requests due to, say, the copying or deletion of a large directory
     // tree, we could potentially spawn a separate simultaneous discovery operation for each
@@ -36,14 +46,16 @@ export abstract class Discovery<T> extends DisposableObject {
     // other change notifications that might be coming along. However, this would create more
     // latency in the common case, in order to save a bit of latency in the uncommon case.

-    if (this.discoveryInProgress) {
+    if (this.currentDiscoveryPromise !== undefined) {
       // There's already a discovery operation in progress. Tell it to restart when it's done.
-      this.retry = true;
+      this.restartWhenFinished = true;
     } else {
       // No discovery in progress, so start one now.
-      this.discoveryInProgress = true;
-      this.launchDiscovery();
+      this.currentDiscoveryPromise = this.launchDiscovery().finally(() => {
+        this.currentDiscoveryPromise = undefined;
+      });
     }
+    return this.currentDiscoveryPromise;
   }

   /**
@@ -51,34 +63,31 @@ export abstract class Discovery<T> extends DisposableObject {
   * discovery operation completes, the `update` function will be invoked with the results of the
   * discovery.
   */
-  private launchDiscovery(): void {
-    const discoveryPromise = this.discover();
-    discoveryPromise
-      .then((results) => {
-        if (!this.retry) {
-          // Update any listeners with the results of the discovery.
-          this.discoveryInProgress = false;
-          this.update(results);
-        }
-      })
-      .catch((err: unknown) => {
-        void extLogger.log(
-          `${this.name} failed. Reason: ${getErrorMessage(err)}`,
-        );
-      })
-      .finally(() => {
-        if (this.retry) {
-          // Another refresh request came in while we were still running a previous discovery
-          // operation. Since the discovery results we just computed are now stale, we'll launch
-          // another discovery operation instead of updating.
-          // Note that by doing this inside of `finally`, we will relaunch discovery even if the
-          // initial discovery operation failed.
-          this.retry = false;
-          this.launchDiscovery();
-        }
-      });
+  private async launchDiscovery(): Promise<void> {
+    let results: T | undefined;
+    try {
+      results = await this.discover();
+    } catch (err) {
+      void this.logger.log(
+        `${this.name} failed. Reason: ${getErrorMessage(err)}`,
+      );
+      results = undefined;
+    }
+
+    if (this.restartWhenFinished) {
+      // Another refresh request came in while we were still running a previous discovery
+      // operation. Since the discovery results we just computed are now stale, we'll launch
+      // another discovery operation instead of updating.
+      // We want to relaunch discovery regardless of if the initial discovery operation
+      // succeeded or failed.
+      this.restartWhenFinished = false;
+      await this.launchDiscovery();
+    } else {
+      // If the discovery was successful, then update any listeners with the results.
+      if (results !== undefined) {
+        this.update(results);
+      }
+    }
   }

   /**
@@ -4,7 +4,7 @@ export interface AppEvent<T> {
   (listener: (event: T) => void): Disposable;
 }

-export interface AppEventEmitter<T> {
+export interface AppEventEmitter<T> extends Disposable {
   event: AppEvent<T>;
   fire(data: T): void;
 }
extensions/ql-vscode/src/common/file-tree-nodes.ts (new file, 106 lines)

@@ -0,0 +1,106 @@
+import { basename, dirname, join } from "path";
+import { env } from "vscode";
+
+/**
+ * A node in the tree of files. This will be either a `FileTreeDirectory` or a `FileTreeLeaf`.
+ */
+export abstract class FileTreeNode {
+  constructor(private _path: string, private _name: string) {}
+
+  public get path(): string {
+    return this._path;
+  }
+
+  public get name(): string {
+    return this._name;
+  }
+
+  public abstract get children(): readonly FileTreeNode[];
+
+  public abstract finish(): void;
+}
+
+/**
+ * A directory containing one or more files or other directories.
+ */
+export class FileTreeDirectory extends FileTreeNode {
+  constructor(
+    _path: string,
+    _name: string,
+    private _children: FileTreeNode[] = [],
+  ) {
+    super(_path, _name);
+  }
+
+  public get children(): readonly FileTreeNode[] {
+    return this._children;
+  }
+
+  public addChild(child: FileTreeNode): void {
+    this._children.push(child);
+  }
+
+  public createDirectory(relativePath: string): FileTreeDirectory {
+    if (relativePath === ".") {
+      return this;
+    }
+    const dirName = dirname(relativePath);
+    if (dirName === ".") {
+      return this.createChildDirectory(relativePath);
+    } else {
+      const parent = this.createDirectory(dirName);
+      return parent.createDirectory(basename(relativePath));
+    }
+  }
+
+  public finish(): void {
+    // remove empty directories
+    this._children.filter(
+      (child) => child instanceof FileTreeLeaf || child.children.length > 0,
+    );
+    this._children.sort((a, b) => a.name.localeCompare(b.name, env.language));
+    this._children.forEach((child, i) => {
+      child.finish();
+      if (
+        child.children?.length === 1 &&
+        child.children[0] instanceof FileTreeDirectory
+      ) {
+        // collapse children
+        const replacement = new FileTreeDirectory(
+          child.children[0].path,
+          `${child.name} / ${child.children[0].name}`,
+          Array.from(child.children[0].children),
+        );
+        this._children[i] = replacement;
+      }
+    });
+  }
+
+  private createChildDirectory(name: string): FileTreeDirectory {
+    const existingChild = this._children.find((child) => child.name === name);
+    if (existingChild !== undefined) {
+      return existingChild as FileTreeDirectory;
+    } else {
+      const newChild = new FileTreeDirectory(join(this.path, name), name);
+      this.addChild(newChild);
+      return newChild;
+    }
+  }
+}
+
+/**
+ * A single file.
+ */
+export class FileTreeLeaf extends FileTreeNode {
+  constructor(_path: string, _name: string) {
+    super(_path, _name);
+  }
+
+  public get children(): readonly FileTreeNode[] {
+    return [];
+  }
+
+  public finish(): void {
+    /**/
+  }
+}
extensions/ql-vscode/src/common/invocation-rate-limiter.ts (new file, 89 lines)

@@ -0,0 +1,89 @@
+import { Memento } from "./memento";
+
+/**
+ * Provides a utility method to invoke a function only if a minimum time interval has elapsed since
+ * the last invocation of that function.
+ */
+export class InvocationRateLimiter<T> {
+  constructor(
+    private readonly globalState: Memento,
+    private readonly funcIdentifier: string,
+    private readonly func: () => Promise<T>,
+    private readonly createDate: (dateString?: string) => Date = (s) =>
+      s ? new Date(s) : new Date(),
+  ) {}
+
+  /**
+   * Invoke the function if `minSecondsSinceLastInvocation` seconds have elapsed since the last invocation.
+   */
+  public async invokeFunctionIfIntervalElapsed(
+    minSecondsSinceLastInvocation: number,
+  ): Promise<InvocationRateLimiterResult<T>> {
+    const updateCheckStartDate = this.createDate();
+    const lastInvocationDate = this.getLastInvocationDate();
+    if (
+      minSecondsSinceLastInvocation &&
+      lastInvocationDate &&
+      lastInvocationDate <= updateCheckStartDate &&
+      lastInvocationDate.getTime() + minSecondsSinceLastInvocation * 1000 >
+        updateCheckStartDate.getTime()
+    ) {
+      return createRateLimitedResult();
+    }
+    const result = await this.func();
+    await this.setLastInvocationDate(updateCheckStartDate);
+    return createInvokedResult(result);
+  }
+
+  private getLastInvocationDate(): Date | undefined {
+    const maybeDateString: string | undefined = this.globalState.get(
+      InvocationRateLimiter._invocationRateLimiterPrefix + this.funcIdentifier,
+    );
+    return maybeDateString ? this.createDate(maybeDateString) : undefined;
+  }
+
+  private async setLastInvocationDate(date: Date): Promise<void> {
+    return await this.globalState.update(
+      InvocationRateLimiter._invocationRateLimiterPrefix + this.funcIdentifier,
+      date,
+    );
+  }
+
+  private static readonly _invocationRateLimiterPrefix =
+    "invocationRateLimiter_lastInvocationDate_";
+}
+
+export enum InvocationRateLimiterResultKind {
+  Invoked,
+  RateLimited,
+}
+
+/**
+ * The function was invoked and returned the value `result`.
+ */
+interface InvokedResult<T> {
+  kind: InvocationRateLimiterResultKind.Invoked;
+  result: T;
+}
+
+/**
+ * The function was not invoked as the minimum interval since the last invocation had not elapsed.
+ */
+interface RateLimitedResult {
+  kind: InvocationRateLimiterResultKind.RateLimited;
+}
+
+type InvocationRateLimiterResult<T> = InvokedResult<T> | RateLimitedResult;
+
+function createInvokedResult<T>(result: T): InvokedResult<T> {
+  return {
+    kind: InvocationRateLimiterResultKind.Invoked,
+    result,
+  };
+}
+
+function createRateLimitedResult(): RateLimitedResult {
+  return {
+    kind: InvocationRateLimiterResultKind.RateLimited,
+  };
+}
@@ -3,7 +3,7 @@ import * as Octokit from "@octokit/rest";
 import { retry } from "@octokit/plugin-retry";
 import { Credentials } from "../authentication";

-const GITHUB_AUTH_PROVIDER_ID = "github";
+export const GITHUB_AUTH_PROVIDER_ID = "github";

 // We need 'repo' scope for triggering workflows, 'gist' scope for exporting results to Gist,
 // and 'read:packages' for reading private CodeQL packages.
@@ -49,7 +49,6 @@ export function registerCommandWithErrorHandling(
     const errorMessage = redactableError(error)`${
       getErrorMessage(e) || e
     } (${commandId})`;
-    const errorStack = getErrorStack(e);
     if (e instanceof UserCancellationException) {
       // User has cancelled this action manually
       if (e.silent) {
@@ -61,6 +60,7 @@ export function registerCommandWithErrorHandling(
       }
     } else {
       // Include the full stack in the error log only.
+      const errorStack = getErrorStack(e);
       const fullMessage = errorStack
         ? `${errorMessage.fullMessage}\n${errorStack}`
         : errorMessage.fullMessage;
@@ -1,9 +1,9 @@
-import { showAndLogErrorMessage } from "../helpers";
+import { showAndLogErrorMessage } from "../../helpers";
 import {
   ExplorerSelectionCommandFunction,
   TreeViewContextMultiSelectionCommandFunction,
   TreeViewContextSingleSelectionCommandFunction,
-} from "./commands";
+} from "../commands";

 // A hack to match types that are not an array, which is useful to help avoid
 // misusing createSingleSelectionCommand, e.g. where T accidentally gets instantiated
@@ -39,6 +39,14 @@ export class ExtensionApp implements App {
     return this.extensionContext.workspaceState;
   }
+
+  public get workspaceFolders(): readonly vscode.WorkspaceFolder[] | undefined {
+    return vscode.workspace.workspaceFolders;
+  }
+
+  public get onDidChangeWorkspaceFolders(): vscode.Event<vscode.WorkspaceFoldersChangeEvent> {
+    return vscode.workspace.onDidChangeWorkspaceFolders;
+  }

   public get subscriptions(): Disposable[] {
     return this.extensionContext.subscriptions;
   }
@@ -175,21 +175,40 @@ export class CompareView extends AbstractWebview<
     const commonResultSetNames = fromSchemaNames.filter((name) =>
       toSchemaNames.includes(name),
     );
+
+    // Fall back on the default result set names if there are no common ones.
+    const defaultFromResultSetName = fromSchemaNames.find((name) =>
+      name.startsWith("#"),
+    );
+    const defaultToResultSetName = toSchemaNames.find((name) =>
+      name.startsWith("#"),
+    );
+
+    if (
+      commonResultSetNames.length === 0 &&
+      !(defaultFromResultSetName || defaultToResultSetName)
+    ) {
+      throw new Error(
+        "No common result sets found between the two queries. Please check that the queries are compatible.",
+      );
+    }
+
     const currentResultSetName =
       selectedResultSetName || commonResultSetNames[0];
     const fromResultSet = await this.getResultSet(
       fromSchemas,
-      currentResultSetName,
+      currentResultSetName || defaultFromResultSetName!,
       from.completedQuery.query.resultsPaths.resultsPath,
     );
     const toResultSet = await this.getResultSet(
       toSchemas,
-      currentResultSetName,
+      currentResultSetName || defaultToResultSetName!,
       to.completedQuery.query.resultsPaths.resultsPath,
     );
     return [
       commonResultSetNames,
-      currentResultSetName,
+      currentResultSetName ||
+        `${defaultFromResultSetName} <-> ${defaultToResultSetName}`,
       fromResultSet,
       toResultSet,
     ];
@@ -711,3 +711,10 @@ const QUERIES_PANEL = new Setting("queriesPanel", ROOT_SETTING);
 export function showQueriesPanel(): boolean {
   return !!QUERIES_PANEL.getValue<boolean>();
 }
+
+const DATA_EXTENSIONS = new Setting("dataExtensions", ROOT_SETTING);
+const LLM_GENERATION = new Setting("llmGeneration", DATA_EXTENSIONS);
+
+export function showLlmGeneration(): boolean {
+  return !!LLM_GENERATION.getValue<boolean>();
+}
@@ -0,0 +1,54 @@
+import { Credentials } from "../common/authentication";
+import { OctokitResponse } from "@octokit/types";
+
+export enum ClassificationType {
+  Unknown = "CLASSIFICATION_TYPE_UNKNOWN",
+  Neutral = "CLASSIFICATION_TYPE_NEUTRAL",
+  Source = "CLASSIFICATION_TYPE_SOURCE",
+  Sink = "CLASSIFICATION_TYPE_SINK",
+  Summary = "CLASSIFICATION_TYPE_SUMMARY",
+}
+
+export interface Classification {
+  type: ClassificationType;
+  kind: string;
+  explanation: string;
+}
+
+export interface Method {
+  package: string;
+  type: string;
+  name: string;
+  signature: string;
+  usages: string[];
+  classification?: Classification;
+  input?: string;
+  output?: string;
+}
+
+export interface ModelRequest {
+  language: string;
+  candidates: Method[];
+  samples: Method[];
+}
+
+export interface ModelResponse {
+  language: string;
+  predicted: Method[];
+}
+
+export async function autoModel(
+  credentials: Credentials,
+  request: ModelRequest,
+): Promise<ModelResponse> {
+  const octokit = await credentials.getOctokit();
+
+  const response: OctokitResponse<ModelResponse> = await octokit.request(
+    "POST /repos/github/codeql/code-scanning/codeql/auto-model",
+    {
+      data: request,
+    },
+  );
+
+  return response.data;
+}
@@ -0,0 +1,136 @@
+import { CancellationTokenSource } from "vscode";
+import { join } from "path";
+import { runQuery } from "./external-api-usage-query";
+import { CodeQLCliServer } from "../codeql-cli/cli";
+import { QueryRunner } from "../query-server";
+import { DatabaseItem } from "../databases/local-databases";
+import { interpretResultsSarif } from "../query-results";
+import { ProgressCallback } from "../common/vscode/progress";
+
+type Options = {
+  cliServer: CodeQLCliServer;
+  queryRunner: QueryRunner;
+  databaseItem: DatabaseItem;
+  queryStorageDir: string;
+
+  progress: ProgressCallback;
+};
+
+export type UsageSnippetsBySignature = Record<string, string[]>;
+
+export async function getAutoModelUsages({
+  cliServer,
+  queryRunner,
+  databaseItem,
+  queryStorageDir,
+  progress,
+}: Options): Promise<UsageSnippetsBySignature> {
+  const maxStep = 1500;
+
+  const cancellationTokenSource = new CancellationTokenSource();
+
+  // This will re-run the query that was already run when opening the data extensions editor. This
+  // might be unnecessary, but this makes it really easy to get the path to the BQRS file which we
+  // need to interpret the results.
+  const queryResult = await runQuery({
+    cliServer,
+    queryRunner,
+    queryStorageDir,
+    databaseItem,
+    progress: (update) =>
+      progress({
+        maxStep,
+        step: update.step,
+        message: update.message,
+      }),
+    token: cancellationTokenSource.token,
+  });
+  if (!queryResult) {
+    throw new Error("Query failed");
+  }
+
+  progress({
+    maxStep,
+    step: 1100,
+    message: "Retrieving source location prefix",
+  });
+
+  // CodeQL needs to have access to the database to be able to retrieve the
+  // snippets from it. The source location prefix is used to determine the
+  // base path of the database.
+  const sourceLocationPrefix = await databaseItem.getSourceLocationPrefix(
+    cliServer,
+  );
+  const sourceArchiveUri = databaseItem.sourceArchive;
+  const sourceInfo =
+    sourceArchiveUri === undefined
+      ? undefined
+      : {
+          sourceArchive: sourceArchiveUri.fsPath,
+          sourceLocationPrefix,
+        };
+
+  progress({
+    maxStep,
+    step: 1200,
+    message: "Interpreting results",
+  });
+
+  // Convert the results to SARIF so that Codeql will retrieve the snippets
+  // from the datababe. This means we don't need to do that in the extension
+  // and everything is handled by the CodeQL CLI.
+  const sarif = await interpretResultsSarif(
+    cliServer,
+    {
+      // To interpret the results we need to provide metadata about the query. We could do this using
+      // `resolveMetadata` but that would be an extra call to the CodeQL CLI server and would require
+      // us to know the path to the query on the filesystem. Since we know what the metadata should
+      // look like and the only metadata that the CodeQL CLI requires is an ID and the kind, we can
+      // simply use constants here.
+      kind: "problem",
+      id: "usage",
+    },
+    {
+      resultsPath: queryResult.outputDir.bqrsPath,
+      interpretedResultsPath: join(
+        queryStorageDir,
+        "interpreted-results.sarif",
+      ),
+    },
+    sourceInfo,
+    ["--sarif-add-snippets"],
+  );
+
+  progress({
+    maxStep,
+    step: 1400,
+    message: "Parsing results",
+  });
+
+  const snippets: UsageSnippetsBySignature = {};
+
+  const results = sarif.runs[0]?.results;
+  if (!results) {
+    throw new Error("No results");
+  }
+
+  // This will group the snippets by the method signature.
+  for (const result of results) {
+    const signature = result.message.text;
+
+    const snippet =
+      result.locations?.[0]?.physicalLocation?.contextRegion?.snippet?.text;
+
+    if (!signature || !snippet) {
+      continue;
+    }
+
+    if (!(signature in snippets)) {
+      snippets[signature] = [];
+    }
+
+    snippets[signature].push(snippet);
+  }
+
+  return snippets;
+}
222
extensions/ql-vscode/src/data-extensions-editor/auto-model.ts
Normal file
222
extensions/ql-vscode/src/data-extensions-editor/auto-model.ts
Normal file
@@ -0,0 +1,222 @@
|
|||||||
|
import { ExternalApiUsage } from "./external-api-usage";
|
||||||
|
import { ModeledMethod, ModeledMethodType } from "./modeled-method";
|
||||||
|
import {
|
||||||
|
Classification,
|
||||||
|
ClassificationType,
|
||||||
|
Method,
|
||||||
|
ModelRequest,
|
||||||
|
} from "./auto-model-api";
|
||||||
|
import type { UsageSnippetsBySignature } from "./auto-model-usages-query";
|
||||||
|
|
||||||
|
export function createAutoModelRequest(
|
||||||
|
language: string,
|
||||||
|
externalApiUsages: ExternalApiUsage[],
|
||||||
|
modeledMethods: Record<string, ModeledMethod>,
|
||||||
|
usages: UsageSnippetsBySignature,
|
||||||
|
): ModelRequest {
|
||||||
|
const request: ModelRequest = {
|
||||||
|
language,
|
||||||
|
samples: [],
|
||||||
|
candidates: [],
|
||||||
|
};
|
||||||
|
|
||||||
|
// Sort by number of usages so we always send the most used methods first
|
||||||
|
externalApiUsages = [...externalApiUsages];
|
||||||
|
externalApiUsages.sort((a, b) => b.usages.length - a.usages.length);
|
||||||
|
|
||||||
|
for (const externalApiUsage of externalApiUsages) {
|
||||||
|
const modeledMethod: ModeledMethod = modeledMethods[
|
||||||
|
externalApiUsage.signature
|
||||||
|
] ?? {
|
||||||
|
type: "none",
|
||||||
|
};
|
||||||
|
|
||||||
|
const usagesForMethod =
|
||||||
|
usages[externalApiUsage.signature] ??
|
||||||
|
externalApiUsage.usages.map((usage) => usage.label);
|
||||||
|
|
||||||
|
const numberOfArguments =
|
||||||
|
externalApiUsage.methodParameters === "()"
|
||||||
|
? 0
|
||||||
|
: externalApiUsage.methodParameters.split(",").length;
|
||||||
|
|
||||||
|
for (
|
||||||
|
let argumentIndex = 0;
|
||||||
|
argumentIndex < numberOfArguments;
|
||||||
|
argumentIndex++
|
||||||
|
) {
|
||||||
|
const method: Method = {
|
||||||
|
package: externalApiUsage.packageName,
|
||||||
|
type: externalApiUsage.typeName,
|
||||||
|
name: externalApiUsage.methodName,
|
||||||
|
signature: externalApiUsage.methodParameters,
|
||||||
|
classification:
|
||||||
|
modeledMethod.type === "none"
|
||||||
|
? undefined
|
||||||
|
: toMethodClassification(modeledMethod),
|
||||||
|
usages: usagesForMethod.slice(0, 10),
|
||||||
|
input: `Argument[${argumentIndex}]`,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (modeledMethod.type === "none") {
|
||||||
|
request.candidates.push(method);
|
||||||
|
} else {
|
||||||
|
request.samples.push(method);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
request.candidates = request.candidates.slice(0, 20);
|
||||||
|
request.samples = request.samples.slice(0, 100);
|
||||||
|
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* For now, we have a simplified model that only models methods as sinks. It does not model methods as neutral,
|
||||||
|
* so we aren't actually able to correctly determine that a method is neutral; it could still be a source or summary.
|
||||||
|
* However, to keep this method simple and give output to the user, we will model any method for which none of its
|
||||||
|
* arguments are modeled as sinks as neutral.
|
||||||
|
*
|
||||||
|
* If there are multiple arguments which are modeled as sinks, we will only model the first one.
|
||||||
|
*/
|
||||||
|
export function parsePredictedClassifications(
|
||||||
|
predicted: Method[],
|
||||||
|
): Record<string, ModeledMethod> {
|
||||||
|
const predictedBySignature: Record<string, Method[]> = {};
|
||||||
|
for (const method of predicted) {
|
||||||
|
if (!method.classification) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const signature = toFullMethodSignature(method);
|
||||||
|
|
||||||
|
if (!(signature in predictedBySignature)) {
|
||||||
|
predictedBySignature[signature] = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
predictedBySignature[signature].push(method);
|
||||||
|
}
|
||||||
|
|
||||||
|
const modeledMethods: Record<string, ModeledMethod> = {};
|
||||||
|
|
||||||
|
for (const signature in predictedBySignature) {
const predictedMethods = predictedBySignature[signature];

const sinks = predictedMethods.filter(
(method) => method.classification?.type === ClassificationType.Sink,
);
if (sinks.length === 0) {
// For now, model any method for which none of its arguments are modeled as sinks as neutral
modeledMethods[signature] = {
type: "neutral",
kind: "",
input: "",
output: "",
};
continue;
}

// Order the sinks by the input alphabetically. This will ensure that the first argument is always
// first in the list of sinks, the second argument is always second, etc.
// If we get back "Argument[1]" and "Argument[3]", "Argument[1]" should always be first
sinks.sort((a, b) => compareInputOutput(a.input ?? "", b.input ?? ""));

const sink = sinks[0];

modeledMethods[signature] = {
type: "sink",
kind: sink.classification?.kind ?? "",
input: sink.input ?? "",
output: sink.output ?? "",
};
}

return modeledMethods;
}

function toMethodClassificationType(
type: ModeledMethodType,
): ClassificationType {
switch (type) {
case "source":
return ClassificationType.Source;
case "sink":
return ClassificationType.Sink;
case "summary":
return ClassificationType.Summary;
case "neutral":
return ClassificationType.Neutral;
default:
return ClassificationType.Unknown;
}
}

function toMethodClassification(modeledMethod: ModeledMethod): Classification {
return {
type: toMethodClassificationType(modeledMethod.type),
kind: modeledMethod.kind,
explanation: "",
};
}

function toFullMethodSignature(method: Method): string {
return `${method.package}.${method.type}#${method.name}${method.signature}`;
}

const argumentRegex = /^Argument\[(\d+)]$/;

// Argument[this] is before ReturnValue
const nonNumericArgumentOrder = ["Argument[this]", "ReturnValue"];

/**
* Compare two inputs or outputs matching `Argument[<number>]`, `Argument[this]`, or `ReturnValue`.
* If they are the same, return 0. If a is less than b, returns a negative number.
* If a is greater than b, returns a positive number.
*/
export function compareInputOutput(a: string, b: string): number {
if (a === b) {
return 0;
}

const aMatch = a.match(argumentRegex);
const bMatch = b.match(argumentRegex);

// Numeric arguments are always first
if (aMatch && !bMatch) {
return -1;
}
if (!aMatch && bMatch) {
return 1;
}

// Neither is an argument
if (!aMatch && !bMatch) {
const aIndex = nonNumericArgumentOrder.indexOf(a);
const bIndex = nonNumericArgumentOrder.indexOf(b);

// If either one is unknown, it is sorted last
if (aIndex === -1 && bIndex === -1) {
return a.localeCompare(b);
}
if (aIndex === -1) {
return 1;
}
if (bIndex === -1) {
return -1;
}

return aIndex - bIndex;
}

// This case shouldn't happen, but makes TypeScript happy
if (!aMatch || !bMatch) {
return 0;
}

// Both are arguments
const aIndex = parseInt(aMatch[1]);
const bIndex = parseInt(bMatch[1]);

return aIndex - bIndex;
}
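As a quick illustration of the ordering that compareInputOutput produces, here is a small, hypothetical usage sketch; the sample access paths are invented and are not taken from the change:

// Illustrative only: numeric arguments sort first in index order, then
// Argument[this], then ReturnValue; unrecognised paths sort last.
const paths = ["ReturnValue", "Argument[3]", "Argument[this]", "Argument[1]"];
paths.sort(compareInputOutput);
// Expected order: ["Argument[1]", "Argument[3]", "Argument[this]", "ReturnValue"]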
@@ -7,9 +7,9 @@ export function decodeBqrsToExternalApiUsages(
const methodsByApiName = new Map<string, ExternalApiUsage>();

chunk?.tuples.forEach((tuple) => {
const signature = tuple[0] as string;
const usage = tuple[0] as Call;
const supported = tuple[1] as boolean;
const signature = tuple[1] as string;
const usage = tuple[2] as Call;
const supported = (tuple[2] as string) === "true";

const [packageWithType, methodDeclaration] = signature.split("#");
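For readers following the tuple re-ordering above, a hedged sketch of the shape the decoder now assumes; the values are placeholders and the Call entity is stubbed out, none of this is code from the commit:

// Illustrative only: the usage entity now comes first, followed by the
// signature string and a string-encoded boolean for "supported".
declare const usage: Call; // placeholder entity taken from a BQRS chunk
const tuple: Array<string | Call> = [usage, "java.io.PrintStream#println(String)", "true"];
const signature = tuple[1] as string;
const supported = (tuple[2] as string) === "true";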
@@ -38,6 +38,13 @@ import { createDataExtensionYaml, loadDataExtensionYaml } from "./yaml";
import { ExternalApiUsage } from "./external-api-usage";
import { ModeledMethod } from "./modeled-method";
import { ExtensionPackModelFile } from "./shared/extension-pack";
import { autoModel } from "./auto-model-api";
import {
createAutoModelRequest,
parsePredictedClassifications,
} from "./auto-model";
import { showLlmGeneration } from "../config";
import { getAutoModelUsages } from "./auto-model-usages-query";

export class DataExtensionsEditorView extends AbstractWebview<
ToDataExtensionsEditorMessage,
@@ -113,6 +120,13 @@ export class DataExtensionsEditorView extends AbstractWebview<
case "generateExternalApi":
await this.generateModeledMethods();

break;
case "generateExternalApiFromLlm":
await this.generateModeledMethodsFromLlm(
msg.externalApiUsages,
msg.modeledMethods,
);

break;
default:
assertNever(msg);
@@ -135,6 +149,7 @@ export class DataExtensionsEditorView extends AbstractWebview<
viewState: {
extensionPackModelFile: this.modelFile,
modelFileExists: await pathExists(this.modelFile.filename),
showLlmButton: showLlmGeneration(),
},
});
}
@@ -347,6 +362,72 @@ export class DataExtensionsEditorView extends AbstractWebview<
await this.clearProgress();
}

private async generateModeledMethodsFromLlm(
externalApiUsages: ExternalApiUsage[],
modeledMethods: Record<string, ModeledMethod>,
): Promise<void> {
const maxStep = 3000;

await this.showProgress({
step: 0,
maxStep,
message: "Retrieving usages",
});

const usages = await getAutoModelUsages({
cliServer: this.cliServer,
queryRunner: this.queryRunner,
queryStorageDir: this.queryStorageDir,
databaseItem: this.databaseItem,
progress: (update) => this.showProgress(update, maxStep),
});

await this.showProgress({
step: 1800,
maxStep,
message: "Creating request",
});

const request = createAutoModelRequest(
this.databaseItem.language,
externalApiUsages,
modeledMethods,
usages,
);

await this.showProgress({
step: 2000,
maxStep,
message: "Sending request",
});

const response = await autoModel(this.app.credentials, request);

await this.showProgress({
step: 2500,
maxStep,
message: "Parsing response",
});

const predictedModeledMethods = parsePredictedClassifications(
response.predicted,
);

await this.showProgress({
step: 2800,
maxStep,
message: "Applying results",
});

await this.postMessage({
t: "addModeledMethods",
modeledMethods: predictedModeledMethods,
overrideNone: true,
});

await this.clearProgress();
}

/*
* Progress in this class is a bit weird. Most of the progress is based on running the query.
* Query progress is always between 0 and 1000. However, we still have some steps that need
@@ -78,7 +78,11 @@ export async function runQuery({

const queryRun = queryRunner.createQueryRun(
databaseItem.databaseUri.fsPath,
{ queryPath: queryFile, quickEvalPosition: undefined },
{
queryPath: queryFile,
quickEvalPosition: undefined,
quickEvalCountOnly: false,
},
false,
getOnDiskWorkspaceFolders(),
extensionPacks,
@@ -92,7 +92,11 @@ async function getModeledMethodsFromFlow(

const queryRun = queryRunner.createQueryRun(
databaseItem.databaseUri.fsPath,
{ queryPath, quickEvalPosition: undefined },
{
queryPath,
quickEvalPosition: undefined,
quickEvalCountOnly: false,
},
false,
getOnDiskWorkspaceFolders(),
undefined,
@@ -116,13 +116,14 @@ export const extensiblePredicateDefinitions: Record<
neutral: {
extensiblePredicate: "neutralModel",
// extensible predicate neutralModel(
// string package, string type, string name, string signature, string provenance
// string package, string type, string name, string signature, string kind, string provenance
// );
generateMethodDefinition: (method) => [
method.externalApiUsage.packageName,
method.externalApiUsage.typeName,
method.externalApiUsage.methodName,
method.externalApiUsage.methodParameters,
method.modeledMethod.kind,
"manual",
],
readModeledMethod: (row) => ({
@@ -131,8 +132,9 @@ export const extensiblePredicateDefinitions: Record<
type: "neutral",
input: "",
output: "",
kind: "",
kind: row[4] as string,
},
}),
supportedKinds: ["summary", "source", "sink"],
},
};
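A small sketch of the row shape implied by the widened neutralModel predicate; the package, type, and method names are invented for illustration and are not part of the commit:

// Illustrative only: a neutralModel row now carries a kind column before provenance.
const row = ["java.sql", "Statement", "execute", "(String)", "summary", "manual"];
// readModeledMethod reads the kind back out of column 4:
const kind = row[4] as string; // "summary"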
@@ -5,30 +5,28 @@ export const fetchExternalApisQuery: Query = {
* @name Usage of APIs coming from external libraries
* @description A list of 3rd party APIs used in the codebase.
* @tags telemetry
* @kind problem
* @id cs/telemetry/fetch-external-apis
*/

import csharp
import semmle.code.csharp.dataflow.internal.FlowSummaryImpl as FlowSummaryImpl
import ExternalApi
import ExternalApi

private Call aUsage(ExternalApi api) { result.getTarget().getUnboundDeclaration() = api }
private Call aUsage(ExternalApi api) {
result.getTarget().getUnboundDeclaration() = api
private boolean isSupported(ExternalApi api) {
}
api.isSupported() and result = true
or
private boolean isSupported(ExternalApi api) {
not api.isSupported() and
api.isSupported() and result = true
result = false
or
}
not api.isSupported() and
result = false
from ExternalApi api, string apiName, boolean supported, Call usage
}
where
apiName = api.getApiName() and
from ExternalApi api, string apiName, boolean supported, Call usage
supported = isSupported(api) and
where
usage = aUsage(api)
apiName = api.getApiName() and
select usage, apiName, supported.toString(), "supported"
supported = isSupported(api) and
usage = aUsage(api)
select apiName, supported, usage
`,
dependencies: {
"ExternalApi.qll": `/** Provides classes and predicates related to handling APIs from external libraries. */
@@ -5,11 +5,11 @@ export const fetchExternalApisQuery: Query = {
* @name Usage of APIs coming from external libraries
* @description A list of 3rd party APIs used in the codebase. Excludes test and generated code.
* @tags telemetry
* @kind problem
* @id java/telemetry/fetch-external-apis
*/

import java
import semmle.code.java.dataflow.internal.FlowSummaryImpl as FlowSummaryImpl
import ExternalApi

private Call aUsage(ExternalApi api) {
@@ -28,7 +28,7 @@ where
apiName = api.getApiName() and
supported = isSupported(api) and
usage = aUsage(api)
select apiName, supported, usage
select usage, apiName, supported.toString(), "supported"
`,
dependencies: {
"ExternalApi.qll": `/** Provides classes and predicates related to handling APIs from external libraries. */
@@ -1,4 +1,13 @@
export type Query = {
/**
* The main query.
*
* It should select all usages of external APIs, and return the following result pattern:
* - usage: the usage of the external API. This is an entity.
* - apiName: the name of the external API. This is a string.
* - supported: whether the external API is supported by the extension. This should be a string representation of a boolean to satisfy the result pattern for a problem query.
* - "supported": a string literal. This is required to make the query a valid problem query.
*/
mainQuery: string;
dependencies?: {
[filename: string]: string;
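A minimal sketch of a value satisfying the Query type above; the QL text and dependency contents are placeholders rather than the queries shipped in this change:

// Illustrative only: the main query must select usage, apiName,
// supported.toString(), and the string literal "supported".
const exampleQuery: Query = {
  mainQuery: "/* QL source selecting (usage, apiName, supported, \"supported\") */",
  dependencies: {
    "ExternalApi.qll": "/* supporting QL library source */",
  },
};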
@@ -3,4 +3,5 @@ import { ExtensionPackModelFile } from "./extension-pack";
export interface DataExtensionEditorViewState {
extensionPackModelFile: ExtensionPackModelFile;
modelFileExists: boolean;
showLlmButton: boolean;
}
@@ -61,7 +61,9 @@ export class DbConfigStore extends DisposableObject {
this.configErrors = [];
this.configWatcher = undefined;
this.configValidator = new DbConfigValidator(app.extensionPath);
this.onDidChangeConfigEventEmitter = app.createEventEmitter<void>();
this.onDidChangeConfigEventEmitter = this.push(
app.createEventEmitter<void>(),
);
this.onDidChangeConfig = this.onDidChangeConfigEventEmitter.event;
}

@@ -145,10 +147,46 @@ export class DbConfigStore extends DisposableObject {
await this.writeConfig(config);
}

/**
* Adds a list of remote repositories to an existing repository list and removes duplicates.
* @returns a list of repositories that were not added because the list reached 1000 entries.
*/
public async addRemoteReposToList(
repoNwoList: string[],
parentList: string,
): Promise<string[]> {
if (!this.config) {
throw Error("Cannot add variant analysis repos if config is not loaded");
}

const config = cloneDbConfig(this.config);
const parent = config.databases.variantAnalysis.repositoryLists.find(
(list) => list.name === parentList,
);
if (!parent) {
throw Error(`Cannot find parent list '${parentList}'`);
}

// Remove duplicates from the list of repositories.
const newRepositoriesList = [
...new Set([...parent.repositories, ...repoNwoList]),
];

parent.repositories = newRepositoriesList.slice(0, 1000);
const truncatedRepositories = newRepositoriesList.slice(1000);

await this.writeConfig(config);
return truncatedRepositories;
}

/**
* Adds one remote repository
* @returns either nothing, or, if a parentList is given AND the number of repos on that list reaches 1000 returns the repo that was not added.
*/
public async addRemoteRepo(
repoNwo: string,
parentList?: string,
): Promise<void> {
): Promise<string[]> {
if (!this.config) {
throw Error("Cannot add variant analysis repo if config is not loaded");
}
@@ -163,6 +201,7 @@ export class DbConfigStore extends DisposableObject {
);
}

const truncatedRepositories = [];
const config = cloneDbConfig(this.config);
if (parentList) {
const parent = config.databases.variantAnalysis.repositoryLists.find(
@@ -171,12 +210,15 @@ export class DbConfigStore extends DisposableObject {
if (!parent) {
throw Error(`Cannot find parent list '${parentList}'`);
} else {
parent.repositories.push(repoNwo);
const newRepositories = [...parent.repositories, repoNwo];
parent.repositories = newRepositories.slice(0, 1000);
truncatedRepositories.push(...newRepositories.slice(1000));
}
} else {
config.databases.variantAnalysis.repositories.push(repoNwo);
}
await this.writeConfig(config);
return truncatedRepositories;
}

public async addRemoteOwner(owner: string): Promise<void> {
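The 1000-entry cap used by addRemoteRepo and addRemoteReposToList can be pictured with a short, self-contained sketch; the repository names are invented:

// Illustrative only: merge, de-duplicate, keep the first 1000, report the rest.
const existing = ["octo/repo-1", "octo/repo-2"];
const incoming = ["octo/repo-2", "octo/repo-3"];
const merged = [...new Set([...existing, ...incoming])]; // ["octo/repo-1", "octo/repo-2", "octo/repo-3"]
const kept = merged.slice(0, 1000); // stored on the repository list
const truncated = merged.slice(1000); // returned to the caller; empty here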
@@ -1,6 +1,7 @@
import { App } from "../common/app";
import { AppEvent, AppEventEmitter } from "../common/events";
import { ValueResult } from "../common/value-result";
import { DisposableObject } from "../pure/disposable-object";
import { DbConfigStore } from "./config/db-config-store";
import {
DbItem,
@@ -23,7 +24,7 @@ import {
import { createRemoteTree } from "./db-tree-creator";
import { DbConfigValidationError } from "./db-validation-errors";

export class DbManager {
export class DbManager extends DisposableObject {
public readonly onDbItemsChanged: AppEvent<void>;
public static readonly DB_EXPANDED_STATE_KEY = "db_expanded";
private readonly onDbItemsChangesEventEmitter: AppEventEmitter<void>;
@@ -32,7 +33,11 @@ export class DbManager {
private readonly app: App,
private readonly dbConfigStore: DbConfigStore,
) {
this.onDbItemsChangesEventEmitter = app.createEventEmitter<void>();
super();

this.onDbItemsChangesEventEmitter = this.push(
app.createEventEmitter<void>(),
);
this.onDbItemsChanged = this.onDbItemsChangesEventEmitter.event;

this.dbConfigStore.onDidChangeConfig(() => {
@@ -96,8 +101,15 @@ export class DbManager {
public async addNewRemoteRepo(
nwo: string,
parentList?: string,
): Promise<void> {
): Promise<string[]> {
await this.dbConfigStore.addRemoteRepo(nwo, parentList);
return await this.dbConfigStore.addRemoteRepo(nwo, parentList);
}

public async addNewRemoteReposToList(
nwoList: string[],
parentList: string,
): Promise<string[]> {
return await this.dbConfigStore.addRemoteReposToList(nwoList, parentList);
}

public async addNewRemoteOwner(owner: string): Promise<void> {
@@ -17,7 +17,7 @@ export class DbModule extends DisposableObject {
super();

this.dbConfigStore = new DbConfigStore(app);
this.dbManager = new DbManager(app, this.dbConfigStore);
this.dbManager = this.push(new DbManager(app, this.dbConfigStore));
}

public static async initialize(app: App): Promise<DbModule> {
@@ -49,7 +49,7 @@ import { LocalDatabasesCommands } from "../common/commands";
import {
createMultiSelectionCommand,
createSingleSelectionCommand,
} from "../common/selection-commands";
} from "../common/vscode/selection-commands";

enum SortOrder {
NameAsc = "NameAsc",
@@ -0,0 +1,30 @@
import vscode from "vscode";

/**
* The layout of the database.
*/
export enum DatabaseKind {
/** A CodeQL database */
Database,
/** A raw QL dataset */
RawDataset,
}

export interface DatabaseContents {
/** The layout of the database */
kind: DatabaseKind;
/**
* The name of the database.
*/
name: string;
/** The URI of the QL dataset within the database. */
datasetUri: vscode.Uri;
/** The URI of the source archive within the database, if one exists. */
sourceArchiveUri?: vscode.Uri;
/** The URI of the CodeQL database scheme within the database, if exactly one exists. */
dbSchemeUri?: vscode.Uri;
}

export interface DatabaseContentsWithDbScheme extends DatabaseContents {
dbSchemeUri: vscode.Uri; // Always present
}
@@ -0,0 +1,19 @@
import { DatabaseItem } from "./database-item";

export enum DatabaseEventKind {
Add = "Add",
Remove = "Remove",

// Fired when databases are refreshed from persisted state
Refresh = "Refresh",

// Fired when the current database changes
Change = "Change",

Rename = "Rename",
}

export interface DatabaseChangedEvent {
kind: DatabaseEventKind;
item: DatabaseItem | undefined;
}
@@ -0,0 +1,213 @@
// Exported for testing
import * as cli from "../../codeql-cli/cli";
import vscode from "vscode";
import { FullDatabaseOptions } from "./database-options";
import { basename, dirname, join, relative } from "path";
import {
decodeSourceArchiveUri,
encodeArchiveBasePath,
encodeSourceArchiveUri,
zipArchiveScheme,
} from "../../common/vscode/archive-filesystem-provider";
import { DatabaseItem, PersistedDatabaseItem } from "./database-item";
import { isLikelyDatabaseRoot } from "../../helpers";
import { stat } from "fs-extra";
import { pathsEqual } from "../../pure/files";
import { DatabaseContents } from "./database-contents";

export class DatabaseItemImpl implements DatabaseItem {
// These are only public in the implementation, they are readonly in the interface
public error: Error | undefined = undefined;
public contents: DatabaseContents | undefined;
/** A cache of database info */
private _dbinfo: cli.DbInfo | undefined;

public constructor(
public readonly databaseUri: vscode.Uri,
contents: DatabaseContents | undefined,
private options: FullDatabaseOptions,
) {
this.contents = contents;
}

public get name(): string {
if (this.options.displayName) {
return this.options.displayName;
} else if (this.contents) {
return this.contents.name;
} else {
return basename(this.databaseUri.fsPath);
}
}

public set name(newName: string) {
this.options.displayName = newName;
}

public get sourceArchive(): vscode.Uri | undefined {
if (this.options.ignoreSourceArchive || this.contents === undefined) {
return undefined;
} else {
return this.contents.sourceArchiveUri;
}
}

public get dateAdded(): number | undefined {
return this.options.dateAdded;
}

public resolveSourceFile(uriStr: string | undefined): vscode.Uri {
const sourceArchive = this.sourceArchive;
const uri = uriStr ? vscode.Uri.parse(uriStr, true) : undefined;
if (uri && uri.scheme !== "file") {
throw new Error(
`Invalid uri scheme in ${uriStr}. Only 'file' is allowed.`,
);
}
if (!sourceArchive) {
if (uri) {
return uri;
} else {
return this.databaseUri;
}
}

if (uri) {
const relativeFilePath = decodeURI(uri.path)
.replace(":", "_")
.replace(/^\/*/, "");
if (sourceArchive.scheme === zipArchiveScheme) {
const zipRef = decodeSourceArchiveUri(sourceArchive);
const pathWithinSourceArchive =
zipRef.pathWithinSourceArchive === "/"
? relativeFilePath
: `${zipRef.pathWithinSourceArchive}/${relativeFilePath}`;
return encodeSourceArchiveUri({
pathWithinSourceArchive,
sourceArchiveZipPath: zipRef.sourceArchiveZipPath,
});
} else {
let newPath = sourceArchive.path;
if (!newPath.endsWith("/")) {
// Ensure a trailing slash.
newPath += "/";
}
newPath += relativeFilePath;

return sourceArchive.with({ path: newPath });
}
} else {
return sourceArchive;
}
}

/**
* Gets the state of this database, to be persisted in the workspace state.
*/
public getPersistedState(): PersistedDatabaseItem {
return {
uri: this.databaseUri.toString(true),
options: this.options,
};
}

/**
* Holds if the database item refers to an exported snapshot
*/
public async hasMetadataFile(): Promise<boolean> {
return await isLikelyDatabaseRoot(this.databaseUri.fsPath);
}

/**
* Returns information about a database.
*/
private async getDbInfo(server: cli.CodeQLCliServer): Promise<cli.DbInfo> {
if (this._dbinfo === undefined) {
this._dbinfo = await server.resolveDatabase(this.databaseUri.fsPath);
}
return this._dbinfo;
}

/**
* Returns `sourceLocationPrefix` of database. Requires that the database
* has a `.dbinfo` file, which is the source of the prefix.
*/
public async getSourceLocationPrefix(
server: cli.CodeQLCliServer,
): Promise<string> {
const dbInfo = await this.getDbInfo(server);
return dbInfo.sourceLocationPrefix;
}

/**
* Returns path to dataset folder of database.
*/
public async getDatasetFolder(server: cli.CodeQLCliServer): Promise<string> {
const dbInfo = await this.getDbInfo(server);
return dbInfo.datasetFolder;
}

public get language() {
return this.options.language || "";
}

/**
* Returns the root uri of the virtual filesystem for this database's source archive.
*/
public getSourceArchiveExplorerUri(): vscode.Uri {
const sourceArchive = this.sourceArchive;
if (sourceArchive === undefined || !sourceArchive.fsPath.endsWith(".zip")) {
throw new Error(this.verifyZippedSources());
}
return encodeArchiveBasePath(sourceArchive.fsPath);
}

public verifyZippedSources(): string | undefined {
const sourceArchive = this.sourceArchive;
if (sourceArchive === undefined) {
return `${this.name} has no source archive.`;
}

if (!sourceArchive.fsPath.endsWith(".zip")) {
return `${this.name} has a source folder that is unzipped.`;
}
return;
}

/**
* Holds if `uri` belongs to this database's source archive.
*/
public belongsToSourceArchiveExplorerUri(uri: vscode.Uri): boolean {
if (this.sourceArchive === undefined) return false;
return (
uri.scheme === zipArchiveScheme &&
decodeSourceArchiveUri(uri).sourceArchiveZipPath ===
this.sourceArchive.fsPath
);
}

public async isAffectedByTest(testPath: string): Promise<boolean> {
const databasePath = this.databaseUri.fsPath;
if (!databasePath.endsWith(".testproj")) {
return false;
}
try {
const stats = await stat(testPath);
if (stats.isDirectory()) {
return !relative(testPath, databasePath).startsWith("..");
} else {
// database for /one/two/three/test.ql is at /one/two/three/three.testproj
const testdir = dirname(testPath);
const testdirbase = basename(testdir);
return pathsEqual(
databasePath,
join(testdir, `${testdirbase}.testproj`),
process.platform,
);
}
} catch {
// No information available for test path - assume database is unaffected.
return false;
}
}
}
@@ -0,0 +1,83 @@
import vscode from "vscode";
import * as cli from "../../codeql-cli/cli";
import { DatabaseContents } from "./database-contents";
import { DatabaseOptions } from "./database-options";

/** An item in the list of available databases */
export interface DatabaseItem {
/** The URI of the database */
readonly databaseUri: vscode.Uri;
/** The name of the database to be displayed in the UI */
name: string;

/** The primary language of the database or empty string if unknown */
readonly language: string;
/** The URI of the database's source archive, or `undefined` if no source archive is to be used. */
readonly sourceArchive: vscode.Uri | undefined;
/**
* The contents of the database.
* Will be `undefined` if the database is invalid. Can be updated by calling `refresh()`.
*/
readonly contents: DatabaseContents | undefined;

/**
* The date this database was added as a unix timestamp. Or undefined if we don't know.
*/
readonly dateAdded: number | undefined;

/** If the database is invalid, describes why. */
readonly error: Error | undefined;

/**
* Resolves a filename to its URI in the source archive.
*
* @param file Filename within the source archive. May be `undefined` to return a dummy file path.
*/
resolveSourceFile(file: string | undefined): vscode.Uri;

/**
* Holds if the database item has a `.dbinfo` or `codeql-database.yml` file.
*/
hasMetadataFile(): Promise<boolean>;

/**
* Returns `sourceLocationPrefix` of exported database.
*/
getSourceLocationPrefix(server: cli.CodeQLCliServer): Promise<string>;

/**
* Returns dataset folder of exported database.
*/
getDatasetFolder(server: cli.CodeQLCliServer): Promise<string>;

/**
* Returns the root uri of the virtual filesystem for this database's source archive,
* as displayed in the filesystem explorer.
*/
getSourceArchiveExplorerUri(): vscode.Uri;

/**
* Holds if `uri` belongs to this database's source archive.
*/
belongsToSourceArchiveExplorerUri(uri: vscode.Uri): boolean;

/**
* Whether the database may be affected by test execution for the given path.
*/
isAffectedByTest(testPath: string): Promise<boolean>;

/**
* Gets the state of this database, to be persisted in the workspace state.
*/
getPersistedState(): PersistedDatabaseItem;

/**
* Verifies that this database item has a zipped source folder. Returns an error message if it does not.
*/
verifyZippedSources(): string | undefined;
}

export interface PersistedDatabaseItem {
uri: string;
options?: DatabaseOptions;
}
@@ -1,50 +1,34 @@
import { pathExists, stat, remove } from "fs-extra";
import vscode, { ExtensionContext } from "vscode";
import { glob } from "glob";
import { extLogger, Logger } from "../../common";
import { join, basename, resolve, relative, dirname, extname } from "path";
import { DisposableObject } from "../../pure/disposable-object";
import * as vscode from "vscode";
import { App } from "../../common/app";
import * as cli from "../codeql-cli/cli";
import { QueryRunner } from "../../query-server";
import { ExtensionContext } from "vscode";
import * as cli from "../../codeql-cli/cli";
import {
import { ProgressCallback, withProgress } from "../../common/vscode/progress";
showAndLogWarningMessage,
showAndLogInformationMessage,
isLikelyDatabaseRoot,
showAndLogExceptionWithTelemetry,
isFolderAlreadyInWorkspace,
getFirstWorkspaceFolder,
showNeverAskAgainDialog,
} from "../helpers";
import { ProgressCallback, withProgress } from "../common/vscode/progress";
import {
zipArchiveScheme,
encodeArchiveBasePath,
decodeSourceArchiveUri,
encodeSourceArchiveUri,
} from "../common/vscode/archive-filesystem-provider";
import { DisposableObject } from "../pure/disposable-object";
import { Logger, extLogger } from "../common";
import { asError, getErrorMessage } from "../pure/helpers-pure";
import { QueryRunner } from "../query-server";
import { containsPath, pathsEqual } from "../pure/files";
import { redactableError } from "../pure/errors";
import {
import {
getAutogenerateQlPacks,
isCodespacesTemplate,
setAutogenerateQlPacks,
} from "../config";
} from "../../config";
import { QlPackGenerator } from "../qlpack-generator";
import { extname, join } from "path";
import { QueryLanguage } from "../common/query-language";
import { FullDatabaseOptions } from "./database-options";
import { App } from "../common/app";
import { DatabaseItemImpl } from "./database-item-impl";
import {
getFirstWorkspaceFolder,
isFolderAlreadyInWorkspace,
showAndLogExceptionWithTelemetry,
showNeverAskAgainDialog,
} from "../../helpers";
import { existsSync } from "fs";
import { QlPackGenerator } from "../../qlpack-generator";
/**
import { QueryLanguage } from "../../common/query-language";
* databases.ts
import { asError, getErrorMessage } from "../../pure/helpers-pure";
* ------------
import { DatabaseItem, PersistedDatabaseItem } from "./database-item";
* Managing state of what the current database is, and what other
import { redactableError } from "../../pure/errors";
* databases have been recently selected.
import { remove } from "fs-extra";
*
import { containsPath } from "../../pure/files";
* The source of truth of the current state resides inside the
import { DatabaseChangedEvent, DatabaseEventKind } from "./database-events";
* `DatabaseManager` class below.
import { DatabaseResolver } from "./database-resolver";
*/

/**
* The name of the key in the workspaceState dictionary in which we
@@ -58,509 +42,6 @@ const CURRENT_DB = "currentDatabase";
|
|||||||
*/
|
*/
|
||||||
const DB_LIST = "databaseList";
|
const DB_LIST = "databaseList";
|
||||||
|
|
||||||
export interface DatabaseOptions {
|
|
||||||
displayName?: string;
|
|
||||||
ignoreSourceArchive?: boolean;
|
|
||||||
dateAdded?: number | undefined;
|
|
||||||
language?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface FullDatabaseOptions extends DatabaseOptions {
|
|
||||||
ignoreSourceArchive: boolean;
|
|
||||||
dateAdded: number | undefined;
|
|
||||||
language: string | undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface PersistedDatabaseItem {
|
|
||||||
uri: string;
|
|
||||||
options?: DatabaseOptions;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The layout of the database.
|
|
||||||
*/
|
|
||||||
export enum DatabaseKind {
|
|
||||||
/** A CodeQL database */
|
|
||||||
Database,
|
|
||||||
/** A raw QL dataset */
|
|
||||||
RawDataset,
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface DatabaseContents {
|
|
||||||
/** The layout of the database */
|
|
||||||
kind: DatabaseKind;
|
|
||||||
/**
|
|
||||||
* The name of the database.
|
|
||||||
*/
|
|
||||||
name: string;
|
|
||||||
/** The URI of the QL dataset within the database. */
|
|
||||||
datasetUri: vscode.Uri;
|
|
||||||
/** The URI of the source archive within the database, if one exists. */
|
|
||||||
sourceArchiveUri?: vscode.Uri;
|
|
||||||
/** The URI of the CodeQL database scheme within the database, if exactly one exists. */
|
|
||||||
dbSchemeUri?: vscode.Uri;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface DatabaseContentsWithDbScheme extends DatabaseContents {
|
|
||||||
dbSchemeUri: vscode.Uri; // Always present
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* An error thrown when we cannot find a valid database in a putative
|
|
||||||
* database directory.
|
|
||||||
*/
|
|
||||||
class InvalidDatabaseError extends Error {}
|
|
||||||
|
|
||||||
async function findDataset(parentDirectory: string): Promise<vscode.Uri> {
|
|
||||||
/*
|
|
||||||
* Look directly in the root
|
|
||||||
*/
|
|
||||||
let dbRelativePaths = await glob("db-*/", {
|
|
||||||
cwd: parentDirectory,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (dbRelativePaths.length === 0) {
|
|
||||||
/*
|
|
||||||
* Check If they are in the old location
|
|
||||||
*/
|
|
||||||
dbRelativePaths = await glob("working/db-*/", {
|
|
||||||
cwd: parentDirectory,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if (dbRelativePaths.length === 0) {
|
|
||||||
throw new InvalidDatabaseError(
|
|
||||||
`'${parentDirectory}' does not contain a dataset directory.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const dbAbsolutePath = join(parentDirectory, dbRelativePaths[0]);
|
|
||||||
if (dbRelativePaths.length > 1) {
|
|
||||||
void showAndLogWarningMessage(
|
|
||||||
`Found multiple dataset directories in database, using '${dbAbsolutePath}'.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return vscode.Uri.file(dbAbsolutePath);
|
|
||||||
}
|
|
||||||
|
|
||||||
// exported for testing
|
|
||||||
export async function findSourceArchive(
|
|
||||||
databasePath: string,
|
|
||||||
): Promise<vscode.Uri | undefined> {
|
|
||||||
const relativePaths = ["src", "output/src_archive"];
|
|
||||||
|
|
||||||
for (const relativePath of relativePaths) {
|
|
||||||
const basePath = join(databasePath, relativePath);
|
|
||||||
const zipPath = `${basePath}.zip`;
|
|
||||||
|
|
||||||
// Prefer using a zip archive over a directory.
|
|
||||||
if (await pathExists(zipPath)) {
|
|
||||||
return encodeArchiveBasePath(zipPath);
|
|
||||||
} else if (await pathExists(basePath)) {
|
|
||||||
return vscode.Uri.file(basePath);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void showAndLogInformationMessage(
|
|
||||||
`Could not find source archive for database '${databasePath}'. Assuming paths are absolute.`,
|
|
||||||
);
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Gets the relative paths of all `.dbscheme` files in the given directory. */
|
|
||||||
async function getDbSchemeFiles(dbDirectory: string): Promise<string[]> {
|
|
||||||
return await glob("*.dbscheme", { cwd: dbDirectory });
|
|
||||||
}
|
|
||||||
|
|
||||||
export class DatabaseResolver {
|
|
||||||
public static async resolveDatabaseContents(
|
|
||||||
uri: vscode.Uri,
|
|
||||||
): Promise<DatabaseContentsWithDbScheme> {
|
|
||||||
if (uri.scheme !== "file") {
|
|
||||||
throw new Error(
|
|
||||||
`Database URI scheme '${uri.scheme}' not supported; only 'file' URIs are supported.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
const databasePath = uri.fsPath;
|
|
||||||
if (!(await pathExists(databasePath))) {
|
|
||||||
throw new InvalidDatabaseError(
|
|
||||||
`Database '${databasePath}' does not exist.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const contents = await this.resolveDatabase(databasePath);
|
|
||||||
|
|
||||||
if (contents === undefined) {
|
|
||||||
throw new InvalidDatabaseError(
|
|
||||||
`'${databasePath}' is not a valid database.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Look for a single dbscheme file within the database.
|
|
||||||
// This should be found in the dataset directory, regardless of the form of database.
|
|
||||||
const dbPath = contents.datasetUri.fsPath;
|
|
||||||
const dbSchemeFiles = await getDbSchemeFiles(dbPath);
|
|
||||||
if (dbSchemeFiles.length === 0) {
|
|
||||||
throw new InvalidDatabaseError(
|
|
||||||
`Database '${databasePath}' does not contain a CodeQL dbscheme under '${dbPath}'.`,
|
|
||||||
);
|
|
||||||
} else if (dbSchemeFiles.length > 1) {
|
|
||||||
throw new InvalidDatabaseError(
|
|
||||||
`Database '${databasePath}' contains multiple CodeQL dbschemes under '${dbPath}'.`,
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
const dbSchemeUri = vscode.Uri.file(resolve(dbPath, dbSchemeFiles[0]));
|
|
||||||
return {
|
|
||||||
...contents,
|
|
||||||
dbSchemeUri,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static async resolveDatabase(
|
|
||||||
databasePath: string,
|
|
||||||
): Promise<DatabaseContents> {
|
|
||||||
const name = basename(databasePath);
|
|
||||||
|
|
||||||
// Look for dataset and source archive.
|
|
||||||
const datasetUri = await findDataset(databasePath);
|
|
||||||
const sourceArchiveUri = await findSourceArchive(databasePath);
|
|
||||||
|
|
||||||
return {
|
|
||||||
kind: DatabaseKind.Database,
|
|
||||||
name,
|
|
||||||
datasetUri,
|
|
||||||
sourceArchiveUri,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/** An item in the list of available databases */
|
|
||||||
export interface DatabaseItem {
|
|
||||||
/** The URI of the database */
|
|
||||||
readonly databaseUri: vscode.Uri;
|
|
||||||
/** The name of the database to be displayed in the UI */
|
|
||||||
name: string;
|
|
||||||
|
|
||||||
/** The primary language of the database or empty string if unknown */
|
|
||||||
readonly language: string;
|
|
||||||
/** The URI of the database's source archive, or `undefined` if no source archive is to be used. */
|
|
||||||
readonly sourceArchive: vscode.Uri | undefined;
|
|
||||||
/**
|
|
||||||
* The contents of the database.
|
|
||||||
* Will be `undefined` if the database is invalid. Can be updated by calling `refresh()`.
|
|
||||||
*/
|
|
||||||
readonly contents: DatabaseContents | undefined;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The date this database was added as a unix timestamp. Or undefined if we don't know.
|
|
||||||
*/
|
|
||||||
readonly dateAdded: number | undefined;
|
|
||||||
|
|
||||||
/** If the database is invalid, describes why. */
|
|
||||||
readonly error: Error | undefined;
|
|
||||||
/**
|
|
||||||
* Resolves the contents of the database.
|
|
||||||
*
|
|
||||||
* @remarks
|
|
||||||
* The contents include the database directory, source archive, and metadata about the database.
|
|
||||||
* If the database is invalid, `this.error` is updated with the error object that describes why
|
|
||||||
* the database is invalid. This error is also thrown.
|
|
||||||
*/
|
|
||||||
refresh(): Promise<void>;
|
|
||||||
/**
|
|
||||||
* Resolves a filename to its URI in the source archive.
|
|
||||||
*
|
|
||||||
* @param file Filename within the source archive. May be `undefined` to return a dummy file path.
|
|
||||||
*/
|
|
||||||
resolveSourceFile(file: string | undefined): vscode.Uri;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Holds if the database item has a `.dbinfo` or `codeql-database.yml` file.
|
|
||||||
*/
|
|
||||||
hasMetadataFile(): Promise<boolean>;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns `sourceLocationPrefix` of exported database.
|
|
||||||
*/
|
|
||||||
getSourceLocationPrefix(server: cli.CodeQLCliServer): Promise<string>;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns dataset folder of exported database.
|
|
||||||
*/
|
|
||||||
getDatasetFolder(server: cli.CodeQLCliServer): Promise<string>;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the root uri of the virtual filesystem for this database's source archive,
|
|
||||||
* as displayed in the filesystem explorer.
|
|
||||||
*/
|
|
||||||
getSourceArchiveExplorerUri(): vscode.Uri;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Holds if `uri` belongs to this database's source archive.
|
|
||||||
*/
|
|
||||||
belongsToSourceArchiveExplorerUri(uri: vscode.Uri): boolean;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Whether the database may be affected by test execution for the given path.
|
|
||||||
*/
|
|
||||||
isAffectedByTest(testPath: string): Promise<boolean>;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Gets the state of this database, to be persisted in the workspace state.
|
|
||||||
*/
|
|
||||||
getPersistedState(): PersistedDatabaseItem;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Verifies that this database item has a zipped source folder. Returns an error message if it does not.
|
|
||||||
*/
|
|
||||||
verifyZippedSources(): string | undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
export enum DatabaseEventKind {
|
|
||||||
Add = "Add",
|
|
||||||
Remove = "Remove",
|
|
||||||
|
|
||||||
// Fired when databases are refreshed from persisted state
|
|
||||||
Refresh = "Refresh",
|
|
||||||
|
|
||||||
// Fired when the current database changes
|
|
||||||
Change = "Change",
|
|
||||||
|
|
||||||
Rename = "Rename",
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface DatabaseChangedEvent {
|
|
||||||
kind: DatabaseEventKind;
|
|
||||||
item: DatabaseItem | undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Exported for testing
|
|
||||||
export class DatabaseItemImpl implements DatabaseItem {
|
|
||||||
private _error: Error | undefined = undefined;
|
|
||||||
private _contents: DatabaseContents | undefined;
|
|
||||||
/** A cache of database info */
|
|
||||||
private _dbinfo: cli.DbInfo | undefined;
|
|
||||||
|
|
||||||
public constructor(
|
|
||||||
public readonly databaseUri: vscode.Uri,
|
|
||||||
contents: DatabaseContents | undefined,
|
|
||||||
private options: FullDatabaseOptions,
|
|
||||||
private readonly onChanged: (event: DatabaseChangedEvent) => void,
|
|
||||||
) {
|
|
||||||
this._contents = contents;
|
|
||||||
}
|
|
||||||
|
|
||||||
public get name(): string {
|
|
||||||
if (this.options.displayName) {
|
|
||||||
return this.options.displayName;
|
|
||||||
} else if (this._contents) {
|
|
||||||
return this._contents.name;
|
|
||||||
} else {
|
|
||||||
return basename(this.databaseUri.fsPath);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public set name(newName: string) {
|
|
||||||
this.options.displayName = newName;
|
|
||||||
}
|
|
||||||
|
|
||||||
public get sourceArchive(): vscode.Uri | undefined {
|
|
||||||
if (this.options.ignoreSourceArchive || this._contents === undefined) {
|
|
||||||
return undefined;
|
|
||||||
} else {
|
|
||||||
return this._contents.sourceArchiveUri;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public get contents(): DatabaseContents | undefined {
|
|
||||||
return this._contents;
|
|
||||||
}
|
|
||||||
|
|
||||||
public get dateAdded(): number | undefined {
|
|
||||||
return this.options.dateAdded;
|
|
||||||
}
|
|
||||||
|
|
||||||
public get error(): Error | undefined {
|
|
||||||
return this._error;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async refresh(): Promise<void> {
|
|
||||||
try {
|
|
||||||
try {
|
|
||||||
this._contents = await DatabaseResolver.resolveDatabaseContents(
|
|
||||||
this.databaseUri,
|
|
||||||
);
|
|
||||||
this._error = undefined;
|
|
||||||
} catch (e) {
|
|
||||||
this._contents = undefined;
|
|
||||||
this._error = asError(e);
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
this.onChanged({
|
|
||||||
kind: DatabaseEventKind.Refresh,
|
|
||||||
item: this,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public resolveSourceFile(uriStr: string | undefined): vscode.Uri {
|
|
||||||
const sourceArchive = this.sourceArchive;
|
|
||||||
const uri = uriStr ? vscode.Uri.parse(uriStr, true) : undefined;
|
|
||||||
if (uri && uri.scheme !== "file") {
|
|
||||||
throw new Error(
|
|
||||||
`Invalid uri scheme in ${uriStr}. Only 'file' is allowed.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
if (!sourceArchive) {
|
|
||||||
if (uri) {
|
|
||||||
return uri;
|
|
||||||
} else {
|
|
||||||
return this.databaseUri;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (uri) {
|
|
||||||
const relativeFilePath = decodeURI(uri.path)
|
|
||||||
.replace(":", "_")
|
|
||||||
.replace(/^\/*/, "");
|
|
||||||
if (sourceArchive.scheme === zipArchiveScheme) {
|
|
||||||
const zipRef = decodeSourceArchiveUri(sourceArchive);
|
|
||||||
const pathWithinSourceArchive =
|
|
||||||
zipRef.pathWithinSourceArchive === "/"
|
|
||||||
? relativeFilePath
|
|
||||||
: `${zipRef.pathWithinSourceArchive}/${relativeFilePath}`;
|
|
||||||
return encodeSourceArchiveUri({
|
|
||||||
pathWithinSourceArchive,
|
|
||||||
sourceArchiveZipPath: zipRef.sourceArchiveZipPath,
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
let newPath = sourceArchive.path;
|
|
||||||
if (!newPath.endsWith("/")) {
|
|
||||||
// Ensure a trailing slash.
|
|
||||||
newPath += "/";
|
|
||||||
}
|
|
||||||
newPath += relativeFilePath;
|
|
||||||
|
|
||||||
return sourceArchive.with({ path: newPath });
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return sourceArchive;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Gets the state of this database, to be persisted in the workspace state.
|
|
||||||
*/
|
|
||||||
public getPersistedState(): PersistedDatabaseItem {
|
|
||||||
return {
|
|
||||||
uri: this.databaseUri.toString(true),
|
|
||||||
options: this.options,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Holds if the database item refers to an exported snapshot
|
|
||||||
*/
|
|
||||||
public async hasMetadataFile(): Promise<boolean> {
|
|
||||||
return await isLikelyDatabaseRoot(this.databaseUri.fsPath);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns information about a database.
|
|
||||||
*/
|
|
||||||
private async getDbInfo(server: cli.CodeQLCliServer): Promise<cli.DbInfo> {
|
|
||||||
if (this._dbinfo === undefined) {
|
|
||||||
this._dbinfo = await server.resolveDatabase(this.databaseUri.fsPath);
|
|
||||||
}
|
|
||||||
return this._dbinfo;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns `sourceLocationPrefix` of database. Requires that the database
|
|
||||||
* has a `.dbinfo` file, which is the source of the prefix.
|
|
||||||
*/
|
|
||||||
public async getSourceLocationPrefix(
|
|
||||||
server: cli.CodeQLCliServer,
|
|
||||||
): Promise<string> {
|
|
||||||
const dbInfo = await this.getDbInfo(server);
|
|
||||||
return dbInfo.sourceLocationPrefix;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns path to dataset folder of database.
|
|
||||||
*/
|
|
||||||
public async getDatasetFolder(server: cli.CodeQLCliServer): Promise<string> {
|
|
||||||
const dbInfo = await this.getDbInfo(server);
|
|
||||||
return dbInfo.datasetFolder;
|
|
||||||
}
|
|
||||||
|
|
||||||
public get language() {
|
|
||||||
return this.options.language || "";
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the root uri of the virtual filesystem for this database's source archive.
|
|
||||||
*/
|
|
||||||
public getSourceArchiveExplorerUri(): vscode.Uri {
|
|
||||||
const sourceArchive = this.sourceArchive;
|
|
||||||
if (sourceArchive === undefined || !sourceArchive.fsPath.endsWith(".zip")) {
|
|
||||||
throw new Error(this.verifyZippedSources());
|
|
||||||
}
|
|
||||||
return encodeArchiveBasePath(sourceArchive.fsPath);
|
|
||||||
}
|
|
||||||
|
|
||||||
public verifyZippedSources(): string | undefined {
|
|
||||||
const sourceArchive = this.sourceArchive;
|
|
||||||
if (sourceArchive === undefined) {
|
|
||||||
return `${this.name} has no source archive.`;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!sourceArchive.fsPath.endsWith(".zip")) {
|
|
||||||
return `${this.name} has a source folder that is unzipped.`;
|
|
||||||
}
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Holds if `uri` belongs to this database's source archive.
|
|
||||||
*/
|
|
||||||
public belongsToSourceArchiveExplorerUri(uri: vscode.Uri): boolean {
|
|
||||||
if (this.sourceArchive === undefined) return false;
|
|
||||||
return (
|
|
||||||
uri.scheme === zipArchiveScheme &&
|
|
||||||
decodeSourceArchiveUri(uri).sourceArchiveZipPath ===
|
|
||||||
this.sourceArchive.fsPath
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async isAffectedByTest(testPath: string): Promise<boolean> {
|
|
||||||
const databasePath = this.databaseUri.fsPath;
|
|
||||||
if (!databasePath.endsWith(".testproj")) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
const stats = await stat(testPath);
|
|
||||||
if (stats.isDirectory()) {
|
|
||||||
return !relative(testPath, databasePath).startsWith("..");
|
|
||||||
} else {
|
|
||||||
// database for /one/two/three/test.ql is at /one/two/three/three.testproj
|
|
||||||
const testdir = dirname(testPath);
|
|
||||||
const testdirbase = basename(testdir);
|
|
||||||
return pathsEqual(
|
|
||||||
databasePath,
|
|
||||||
join(testdir, `${testdirbase}.testproj`),
|
|
||||||
process.platform,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
// No information available for test path - assume database is unaffected.
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
 /**
  * A promise that resolves to an event's result value when the event
  * `event` fires. If waiting for the event takes too long (by default
@@ -602,7 +83,7 @@ export class DatabaseManager extends DisposableObject {
   readonly onDidChangeCurrentDatabaseItem =
     this._onDidChangeCurrentDatabaseItem.event;

-  private readonly _databaseItems: DatabaseItem[] = [];
+  private readonly _databaseItems: DatabaseItemImpl[] = [];
   private _currentDatabaseItem: DatabaseItem | undefined = undefined;

   constructor(
@@ -646,8 +127,8 @@ export class DatabaseManager extends DisposableObject {
    *
    * Typically, the item will have been created by {@link createOrOpenDatabaseItem} or {@link openDatabase}.
    */
-  public async addExistingDatabaseItem(
-    databaseItem: DatabaseItem,
+  private async addExistingDatabaseItem(
+    databaseItem: DatabaseItemImpl,
     progress: ProgressCallback,
     makeSelected: boolean,
     token: vscode.CancellationToken,
@@ -681,7 +162,7 @@ export class DatabaseManager extends DisposableObject {
   private async createDatabaseItem(
     uri: vscode.Uri,
     displayName: string | undefined,
-  ): Promise<DatabaseItem> {
+  ): Promise<DatabaseItemImpl> {
     const contents = await DatabaseResolver.resolveDatabaseContents(uri);
     // Ignore the source archive for QLTest databases by default.
     const isQLTestDatabase = extname(uri.fsPath) === ".testproj";
@@ -692,14 +173,7 @@ export class DatabaseManager extends DisposableObject {
       dateAdded: Date.now(),
       language: await this.getPrimaryLanguage(uri.fsPath),
     };
-    const databaseItem = new DatabaseItemImpl(
-      uri,
-      contents,
-      fullOptions,
-      (event) => {
-        this._onDidChangeDatabaseItem.fire(event);
-      },
-    );
-
+    const databaseItem = new DatabaseItemImpl(uri, contents, fullOptions);
     return databaseItem;
   }
@@ -848,7 +322,7 @@ export class DatabaseManager extends DisposableObject {
     progress: ProgressCallback,
     token: vscode.CancellationToken,
     state: PersistedDatabaseItem,
-  ): Promise<DatabaseItem> {
+  ): Promise<DatabaseItemImpl> {
     let displayName: string | undefined = undefined;
     let ignoreSourceArchive = false;
     let dateAdded = undefined;
@@ -878,14 +352,7 @@ export class DatabaseManager extends DisposableObject {
       dateAdded,
       language,
     };
-    const item = new DatabaseItemImpl(
-      dbBaseUri,
-      undefined,
-      fullOptions,
-      (event) => {
-        this._onDidChangeDatabaseItem.fire(event);
-      },
-    );
-
+    const item = new DatabaseItemImpl(dbBaseUri, undefined, fullOptions);
     // Avoid persisting the database state after adding since that should happen only after
     // all databases have been added.
@@ -926,7 +393,7 @@ export class DatabaseManager extends DisposableObject {
       database,
     );
     try {
-      await databaseItem.refresh();
+      await this.refreshDatabase(databaseItem);
       await this.registerDatabase(progress, token, databaseItem);
       if (currentDatabaseUri === database.uri) {
         await this.setCurrentDatabaseItem(databaseItem, true);
@@ -968,8 +435,12 @@ export class DatabaseManager extends DisposableObject {
     item: DatabaseItem | undefined,
     skipRefresh = false,
   ): Promise<void> {
-    if (!skipRefresh && item !== undefined) {
-      await item.refresh(); // Will throw on invalid database.
+    if (
+      !skipRefresh &&
+      item !== undefined &&
+      item instanceof DatabaseItemImpl
+    ) {
+      await this.refreshDatabase(item); // Will throw on invalid database.
     }
     if (this._currentDatabaseItem !== item) {
       this._currentDatabaseItem = item;
@@ -1018,7 +489,7 @@ export class DatabaseManager extends DisposableObject {
   private async addDatabaseItem(
     progress: ProgressCallback,
     token: vscode.CancellationToken,
-    item: DatabaseItem,
+    item: DatabaseItemImpl,
     updatePersistedState = true,
   ) {
     this._databaseItems.push(item);
@@ -1135,6 +606,34 @@ export class DatabaseManager extends DisposableObject {
     await this.qs.registerDatabase(progress, token, dbItem);
   }

+  /**
+   * Resolves the contents of the database.
+   *
+   * @remarks
+   * The contents include the database directory, source archive, and metadata about the database.
+   * If the database is invalid, `databaseItem.error` is updated with the error object that describes why
+   * the database is invalid. This error is also thrown.
+   */
+  private async refreshDatabase(databaseItem: DatabaseItemImpl) {
+    try {
+      try {
+        databaseItem.contents = await DatabaseResolver.resolveDatabaseContents(
+          databaseItem.databaseUri,
+        );
+        databaseItem.error = undefined;
+      } catch (e) {
+        databaseItem.contents = undefined;
+        databaseItem.error = asError(e);
+        throw e;
+      }
+    } finally {
+      this._onDidChangeDatabaseItem.fire({
+        kind: DatabaseEventKind.Refresh,
+        item: databaseItem,
+      });
+    }
+  }
+
   private updatePersistedCurrentDatabaseItem(): void {
     void this.ctx.workspaceState.update(
       CURRENT_DB,
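For orientation (not part of the commit itself): the new `refreshDatabase` helper always fires a `DatabaseEventKind.Refresh` event, even when content resolution fails, so consumers can react to both outcomes. A minimal listener sketch follows; it assumes the manager exposes the emitter as a public `onDidChangeDatabaseItem` event, which is not shown in this hunk and is an assumption.

// Hypothetical consumer of the Refresh event fired by refreshDatabase.
// Assumes `onDidChangeDatabaseItem` is the public counterpart of
// `_onDidChangeDatabaseItem`; adjust the name if it differs.
import {
  DatabaseEventKind,
  DatabaseManager,
} from "./databases/local-databases";

function watchRefreshes(databaseManager: DatabaseManager): void {
  databaseManager.onDidChangeDatabaseItem((event) => {
    if (event.kind === DatabaseEventKind.Refresh) {
      // On failure, `item.error` is set and `item.contents` is cleared;
      // on success, `item.error` is reset to undefined.
      console.log(`Database refreshed: ${event.item?.name ?? "<unknown>"}`);
    }
  });
}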
@@ -1164,15 +663,3 @@ export class DatabaseManager extends DisposableObject {
     return dbInfo.languages?.[0] || "";
   }
 }
-
-/**
- * Get the set of directories containing upgrades, given a list of
- * scripts returned by the cli's upgrade resolution.
- */
-export function getUpgradesDirectories(scripts: string[]): vscode.Uri[] {
-  const parentDirs = scripts.map((dir) => dirname(dir));
-  const uniqueParentDirs = new Set(parentDirs);
-  return Array.from(uniqueParentDirs).map((filePath) =>
-    vscode.Uri.file(filePath),
-  );
-}

@@ -0,0 +1,12 @@
+export interface DatabaseOptions {
+  displayName?: string;
+  ignoreSourceArchive?: boolean;
+  dateAdded?: number | undefined;
+  language?: string;
+}
+
+export interface FullDatabaseOptions extends DatabaseOptions {
+  ignoreSourceArchive: boolean;
+  dateAdded: number | undefined;
+  language: string | undefined;
+}
@@ -0,0 +1,144 @@
+import vscode from "vscode";
+import { pathExists } from "fs-extra";
+import { basename, join, resolve } from "path";
+import {
+  DatabaseContents,
+  DatabaseContentsWithDbScheme,
+  DatabaseKind,
+} from "./database-contents";
+import { glob } from "glob";
+import {
+  showAndLogInformationMessage,
+  showAndLogWarningMessage,
+} from "../../helpers";
+import { encodeArchiveBasePath } from "../../common/vscode/archive-filesystem-provider";
+
+export class DatabaseResolver {
+  public static async resolveDatabaseContents(
+    uri: vscode.Uri,
+  ): Promise<DatabaseContentsWithDbScheme> {
+    if (uri.scheme !== "file") {
+      throw new Error(
+        `Database URI scheme '${uri.scheme}' not supported; only 'file' URIs are supported.`,
+      );
+    }
+    const databasePath = uri.fsPath;
+    if (!(await pathExists(databasePath))) {
+      throw new InvalidDatabaseError(
+        `Database '${databasePath}' does not exist.`,
+      );
+    }
+
+    const contents = await this.resolveDatabase(databasePath);
+
+    if (contents === undefined) {
+      throw new InvalidDatabaseError(
+        `'${databasePath}' is not a valid database.`,
+      );
+    }
+
+    // Look for a single dbscheme file within the database.
+    // This should be found in the dataset directory, regardless of the form of database.
+    const dbPath = contents.datasetUri.fsPath;
+    const dbSchemeFiles = await getDbSchemeFiles(dbPath);
+    if (dbSchemeFiles.length === 0) {
+      throw new InvalidDatabaseError(
+        `Database '${databasePath}' does not contain a CodeQL dbscheme under '${dbPath}'.`,
+      );
+    } else if (dbSchemeFiles.length > 1) {
+      throw new InvalidDatabaseError(
+        `Database '${databasePath}' contains multiple CodeQL dbschemes under '${dbPath}'.`,
+      );
+    } else {
+      const dbSchemeUri = vscode.Uri.file(resolve(dbPath, dbSchemeFiles[0]));
+      return {
+        ...contents,
+        dbSchemeUri,
+      };
+    }
+  }
+
+  public static async resolveDatabase(
+    databasePath: string,
+  ): Promise<DatabaseContents> {
+    const name = basename(databasePath);
+
+    // Look for dataset and source archive.
+    const datasetUri = await findDataset(databasePath);
+    const sourceArchiveUri = await findSourceArchive(databasePath);
+
+    return {
+      kind: DatabaseKind.Database,
+      name,
+      datasetUri,
+      sourceArchiveUri,
+    };
+  }
+}
+
+/**
+ * An error thrown when we cannot find a valid database in a putative
+ * database directory.
+ */
+class InvalidDatabaseError extends Error {}
+
+async function findDataset(parentDirectory: string): Promise<vscode.Uri> {
+  /*
+   * Look directly in the root
+   */
+  let dbRelativePaths = await glob("db-*/", {
+    cwd: parentDirectory,
+  });
+
+  if (dbRelativePaths.length === 0) {
+    /*
+     * Check If they are in the old location
+     */
+    dbRelativePaths = await glob("working/db-*/", {
+      cwd: parentDirectory,
+    });
+  }
+  if (dbRelativePaths.length === 0) {
+    throw new InvalidDatabaseError(
+      `'${parentDirectory}' does not contain a dataset directory.`,
+    );
+  }
+
+  const dbAbsolutePath = join(parentDirectory, dbRelativePaths[0]);
+  if (dbRelativePaths.length > 1) {
+    void showAndLogWarningMessage(
+      `Found multiple dataset directories in database, using '${dbAbsolutePath}'.`,
+    );
+  }
+
+  return vscode.Uri.file(dbAbsolutePath);
+}
+
+/** Gets the relative paths of all `.dbscheme` files in the given directory. */
+async function getDbSchemeFiles(dbDirectory: string): Promise<string[]> {
+  return await glob("*.dbscheme", { cwd: dbDirectory });
+}
+
+// exported for testing
+export async function findSourceArchive(
+  databasePath: string,
+): Promise<vscode.Uri | undefined> {
+  const relativePaths = ["src", "output/src_archive"];
+
+  for (const relativePath of relativePaths) {
+    const basePath = join(databasePath, relativePath);
+    const zipPath = `${basePath}.zip`;
+
+    // Prefer using a zip archive over a directory.
+    if (await pathExists(zipPath)) {
+      return encodeArchiveBasePath(zipPath);
+    } else if (await pathExists(basePath)) {
+      return vscode.Uri.file(basePath);
+    }
+  }
+
+  void showAndLogInformationMessage(
+    `Could not find source archive for database '${databasePath}'. Assuming paths are absolute.`,
+  );
+  return undefined;
+}
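A rough usage sketch for the new resolver (illustrative only, not part of the diff; the database path is made up, and the import path assumes a caller under `extensions/ql-vscode/src/`):

import * as vscode from "vscode";
import { DatabaseResolver } from "./databases/local-databases";

async function inspectDatabase(): Promise<void> {
  // Any unpacked CodeQL database directory on disk will do here.
  const uri = vscode.Uri.file("/tmp/my-codeql-db");
  // Throws InvalidDatabaseError when no dataset or no unique dbscheme is found.
  const contents = await DatabaseResolver.resolveDatabaseContents(uri);
  console.log(`dataset: ${contents.datasetUri.fsPath}`);
  console.log(`dbscheme: ${contents.dbSchemeUri.fsPath}`);
  console.log(`source archive: ${contents.sourceArchiveUri?.fsPath ?? "none"}`);
}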
extensions/ql-vscode/src/databases/local-databases/index.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
+export {
+  DatabaseContents,
+  DatabaseContentsWithDbScheme,
+  DatabaseKind,
+} from "./database-contents";
+export { DatabaseChangedEvent, DatabaseEventKind } from "./database-events";
+export { DatabaseItem } from "./database-item";
+export { DatabaseItemImpl } from "./database-item-impl";
+export { DatabaseManager } from "./database-manager";
+export { DatabaseResolver } from "./database-resolver";
+export { DatabaseOptions, FullDatabaseOptions } from "./database-options";

@@ -1,4 +1,5 @@
 import {
+  ProgressLocation,
   QuickPickItem,
   TreeView,
   TreeViewExpansionEvent,
@@ -13,7 +14,10 @@ import {
   getOwnerFromGitHubUrl,
   isValidGitHubOwner,
 } from "../../common/github-url-identifier-helper";
-import { showAndLogErrorMessage } from "../../helpers";
+import {
+  showAndLogErrorMessage,
+  showAndLogInformationMessage,
+} from "../../helpers";
 import { DisposableObject } from "../../pure/disposable-object";
 import {
   DbItem,
@@ -32,6 +36,8 @@ import { getControllerRepo } from "../../variant-analysis/run-remote-query";
 import { getErrorMessage } from "../../pure/helpers-pure";
 import { DatabasePanelCommands } from "../../common/commands";
 import { App } from "../../common/app";
+import { getCodeSearchRepositories } from "../../variant-analysis/gh-api/gh-api-client";
+import { QueryLanguage } from "../../common/query-language";

 export interface RemoteDatabaseQuickPickItem extends QuickPickItem {
   remoteDatabaseKind: string;
@@ -41,6 +47,10 @@ export interface AddListQuickPickItem extends QuickPickItem {
   databaseKind: DbListKind;
 }

+export interface CodeSearchQuickPickItem extends QuickPickItem {
+  language: string;
+}
+
 export class DbPanel extends DisposableObject {
   private readonly dataProvider: DbTreeDataProvider;
   private readonly treeView: TreeView<DbTreeViewItem>;
@@ -93,6 +103,8 @@ export class DbPanel extends DisposableObject {
         this.renameItem.bind(this),
       "codeQLVariantAnalysisRepositories.removeItemContextMenu":
         this.removeItem.bind(this),
+      "codeQLVariantAnalysisRepositories.importFromCodeSearch":
+        this.importFromCodeSearch.bind(this),
     };
   }

@@ -171,7 +183,14 @@ export class DbPanel extends DisposableObject {
       return;
     }

-    await this.dbManager.addNewRemoteRepo(nwo, parentList);
+    const truncatedRepositories = await this.dbManager.addNewRemoteRepo(
+      nwo,
+      parentList,
+    );
+
+    if (parentList) {
+      this.reportAnyTruncatedRepos(truncatedRepositories, parentList);
+    }
   }

   private async addNewRemoteOwner(): Promise<void> {
@@ -323,6 +342,89 @@ export class DbPanel extends DisposableObject {
     await this.dbManager.removeDbItem(treeViewItem.dbItem);
   }

+  private async importFromCodeSearch(
+    treeViewItem: DbTreeViewItem,
+  ): Promise<void> {
+    if (treeViewItem.dbItem?.kind !== DbItemKind.RemoteUserDefinedList) {
+      throw new Error("Please select a valid list to add code search results.");
+    }
+
+    const listName = treeViewItem.dbItem.listName;
+
+    const languageQuickPickItems: CodeSearchQuickPickItem[] = Object.values(
+      QueryLanguage,
+    ).map((language) => ({
+      label: language.toString(),
+      alwaysShow: true,
+      language: language.toString(),
+    }));
+
+    const codeSearchLanguage =
+      await window.showQuickPick<CodeSearchQuickPickItem>(
+        languageQuickPickItems,
+        {
+          title: "Select a language for your search",
+          placeHolder: "Select an option",
+          ignoreFocusOut: true,
+        },
+      );
+    if (!codeSearchLanguage) {
+      return;
+    }
+
+    const codeSearchQuery = await window.showInputBox({
+      title: "GitHub Code Search",
+      prompt:
+        "Use [GitHub's Code Search syntax](https://docs.github.com/en/search-github/github-code-search/understanding-github-code-search-syntax), including code qualifiers, regular expressions, and boolean operations, to search for repositories.",
+      placeHolder: "org:github",
+    });
+    if (codeSearchQuery === undefined || codeSearchQuery === "") {
+      return;
+    }
+
+    void window.withProgress(
+      {
+        location: ProgressLocation.Notification,
+        title: "Searching for repositories... This might take a while",
+        cancellable: true,
+      },
+      async (progress, token) => {
+        progress.report({ increment: 10 });
+
+        const repositories = await getCodeSearchRepositories(
+          this.app.credentials,
+          `${codeSearchQuery} language:${codeSearchLanguage.language}`,
+          progress,
+          token,
+        );
+
+        token.onCancellationRequested(() => {
+          void showAndLogInformationMessage("Code search cancelled");
+          return;
+        });
+
+        progress.report({ increment: 10, message: "Processing results..." });
+
+        const truncatedRepositories =
+          await this.dbManager.addNewRemoteReposToList(repositories, listName);
+        this.reportAnyTruncatedRepos(truncatedRepositories, listName);
+      },
+    );
+  }
+
+  private reportAnyTruncatedRepos(
+    truncatedRepositories: string[],
+    listName: string,
+  ) {
+    if (truncatedRepositories.length > 0) {
+      void showAndLogErrorMessage(
+        `Some repositories were not added to '${listName}' because a list can only have 1000 entries. Excluded repositories: ${truncatedRepositories.join(
+          ", ",
+        )}`,
+      );
+    }
+  }
+
   private async onDidCollapseElement(
     event: TreeViewExpansionEvent<DbTreeViewItem>,
   ): Promise<void> {
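A note on the search string assembled in `importFromCodeSearch` above: the user's raw query is concatenated with the selected language before it is handed to `getCodeSearchRepositories`. The values below are purely illustrative.

// Illustrative only: how the final code-search query ends up looking.
const codeSearchQuery = "org:github path:.github/workflows"; // user input
const language = "javascript"; // choice from the language quick pick
const finalQuery = `${codeSearchQuery} language:${language}`;
// => "org:github path:.github/workflows language:javascript"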
@@ -4,7 +4,8 @@ export type DbTreeViewItemAction =
   | "canBeSelected"
   | "canBeRemoved"
   | "canBeRenamed"
-  | "canBeOpenedOnGitHub";
+  | "canBeOpenedOnGitHub"
+  | "canImportCodeSearch";

 export function getDbItemActions(dbItem: DbItem): DbTreeViewItemAction[] {
   const actions: DbTreeViewItemAction[] = [];
@@ -21,7 +22,9 @@ export function getDbItemActions(dbItem: DbItem): DbTreeViewItemAction[] {
   if (canBeOpenedOnGitHub(dbItem)) {
     actions.push("canBeOpenedOnGitHub");
   }
+  if (canImportCodeSearch(dbItem)) {
+    actions.push("canImportCodeSearch");
+  }
   return actions;
 }

@@ -60,6 +63,10 @@ function canBeOpenedOnGitHub(dbItem: DbItem): boolean {
   return dbItemKindsThatCanBeOpenedOnGitHub.includes(dbItem.kind);
 }

+function canImportCodeSearch(dbItem: DbItem): boolean {
+  return DbItemKind.RemoteUserDefinedList === dbItem.kind;
+}
+
 export function getGitHubUrl(dbItem: DbItem): string | undefined {
   switch (dbItem.kind) {
     case DbItemKind.RemoteOwner:
@@ -105,7 +105,7 @@ export class QLDebugConfigurationProvider
     validateQueryPath(qlConfiguration.query, quickEval);

     const quickEvalContext = quickEval
-      ? await getQuickEvalContext(undefined)
+      ? await getQuickEvalContext(undefined, false)
       : undefined;

     const resultConfiguration: QLResolvedDebugConfiguration = {
@@ -155,6 +155,7 @@ class RunningQuery extends DisposableObject {
       {
         queryPath: config.query,
         quickEvalPosition: quickEvalContext?.quickEvalPosition,
+        quickEvalCountOnly: quickEvalContext?.quickEvalCount,
       },
       true,
       config.additionalPacks,
@@ -74,7 +74,7 @@ class QLDebugAdapterTracker

   public async quickEval(): Promise<void> {
     const args: CodeQLProtocol.QuickEvalRequest["arguments"] = {
-      quickEvalContext: await getQuickEvalContext(undefined),
+      quickEvalContext: await getQuickEvalContext(undefined, false),
     };
     await this.session.customRequest("codeql-quickeval", args);
   }
@@ -24,7 +24,7 @@ import {
   activate as archiveFilesystemProvider_activate,
   zipArchiveScheme,
 } from "./common/vscode/archive-filesystem-provider";
-import { CodeQLCliServer } from "./codeql-cli/cli";
+import { CliVersionConstraint, CodeQLCliServer } from "./codeql-cli/cli";
 import {
   CliConfigListener,
   DistributionConfigListener,
@@ -125,6 +125,7 @@ import { TestManager } from "./query-testing/test-manager";
 import { TestRunner } from "./query-testing/test-runner";
 import { TestManagerBase } from "./query-testing/test-manager-base";
 import { NewQueryRunner, QueryRunner, QueryServerClient } from "./query-server";
+import { QueriesModule } from "./queries-panel/queries-module";

 /**
  * extension.ts
@@ -407,6 +408,28 @@ export async function activate(
   codeQlExtension.cliServer.addVersionChangedListener((ver) => {
     telemetryListener.cliVersion = ver;
   });
+
+  let unsupportedWarningShown = false;
+  codeQlExtension.cliServer.addVersionChangedListener((ver) => {
+    if (!ver) {
+      return;
+    }
+
+    if (unsupportedWarningShown) {
+      return;
+    }
+
+    if (CliVersionConstraint.OLDEST_SUPPORTED_CLI_VERSION.compare(ver) < 0) {
+      return;
+    }
+
+    void showAndLogWarningMessage(
+      `You are using an unsupported version of the CodeQL CLI (${ver}). ` +
+        `The minimum supported version is ${CliVersionConstraint.OLDEST_SUPPORTED_CLI_VERSION}. ` +
+        `Please upgrade to a newer version of the CodeQL CLI.`,
+    );
+    unsupportedWarningShown = true;
+  });
 }

 return codeQlExtension;
@@ -732,6 +755,8 @@ async function activateWithInstalledDistribution(
   );
   ctx.subscriptions.push(databaseUI);

+  QueriesModule.initialize(app, cliServer);
+
   void extLogger.log("Initializing evaluator log viewer.");
   const evalLogViewer = new EvalLogViewer();
   ctx.subscriptions.push(evalLogViewer);
@@ -10,14 +10,7 @@ import { glob } from "glob";
 import { load } from "js-yaml";
 import { join, basename, dirname } from "path";
 import { dirSync } from "tmp-promise";
-import {
-  ExtensionContext,
-  Uri,
-  window as Window,
-  workspace,
-  env,
-  WorkspaceFolder,
-} from "vscode";
+import { Uri, window as Window, workspace, env, WorkspaceFolder } from "vscode";
 import { CodeQLCliServer, QlpacksInfo } from "./codeql-cli/cli";
 import { UserCancellationException } from "./common/vscode/progress";
 import { extLogger, OutputChannelLogger } from "./common";
@@ -98,7 +91,7 @@ export async function showAndLogErrorMessage(
   return internalShowAndLog(
     dropLinesExceptInitial(message),
     Window.showErrorMessage,
-    options,
+    { fullMessage: message, ...options },
   );
 }

@@ -363,106 +356,6 @@ export async function prepareCodeTour(
   }
 }

-/**
- * Provides a utility method to invoke a function only if a minimum time interval has elapsed since
- * the last invocation of that function.
- */
-export class InvocationRateLimiter<T> {
-  constructor(
-    extensionContext: ExtensionContext,
-    funcIdentifier: string,
-    func: () => Promise<T>,
-    createDate: (dateString?: string) => Date = (s) =>
-      s ? new Date(s) : new Date(),
-  ) {
-    this._createDate = createDate;
-    this._extensionContext = extensionContext;
-    this._func = func;
-    this._funcIdentifier = funcIdentifier;
-  }
-
-  /**
-   * Invoke the function if `minSecondsSinceLastInvocation` seconds have elapsed since the last invocation.
-   */
-  public async invokeFunctionIfIntervalElapsed(
-    minSecondsSinceLastInvocation: number,
-  ): Promise<InvocationRateLimiterResult<T>> {
-    const updateCheckStartDate = this._createDate();
-    const lastInvocationDate = this.getLastInvocationDate();
-    if (
-      minSecondsSinceLastInvocation &&
-      lastInvocationDate &&
-      lastInvocationDate <= updateCheckStartDate &&
-      lastInvocationDate.getTime() + minSecondsSinceLastInvocation * 1000 >
-        updateCheckStartDate.getTime()
-    ) {
-      return createRateLimitedResult();
-    }
-    const result = await this._func();
-    await this.setLastInvocationDate(updateCheckStartDate);
-    return createInvokedResult(result);
-  }
-
-  private getLastInvocationDate(): Date | undefined {
-    const maybeDateString: string | undefined =
-      this._extensionContext.globalState.get(
-        InvocationRateLimiter._invocationRateLimiterPrefix +
-          this._funcIdentifier,
-      );
-    return maybeDateString ? this._createDate(maybeDateString) : undefined;
-  }
-
-  private async setLastInvocationDate(date: Date): Promise<void> {
-    return await this._extensionContext.globalState.update(
-      InvocationRateLimiter._invocationRateLimiterPrefix + this._funcIdentifier,
-      date,
-    );
-  }
-
-  private readonly _createDate: (dateString?: string) => Date;
-  private readonly _extensionContext: ExtensionContext;
-  private readonly _func: () => Promise<T>;
-  private readonly _funcIdentifier: string;
-
-  private static readonly _invocationRateLimiterPrefix =
-    "invocationRateLimiter_lastInvocationDate_";
-}
-
-export enum InvocationRateLimiterResultKind {
-  Invoked,
-  RateLimited,
-}
-
-/**
- * The function was invoked and returned the value `result`.
- */
-interface InvokedResult<T> {
-  kind: InvocationRateLimiterResultKind.Invoked;
-  result: T;
-}
-
-/**
- * The function was not invoked as the minimum interval since the last invocation had not elapsed.
- */
-interface RateLimitedResult {
-  kind: InvocationRateLimiterResultKind.RateLimited;
-}
-
-type InvocationRateLimiterResult<T> = InvokedResult<T> | RateLimitedResult;
-
-function createInvokedResult<T>(result: T): InvokedResult<T> {
-  return {
-    kind: InvocationRateLimiterResultKind.Invoked,
-    result,
-  };
-}
-
-function createRateLimitedResult(): RateLimitedResult {
-  return {
-    kind: InvocationRateLimiterResultKind.RateLimited,
-  };
-}
-
 export interface QlPacksForLanguage {
   /** The name of the pack containing the dbscheme. */
   dbschemePack: string;
@@ -584,77 +477,6 @@ export async function getPrimaryDbscheme(
   return dbscheme;
 }

-/**
- * A cached mapping from strings to value of type U.
- */
-export class CachedOperation<U> {
-  private readonly operation: (t: string, ...args: any[]) => Promise<U>;
-  private readonly cached: Map<string, U>;
-  private readonly lru: string[];
-  private readonly inProgressCallbacks: Map<
-    string,
-    Array<[(u: U) => void, (reason?: any) => void]>
-  >;
-
-  constructor(
-    operation: (t: string, ...args: any[]) => Promise<U>,
-    private cacheSize = 100,
-  ) {
-    this.operation = operation;
-    this.lru = [];
-    this.inProgressCallbacks = new Map<
-      string,
-      Array<[(u: U) => void, (reason?: any) => void]>
-    >();
-    this.cached = new Map<string, U>();
-  }
-
-  async get(t: string, ...args: any[]): Promise<U> {
-    // Try and retrieve from the cache
-    const fromCache = this.cached.get(t);
-    if (fromCache !== undefined) {
-      // Move to end of lru list
-      this.lru.push(
-        this.lru.splice(
-          this.lru.findIndex((v) => v === t),
-          1,
-        )[0],
-      );
-      return fromCache;
-    }
-    // Otherwise check if in progress
-    const inProgressCallback = this.inProgressCallbacks.get(t);
-    if (inProgressCallback !== undefined) {
-      // If so wait for it to resolve
-      return await new Promise((resolve, reject) => {
-        inProgressCallback.push([resolve, reject]);
-      });
-    }
-
-    // Otherwise compute the new value, but leave a callback to allow sharing work
-    const callbacks: Array<[(u: U) => void, (reason?: any) => void]> = [];
-    this.inProgressCallbacks.set(t, callbacks);
-    try {
-      const result = await this.operation(t, ...args);
-      callbacks.forEach((f) => f[0](result));
-      this.inProgressCallbacks.delete(t);
-      if (this.lru.length > this.cacheSize) {
-        const toRemove = this.lru.shift()!;
-        this.cached.delete(toRemove);
-      }
-      this.lru.push(t);
-      this.cached.set(t, result);
-      return result;
-    } catch (e) {
-      // Rethrow error on all callbacks
-      callbacks.forEach((f) => f[1](e));
-      throw e;
-    } finally {
-      this.inProgressCallbacks.delete(t);
-    }
-  }
-}
-
 /**
  * The following functions al heuristically determine metadata about databases.
 */
@@ -2,7 +2,7 @@ import { Uri, window } from "vscode";
 import { withProgress } from "../../common/vscode/progress";
 import { AstViewer } from "./ast-viewer";
 import { AstCfgCommands } from "../../common/commands";
-import { LocalQueries } from "../../local-queries";
+import { LocalQueries, QuickEvalType } from "../../local-queries";
 import {
   TemplatePrintAstProvider,
   TemplatePrintCfgProvider,
@@ -42,12 +42,17 @@ export function getAstCfgCommands({
   const viewCfg = async () =>
     withProgress(
       async (progress, token) => {
-        const res = await cfgTemplateProvider.provideCfgUri(
-          window.activeTextEditor?.document,
-        );
+        const editor = window.activeTextEditor;
+        const res = !editor
+          ? undefined
+          : await cfgTemplateProvider.provideCfgUri(
+              editor.document,
+              editor.selection.active.line + 1,
+              editor.selection.active.character + 1,
+            );
         if (res) {
           await localQueries.compileAndRunQuery(
-            false,
+            QuickEvalType.None,
             res[0],
             progress,
             token,
@@ -0,0 +1,70 @@
+/**
+ * A cached mapping from strings to value of type U.
+ */
+export class CachedOperation<U> {
+  private readonly operation: (t: string, ...args: any[]) => Promise<U>;
+  private readonly cached: Map<string, U>;
+  private readonly lru: string[];
+  private readonly inProgressCallbacks: Map<
+    string,
+    Array<[(u: U) => void, (reason?: any) => void]>
+  >;
+
+  constructor(
+    operation: (t: string, ...args: any[]) => Promise<U>,
+    private cacheSize = 100,
+  ) {
+    this.operation = operation;
+    this.lru = [];
+    this.inProgressCallbacks = new Map<
+      string,
+      Array<[(u: U) => void, (reason?: any) => void]>
+    >();
+    this.cached = new Map<string, U>();
+  }
+
+  async get(t: string, ...args: any[]): Promise<U> {
+    // Try and retrieve from the cache
+    const fromCache = this.cached.get(t);
+    if (fromCache !== undefined) {
+      // Move to end of lru list
+      this.lru.push(
+        this.lru.splice(
+          this.lru.findIndex((v) => v === t),
+          1,
+        )[0],
+      );
+      return fromCache;
+    }
+    // Otherwise check if in progress
+    const inProgressCallback = this.inProgressCallbacks.get(t);
+    if (inProgressCallback !== undefined) {
+      // If so wait for it to resolve
+      return await new Promise((resolve, reject) => {
+        inProgressCallback.push([resolve, reject]);
+      });
+    }
+
+    // Otherwise compute the new value, but leave a callback to allow sharing work
+    const callbacks: Array<[(u: U) => void, (reason?: any) => void]> = [];
+    this.inProgressCallbacks.set(t, callbacks);
+    try {
+      const result = await this.operation(t, ...args);
+      callbacks.forEach((f) => f[0](result));
+      this.inProgressCallbacks.delete(t);
+      if (this.lru.length > this.cacheSize) {
+        const toRemove = this.lru.shift()!;
+        this.cached.delete(toRemove);
+      }
+      this.lru.push(t);
+      this.cached.set(t, result);
+      return result;
+    } catch (e) {
+      // Rethrow error on all callbacks
+      callbacks.forEach((f) => f[1](e));
+      throw e;
+    } finally {
+      this.inProgressCallbacks.delete(t);
+    }
+  }
+}
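A small usage sketch for the relocated `CachedOperation` (illustrative, not part of the change; the import path assumes a sibling module): repeated and concurrent calls for the same key share one computation, and later calls hit the LRU cache.

import { readFile } from "fs/promises";
import { CachedOperation } from "./cached-operation";

// Cache line counts per file path; at most 50 entries are kept (LRU).
const lineCounts = new CachedOperation<number>(async (path: string) => {
  const text = await readFile(path, "utf8");
  return text.split("\n").length;
}, 50);

async function example(): Promise<void> {
  // The first call computes; concurrent callers share the in-flight
  // promise, and subsequent calls for the same path hit the cache.
  const n = await lineCounts.get("/tmp/example.ql");
  console.log(`example.ql has ${n} lines`);
}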
@@ -24,7 +24,9 @@ import { QueryResultType } from "../../pure/new-messages";
 import { fileRangeFromURI } from "./file-range-from-uri";

 export const SELECT_QUERY_NAME = "#select";
-export const TEMPLATE_NAME = "selectedSourceFile";
+export const SELECTED_SOURCE_FILE = "selectedSourceFile";
+export const SELECTED_SOURCE_LINE = "selectedSourceLine";
+export const SELECTED_SOURCE_COLUMN = "selectedSourceColumn";

 export interface FullLocationLink extends LocationLink {
   originUri: Uri;
@@ -124,7 +126,7 @@ async function getLinksFromResults(

 function createTemplates(path: string): Record<string, string> {
   return {
-    [TEMPLATE_NAME]: path,
+    [SELECTED_SOURCE_FILE]: path,
   };
 }
@@ -17,13 +17,15 @@ import {
 } from "../../common/vscode/archive-filesystem-provider";
 import { CodeQLCliServer } from "../../codeql-cli/cli";
 import { DatabaseManager } from "../../databases/local-databases";
-import { CachedOperation } from "../../helpers";
+import { CachedOperation } from "./cached-operation";
 import { ProgressCallback, withProgress } from "../../common/vscode/progress";
 import { KeyType } from "./key-type";
 import {
   FullLocationLink,
   getLocationsForUriString,
-  TEMPLATE_NAME,
+  SELECTED_SOURCE_FILE,
+  SELECTED_SOURCE_LINE,
+  SELECTED_SOURCE_COLUMN,
 } from "./location-finder";
 import {
   qlpackOfDatabase,
@@ -253,7 +255,7 @@ export class TemplatePrintAstProvider {

     const query = queries[0];
     const templates: Record<string, string> = {
-      [TEMPLATE_NAME]: zippedArchive.pathWithinSourceArchive,
+      [SELECTED_SOURCE_FILE]: zippedArchive.pathWithinSourceArchive,
     };

     const results = await runContextualQuery(
@@ -284,15 +286,17 @@ export class TemplatePrintCfgProvider {
   }

   async provideCfgUri(
-    document?: TextDocument,
+    document: TextDocument,
+    line: number,
+    character: number,
   ): Promise<[Uri, Record<string, string>] | undefined> {
-    if (!document) {
-      return;
-    }
-
     return this.shouldUseCache()
-      ? await this.cache.get(document.uri.toString())
-      : await this.getCfgUri(document.uri.toString());
+      ? await this.cache.get(
+          `${document.uri.toString()}#${line}:${character}`,
+          line,
+          character,
+        )
+      : await this.getCfgUri(document.uri.toString(), line, character);
   }

   private shouldUseCache() {
@@ -301,6 +305,8 @@ export class TemplatePrintCfgProvider {

   private async getCfgUri(
     uriString: string,
+    line: number,
+    character: number,
   ): Promise<[Uri, Record<string, string>]> {
     const uri = Uri.parse(uriString, true);
     if (uri.scheme !== zipArchiveScheme) {
@@ -342,7 +348,9 @@ export class TemplatePrintCfgProvider {
   const queryUri = Uri.file(queries[0]);

   const templates: Record<string, string> = {
-    [TEMPLATE_NAME]: zippedArchive.pathWithinSourceArchive,
+    [SELECTED_SOURCE_FILE]: zippedArchive.pathWithinSourceArchive,
+    [SELECTED_SOURCE_LINE]: line.toString(),
+    [SELECTED_SOURCE_COLUMN]: character.toString(),
   };

   return [queryUri, templates];
@@ -47,7 +47,7 @@ import { App } from "../common/app";
 import { DisposableObject } from "../pure/disposable-object";
 import { SkeletonQueryWizard } from "../skeleton-query-wizard";
 import { LocalQueryRun } from "./local-query-run";
-import { createMultiSelectionCommand } from "../common/selection-commands";
+import { createMultiSelectionCommand } from "../common/vscode/selection-commands";

 interface DatabaseQuickPickItem extends QuickPickItem {
   databaseItem: DatabaseItem;
@@ -72,6 +72,12 @@ async function promptToSaveQueryIfNeeded(query: SelectedQuery): Promise<void> {
   }
 }

+export enum QuickEvalType {
+  None,
+  QuickEval,
+  QuickEvalCount,
+}
+
 export class LocalQueries extends DisposableObject {
   public constructor(
     private readonly app: App,
@@ -115,7 +121,13 @@ export class LocalQueries extends DisposableObject {
   private async runQuery(uri: Uri | undefined): Promise<void> {
     await withProgress(
       async (progress, token) => {
-        await this.compileAndRunQuery(false, uri, progress, token, undefined);
+        await this.compileAndRunQuery(
+          QuickEvalType.None,
+          uri,
+          progress,
+          token,
+          undefined,
+        );
       },
       {
         title: "Running query",
@@ -185,7 +197,7 @@ export class LocalQueries extends DisposableObject {
     await Promise.all(
       queryUris.map(async (uri) =>
         this.compileAndRunQuery(
-          false,
+          QuickEvalType.None,
           uri,
           wrappedProgress,
           token,
@@ -204,7 +216,13 @@ export class LocalQueries extends DisposableObject {
   private async quickEval(uri: Uri): Promise<void> {
     await withProgress(
       async (progress, token) => {
-        await this.compileAndRunQuery(true, uri, progress, token, undefined);
+        await this.compileAndRunQuery(
+          QuickEvalType.QuickEval,
+          uri,
+          progress,
+          token,
+          undefined,
+        );
       },
       {
         title: "Running query",
@@ -217,7 +235,7 @@ export class LocalQueries extends DisposableObject {
     await withProgress(
       async (progress, token) =>
         await this.compileAndRunQuery(
-          true,
+          QuickEvalType.QuickEval,
           uri,
           progress,
           token,
@@ -331,7 +349,7 @@ export class LocalQueries extends DisposableObject {
   }

   public async compileAndRunQuery(
-    quickEval: boolean,
+    quickEval: QuickEvalType,
     queryUri: Uri | undefined,
     progress: ProgressCallback,
     token: CancellationToken,
@@ -352,7 +370,7 @@ export class LocalQueries extends DisposableObject {

   /** Used by tests */
   public async compileAndRunQueryInternal(
-    quickEval: boolean,
+    quickEval: QuickEvalType,
     queryUri: Uri | undefined,
     progress: ProgressCallback,
     token: CancellationToken,
@@ -364,15 +382,20 @@ export class LocalQueries extends DisposableObject {
     if (queryUri !== undefined) {
       // The query URI is provided by the command, most likely because the command was run from an
       // editor context menu. Use the provided URI, but make sure it's a valid query.
-      queryPath = validateQueryUri(queryUri, quickEval);
+      queryPath = validateQueryUri(queryUri, quickEval !== QuickEvalType.None);
     } else {
       // Use the currently selected query.
-      queryPath = await this.getCurrentQuery(quickEval);
+      queryPath = await this.getCurrentQuery(quickEval !== QuickEvalType.None);
     }

     const selectedQuery: SelectedQuery = {
       queryPath,
-      quickEval: quickEval ? await getQuickEvalContext(range) : undefined,
+      quickEval: quickEval
+        ? await getQuickEvalContext(
+            range,
+            quickEval === QuickEvalType.QuickEvalCount,
+          )
+        : undefined,
     };

     // If no databaseItem is specified, use the database currently selected in the Databases UI
@@ -392,6 +415,7 @@ export class LocalQueries extends DisposableObject {
       {
         queryPath: selectedQuery.queryPath,
         quickEvalPosition: selectedQuery.quickEval?.quickEvalPosition,
+        quickEvalCountOnly: selectedQuery.quickEval?.quickEvalCount,
       },
       true,
       additionalPacks,
@@ -481,7 +505,7 @@ export class LocalQueries extends DisposableObject {
     for (const item of quickpick) {
       try {
         await this.compileAndRunQuery(
-          false,
+          QuickEvalType.None,
           uri,
           progress,
           token,
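For reference, a sketch of how the widened `compileAndRunQuery` signature is called now that the boolean flag is a `QuickEvalType` (illustrative only; the argument list mirrors the calls visible in this diff, and the import paths depend on where the caller lives):

import { CancellationToken, Uri } from "vscode";
import { ProgressCallback } from "./common/vscode/progress";
import { LocalQueries, QuickEvalType } from "./local-queries";

async function runBoth(
  localQueries: LocalQueries,
  uri: Uri,
  progress: ProgressCallback,
  token: CancellationToken,
): Promise<void> {
  // Ordinary run (previously the `false` argument).
  await localQueries.compileAndRunQuery(
    QuickEvalType.None,
    uri,
    progress,
    token,
    undefined,
  );
  // Quick evaluation that only counts results (new with this change).
  await localQueries.compileAndRunQuery(
    QuickEvalType.QuickEvalCount,
    uri,
    progress,
    token,
    undefined,
  );
}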
@@ -115,7 +115,7 @@ export type BqrsKind =
   | "Entity";

 interface BqrsColumn {
-  name: string;
+  name?: string;
   kind: BqrsKind;
 }
 export interface DecodedBqrsChunk {
@@ -544,6 +544,12 @@ export interface GenerateExternalApiMessage {
   t: "generateExternalApi";
 }

+export interface GenerateExternalApiFromLlmMessage {
+  t: "generateExternalApiFromLlm";
+  externalApiUsages: ExternalApiUsage[];
+  modeledMethods: Record<string, ModeledMethod>;
+}
+
 export type ToDataExtensionsEditorMessage =
   | SetExtensionPackStateMessage
   | SetExternalApiUsagesMessage
@@ -556,4 +562,5 @@ export type FromDataExtensionsEditorMessage =
   | OpenExtensionPackMessage
   | JumpToUsageMessage
   | SaveModeledMethods
-  | GenerateExternalApiMessage;
+  | GenerateExternalApiMessage
+  | GenerateExternalApiFromLlmMessage;
@@ -68,6 +68,14 @@ export interface CompilationTarget {
  */
 export interface QuickEvalOptions {
   quickEvalPos?: Position;
+  /**
+   * Whether to only count the number of results.
+   *
+   * This is only supported by the new query server
+   * but it isn't worth having a separate type and
+   * it is fine to have an ignored optional field.
+   */
+  countOnly?: boolean;
 }

 /**
extensions/ql-vscode/src/queries-panel/queries-module.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
+import { CodeQLCliServer } from "../codeql-cli/cli";
+import { extLogger } from "../common";
+import { App, AppMode } from "../common/app";
+import { isCanary, showQueriesPanel } from "../config";
+import { DisposableObject } from "../pure/disposable-object";
+import { QueriesPanel } from "./queries-panel";
+import { QueryDiscovery } from "./query-discovery";
+
+export class QueriesModule extends DisposableObject {
+  private constructor(readonly app: App) {
+    super();
+  }
+
+  private initialize(app: App, cliServer: CodeQLCliServer): void {
+    if (app.mode === AppMode.Production || !isCanary() || !showQueriesPanel()) {
+      // Currently, we only want to expose the new panel when we are in development and canary mode
+      // and the developer has enabled the "Show queries panel" flag.
+      return;
+    }
+    void extLogger.log("Initializing queries panel.");
+
+    const queryDiscovery = new QueryDiscovery(app, cliServer);
+    this.push(queryDiscovery);
+    void queryDiscovery.refresh();
+
+    const queriesPanel = new QueriesPanel(queryDiscovery);
+    this.push(queriesPanel);
+  }
+
+  public static initialize(
+    app: App,
+    cliServer: CodeQLCliServer,
+  ): QueriesModule {
+    const queriesModule = new QueriesModule(app);
+    app.subscriptions.push(queriesModule);
+
+    queriesModule.initialize(app, cliServer);
+    return queriesModule;
+  }
+}
extensions/ql-vscode/src/queries-panel/queries-panel.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
+import * as vscode from "vscode";
+import { DisposableObject } from "../pure/disposable-object";
+import { QueryTreeDataProvider } from "./query-tree-data-provider";
+import { QueryDiscovery } from "./query-discovery";
+
+export class QueriesPanel extends DisposableObject {
+  public constructor(queryDiscovery: QueryDiscovery) {
+    super();
+
+    const dataProvider = new QueryTreeDataProvider(queryDiscovery);
+
+    const treeView = vscode.window.createTreeView("codeQLQueries", {
+      treeDataProvider: dataProvider,
+    });
+    this.push(treeView);
+  }
+}
140 extensions/ql-vscode/src/queries-panel/query-discovery.ts Normal file
@@ -0,0 +1,140 @@
+import { dirname, basename, normalize, relative } from "path";
+import { Discovery } from "../common/discovery";
+import { CodeQLCliServer } from "../codeql-cli/cli";
+import { Event, RelativePattern, Uri, WorkspaceFolder } from "vscode";
+import { MultiFileSystemWatcher } from "../common/vscode/multi-file-system-watcher";
+import { App } from "../common/app";
+import { FileTreeDirectory, FileTreeLeaf } from "../common/file-tree-nodes";
+import { getOnDiskWorkspaceFoldersObjects } from "../helpers";
+import { AppEventEmitter } from "../common/events";
+import { QueryDiscoverer } from "./query-tree-data-provider";
+import { extLogger } from "../common";
+
+/**
+ * The results of discovering queries.
+ */
+export interface QueryDiscoveryResults {
+  /**
+   * A tree of directories and query files.
+   * May have multiple roots because of multiple workspaces.
+   */
+  queries: FileTreeDirectory[];
+
+  /**
+   * File system paths to watch. If any ql file changes in these directories
+   * or any subdirectories, then this could signify a change in queries.
+   */
+  watchPaths: Uri[];
+}
+
+/**
+ * Discovers all query files contained in the QL packs in a given workspace folder.
+ */
+export class QueryDiscovery
+  extends Discovery<QueryDiscoveryResults>
+  implements QueryDiscoverer
+{
+  private results: QueryDiscoveryResults | undefined;
+
+  private readonly onDidChangeQueriesEmitter: AppEventEmitter<void>;
+  private readonly watcher: MultiFileSystemWatcher = this.push(
+    new MultiFileSystemWatcher(),
+  );
+
+  constructor(app: App, private readonly cliServer: CodeQLCliServer) {
+    super("Query Discovery", extLogger);
+
+    this.onDidChangeQueriesEmitter = this.push(app.createEventEmitter<void>());
+    this.push(app.onDidChangeWorkspaceFolders(this.refresh.bind(this)));
+    this.push(this.watcher.onDidChange(this.refresh.bind(this)));
+  }
+
+  public get queries(): FileTreeDirectory[] | undefined {
+    return this.results?.queries;
+  }
+
+  /**
+   * Event to be fired when the set of discovered queries may have changed.
+   */
+  public get onDidChangeQueries(): Event<void> {
+    return this.onDidChangeQueriesEmitter.event;
+  }
+
+  protected async discover(): Promise<QueryDiscoveryResults> {
+    const workspaceFolders = getOnDiskWorkspaceFoldersObjects();
+    if (workspaceFolders.length === 0) {
+      return {
+        queries: [],
+        watchPaths: [],
+      };
+    }
+
+    const queries = await this.discoverQueries(workspaceFolders);
+
+    return {
+      queries,
+      watchPaths: workspaceFolders.map((f) => f.uri),
+    };
+  }
+
+  protected update(results: QueryDiscoveryResults): void {
+    this.results = results;
+
+    this.watcher.clear();
+    for (const watchPath of results.watchPaths) {
+      // Watch for changes to any `.ql` file
+      this.watcher.addWatch(new RelativePattern(watchPath, "**/*.{ql}"));
+      // need to explicitly watch for changes to directories themselves.
+      this.watcher.addWatch(new RelativePattern(watchPath, "**/"));
+    }
+    this.onDidChangeQueriesEmitter.fire();
+  }
+
+  /**
+   * Discover all queries in the specified directory and its subdirectories.
+   * @returns A `QueryDirectory` object describing the contents of the directory, or `undefined` if
+   * no queries were found.
+   */
+  private async discoverQueries(
+    workspaceFolders: readonly WorkspaceFolder[],
+  ): Promise<FileTreeDirectory[]> {
+    const rootDirectories = [];
+    for (const workspaceFolder of workspaceFolders) {
+      const root = await this.discoverQueriesInWorkspace(workspaceFolder);
+      if (root !== undefined) {
+        rootDirectories.push(root);
+      }
+    }
+    return rootDirectories;
+  }
+
+  private async discoverQueriesInWorkspace(
+    workspaceFolder: WorkspaceFolder,
+  ): Promise<FileTreeDirectory | undefined> {
+    const fullPath = workspaceFolder.uri.fsPath;
+    const name = workspaceFolder.name;
+
+    // We don't want to log each invocation of resolveQueries, since it clutters up the log.
+    const silent = true;
+    const resolvedQueries = await this.cliServer.resolveQueries(
+      fullPath,
+      silent,
+    );
+    if (resolvedQueries.length === 0) {
+      return undefined;
+    }
+
+    const rootDirectory = new FileTreeDirectory(fullPath, name);
+    for (const queryPath of resolvedQueries) {
+      const relativePath = normalize(relative(fullPath, queryPath));
+      const dirName = dirname(relativePath);
+      const parentDirectory = rootDirectory.createDirectory(dirName);
+      parentDirectory.addChild(
+        new FileTreeLeaf(queryPath, basename(queryPath)),
+      );
+    }
+
+    rootDirectory.finish();
+    return rootDirectory;
+  }
+}
@@ -0,0 +1,74 @@
+import { Event, EventEmitter, TreeDataProvider, TreeItem } from "vscode";
+import { QueryTreeViewItem } from "./query-tree-view-item";
+import { DisposableObject } from "../pure/disposable-object";
+import { FileTreeNode } from "../common/file-tree-nodes";
+
+export interface QueryDiscoverer {
+  readonly queries: FileTreeNode[] | undefined;
+  readonly onDidChangeQueries: Event<void>;
+}
+
+export class QueryTreeDataProvider
+  extends DisposableObject
+  implements TreeDataProvider<QueryTreeViewItem>
+{
+  private queryTreeItems: QueryTreeViewItem[];
+
+  private readonly onDidChangeTreeDataEmitter = this.push(
+    new EventEmitter<void>(),
+  );
+
+  public constructor(private readonly queryDiscoverer: QueryDiscoverer) {
+    super();
+
+    queryDiscoverer.onDidChangeQueries(() => {
+      this.queryTreeItems = this.createTree();
+      this.onDidChangeTreeDataEmitter.fire();
+    });
+
+    this.queryTreeItems = this.createTree();
+  }
+
+  public get onDidChangeTreeData(): Event<void> {
+    return this.onDidChangeTreeDataEmitter.event;
+  }
+
+  private createTree(): QueryTreeViewItem[] {
+    return (this.queryDiscoverer.queries || []).map(
+      this.convertFileTreeNode.bind(this),
+    );
+  }
+
+  private convertFileTreeNode(
+    fileTreeDirectory: FileTreeNode,
+  ): QueryTreeViewItem {
+    return new QueryTreeViewItem(
+      fileTreeDirectory.name,
+      fileTreeDirectory.path,
+      fileTreeDirectory.children.map(this.convertFileTreeNode.bind(this)),
+    );
+  }
+
+  /**
+   * Returns the UI presentation of the element that gets displayed in the view.
+   * @param item The item to represent.
+   * @returns The UI presentation of the item.
+   */
+  public getTreeItem(item: QueryTreeViewItem): TreeItem {
+    return item;
+  }
+
+  /**
+   * Called when expanding an item (including the root item).
+   * @param item The item to expand.
+   * @returns The children of the item.
+   */
+  public getChildren(item?: QueryTreeViewItem): QueryTreeViewItem[] {
+    if (!item) {
+      // We're at the root.
+      return this.queryTreeItems;
+    } else {
+      return item.children;
+    }
+  }
+}
@@ -0,0 +1,22 @@
+import * as vscode from "vscode";
+
+export class QueryTreeViewItem extends vscode.TreeItem {
+  constructor(
+    name: string,
+    path: string,
+    public readonly children: QueryTreeViewItem[],
+  ) {
+    super(name);
+    this.tooltip = path;
+    this.collapsibleState = this.children.length
+      ? vscode.TreeItemCollapsibleState.Collapsed
+      : vscode.TreeItemCollapsibleState.None;
+    if (this.children.length === 0) {
+      this.command = {
+        title: "Open",
+        command: "vscode.open",
+        arguments: [vscode.Uri.file(path)],
+      };
+    }
+  }
+}
@@ -59,7 +59,7 @@ import { tryOpenExternalFile } from "../common/vscode/external-files";
 import {
   createMultiSelectionCommand,
   createSingleSelectionCommand,
-} from "../common/selection-commands";
+} from "../common/vscode/selection-commands";

 /**
  * query-history-manager.ts
@@ -139,6 +139,7 @@ export async function interpretResultsSarif(
   metadata: QueryMetadata | undefined,
   resultsPaths: ResultsPaths,
   sourceInfo?: cli.SourceInfo,
+  args?: string[],
 ): Promise<SarifInterpretationData> {
   const { resultsPath, interpretedResultsPath } = resultsPaths;
   let res;
@@ -150,6 +151,7 @@ export async function interpretResultsSarif(
       resultsPath,
       interpretedResultsPath,
       sourceInfo,
+      args,
     );
   }
   return { ...res, t: "SarifInterpretationData" };
@@ -16,6 +16,10 @@ export interface CoreQueryTarget {
   * `query`.
   */
  quickEvalPosition?: Position;
+  /**
+   * If this is quick eval, whether to only count the number of results.
+   */
+  quickEvalCountOnly?: boolean;
 }

 export interface CoreQueryResults {
@@ -36,7 +36,10 @@ export async function compileAndRunQueryAgainstDatabaseCore(
   const target =
     query.quickEvalPosition !== undefined
       ? {
-          quickEval: { quickEvalPos: query.quickEvalPosition },
+          quickEval: {
+            quickEvalPos: query.quickEvalPosition,
+            countOnly: query.quickEvalCountOnly,
+          },
         }
       : { query: {} };

@@ -1,4 +1,4 @@
-import { dirname, basename, join, normalize, relative, extname } from "path";
+import { dirname, basename, normalize, relative, extname } from "path";
 import { Discovery } from "../common/discovery";
 import {
   EventEmitter,
@@ -6,112 +6,12 @@ import {
   Uri,
   RelativePattern,
   WorkspaceFolder,
-  env,
 } from "vscode";
 import { MultiFileSystemWatcher } from "../common/vscode/multi-file-system-watcher";
 import { CodeQLCliServer } from "../codeql-cli/cli";
 import { pathExists } from "fs-extra";
-/**
- * A node in the tree of tests. This will be either a `QLTestDirectory` or a `QLTestFile`.
- */
-export abstract class QLTestNode {
-  constructor(private _path: string, private _name: string) {}
-
-  public get path(): string {
-    return this._path;
-  }
-
-  public get name(): string {
-    return this._name;
-  }
-
-  public abstract get children(): readonly QLTestNode[];
-
-  public abstract finish(): void;
-}
-
-/**
- * A directory containing one or more QL tests or other test directories.
- */
-export class QLTestDirectory extends QLTestNode {
-  constructor(
-    _path: string,
-    _name: string,
-    private _children: QLTestNode[] = [],
-  ) {
-    super(_path, _name);
-  }
-
-  public get children(): readonly QLTestNode[] {
-    return this._children;
-  }
-
-  public addChild(child: QLTestNode): void {
-    this._children.push(child);
-  }
-
-  public createDirectory(relativePath: string): QLTestDirectory {
-    const dirName = dirname(relativePath);
-    if (dirName === ".") {
-      return this.createChildDirectory(relativePath);
-    } else {
-      const parent = this.createDirectory(dirName);
-      return parent.createDirectory(basename(relativePath));
-    }
-  }
-
-  public finish(): void {
-    // remove empty directories
-    this._children.filter(
-      (child) => child instanceof QLTestFile || child.children.length > 0,
-    );
-    this._children.sort((a, b) => a.name.localeCompare(b.name, env.language));
-    this._children.forEach((child, i) => {
-      child.finish();
-      if (
-        child.children?.length === 1 &&
-        child.children[0] instanceof QLTestDirectory
-      ) {
-        // collapse children
-        const replacement = new QLTestDirectory(
-          child.children[0].path,
-          `${child.name} / ${child.children[0].name}`,
-          Array.from(child.children[0].children),
-        );
-        this._children[i] = replacement;
-      }
-    });
-  }
-
-  private createChildDirectory(name: string): QLTestDirectory {
-    const existingChild = this._children.find((child) => child.name === name);
-    if (existingChild !== undefined) {
-      return existingChild as QLTestDirectory;
-    } else {
-      const newChild = new QLTestDirectory(join(this.path, name), name);
-      this.addChild(newChild);
-      return newChild;
-    }
-  }
-}
-
-/**
- * A single QL test. This will be either a `.ql` file or a `.qlref` file.
- */
-export class QLTestFile extends QLTestNode {
-  constructor(_path: string, _name: string) {
-    super(_path, _name);
-  }
-
-  public get children(): readonly QLTestNode[] {
-    return [];
-  }
-
-  public finish(): void {
-    /**/
-  }
-}
+import { FileTreeDirectory, FileTreeLeaf } from "../common/file-tree-nodes";
+import { extLogger } from "../common";

 /**
  * The results of discovering QL tests.
@@ -120,7 +20,7 @@ interface QLTestDiscoveryResults {
   /**
    * A directory that contains one or more QL Tests, or other QLTestDirectories.
    */
-  testDirectory: QLTestDirectory | undefined;
+  testDirectory: FileTreeDirectory | undefined;

   /**
    * The file system path to a directory to watch. If any ql or qlref file changes in
@@ -137,13 +37,13 @@ export class QLTestDiscovery extends Discovery<QLTestDiscoveryResults> {
   private readonly watcher: MultiFileSystemWatcher = this.push(
     new MultiFileSystemWatcher(),
   );
-  private _testDirectory: QLTestDirectory | undefined;
+  private _testDirectory: FileTreeDirectory | undefined;

   constructor(
     private readonly workspaceFolder: WorkspaceFolder,
     private readonly cliServer: CodeQLCliServer,
   ) {
-    super("QL Test Discovery");
+    super("QL Test Discovery", extLogger);

     this.push(this.watcher.onDidChange(this.handleDidChange, this));
   }
@@ -159,13 +59,13 @@ export class QLTestDiscovery extends Discovery<QLTestDiscoveryResults> {
    * The root directory. There is at least one test in this directory, or
    * in a subdirectory of this.
    */
-  public get testDirectory(): QLTestDirectory | undefined {
+  public get testDirectory(): FileTreeDirectory | undefined {
     return this._testDirectory;
   }

   private handleDidChange(uri: Uri): void {
     if (!QLTestDiscovery.ignoreTestPath(uri.fsPath)) {
-      this.refresh();
+      void this.refresh();
     }
   }
   protected async discover(): Promise<QLTestDiscoveryResults> {
@@ -194,10 +94,10 @@ export class QLTestDiscovery extends Discovery<QLTestDiscoveryResults> {
    * @returns A `QLTestDirectory` object describing the contents of the directory, or `undefined` if
    * no tests were found.
    */
-  private async discoverTests(): Promise<QLTestDirectory> {
+  private async discoverTests(): Promise<FileTreeDirectory> {
     const fullPath = this.workspaceFolder.uri.fsPath;
     const name = this.workspaceFolder.name;
-    const rootDirectory = new QLTestDirectory(fullPath, name);
+    const rootDirectory = new FileTreeDirectory(fullPath, name);

     // Don't try discovery on workspace folders that don't exist on the filesystem
     if (await pathExists(fullPath)) {
@@ -208,7 +108,9 @@ export class QLTestDiscovery extends Discovery<QLTestDiscoveryResults> {
         const relativePath = normalize(relative(fullPath, testPath));
         const dirName = dirname(relativePath);
         const parentDirectory = rootDirectory.createDirectory(dirName);
-        parentDirectory.addChild(new QLTestFile(testPath, basename(testPath)));
+        parentDirectory.addChild(
+          new FileTreeLeaf(testPath, basename(testPath)),
+        );
       }

       rootDirectory.finish();
@@ -13,17 +13,17 @@ import {
   TestHub,
 } from "vscode-test-adapter-api";
 import { TestAdapterRegistrar } from "vscode-test-adapter-util";
-import {
-  QLTestFile,
-  QLTestNode,
-  QLTestDirectory,
-  QLTestDiscovery,
-} from "./qltest-discovery";
+import { QLTestDiscovery } from "./qltest-discovery";
 import { Event, EventEmitter, CancellationTokenSource } from "vscode";
 import { DisposableObject } from "../pure/disposable-object";
 import { CodeQLCliServer, TestCompleted } from "../codeql-cli/cli";
 import { testLogger } from "../common";
 import { TestRunner } from "./test-runner";
+import {
+  FileTreeDirectory,
+  FileTreeLeaf,
+  FileTreeNode,
+} from "../common/file-tree-nodes";

 /**
  * Get the full path of the `.expected` file for the specified QL test.
@@ -115,7 +115,7 @@ export class QLTestAdapter extends DisposableObject implements TestAdapter {
     this.qlTestDiscovery = this.push(
       new QLTestDiscovery(workspaceFolder, cliServer),
     );
-    this.qlTestDiscovery.refresh();
+    void this.qlTestDiscovery.refresh();

     this.push(this.qlTestDiscovery.onDidChangeTests(this.discoverTests, this));
   }
@@ -135,7 +135,7 @@ export class QLTestAdapter extends DisposableObject implements TestAdapter {
   }

   private static createTestOrSuiteInfos(
-    testNodes: readonly QLTestNode[],
+    testNodes: readonly FileTreeNode[],
   ): Array<TestSuiteInfo | TestInfo> {
     return testNodes.map((childNode) => {
       return QLTestAdapter.createTestOrSuiteInfo(childNode);
@@ -143,18 +143,18 @@ export class QLTestAdapter extends DisposableObject implements TestAdapter {
   }

   private static createTestOrSuiteInfo(
-    testNode: QLTestNode,
+    testNode: FileTreeNode,
   ): TestSuiteInfo | TestInfo {
-    if (testNode instanceof QLTestFile) {
+    if (testNode instanceof FileTreeLeaf) {
       return QLTestAdapter.createTestInfo(testNode);
-    } else if (testNode instanceof QLTestDirectory) {
+    } else if (testNode instanceof FileTreeDirectory) {
       return QLTestAdapter.createTestSuiteInfo(testNode, testNode.name);
     } else {
       throw new Error("Unexpected test type.");
     }
   }

-  private static createTestInfo(testFile: QLTestFile): TestInfo {
+  private static createTestInfo(testFile: FileTreeLeaf): TestInfo {
     return {
       type: "test",
       id: testFile.path,
@@ -165,7 +165,7 @@ export class QLTestAdapter extends DisposableObject implements TestAdapter {
   }

   private static createTestSuiteInfo(
-    testDirectory: QLTestDirectory,
+    testDirectory: FileTreeDirectory,
     label: string,
   ): TestSuiteInfo {
     return {
@@ -16,12 +16,7 @@ import {
   workspace,
 } from "vscode";
 import { DisposableObject } from "../pure/disposable-object";
-import {
-  QLTestDirectory,
-  QLTestDiscovery,
-  QLTestFile,
-  QLTestNode,
-} from "./qltest-discovery";
+import { QLTestDiscovery } from "./qltest-discovery";
 import { CodeQLCliServer } from "../codeql-cli/cli";
 import { getErrorMessage } from "../pure/helpers-pure";
 import { BaseLogger, LogOptions } from "../common";
@@ -29,6 +24,11 @@ import { TestRunner } from "./test-runner";
 import { TestManagerBase } from "./test-manager-base";
 import { App } from "../common/app";
 import { isWorkspaceFolderOnDisk } from "../helpers";
+import {
+  FileTreeDirectory,
+  FileTreeLeaf,
+  FileTreeNode,
+} from "../common/file-tree-nodes";

 /**
  * Returns the complete text content of the specified file. If there is an error reading the file,
@@ -92,7 +92,7 @@ class WorkspaceFolderHandler extends DisposableObject {
     this.push(
       this.testDiscovery.onDidChangeTests(this.handleDidChangeTests, this),
     );
-    this.testDiscovery.refresh();
+    void this.testDiscovery.refresh();
   }

   private handleDidChangeTests(): void {
@@ -209,7 +209,7 @@ export class TestManager extends TestManagerBase {
   */
  public updateTestsForWorkspaceFolder(
    workspaceFolder: WorkspaceFolder,
-    testDirectory: QLTestDirectory | undefined,
+    testDirectory: FileTreeDirectory | undefined,
   ): void {
     if (testDirectory !== undefined) {
       // Adding an item with the same ID as an existing item will replace it, which is exactly what
@@ -229,9 +229,9 @@ export class TestManager extends TestManagerBase {
   /**
    * Creates a tree of `TestItem`s from the root `QlTestNode` provided by test discovery.
    */
-  private createTestItemTree(node: QLTestNode, isRoot: boolean): TestItem {
+  private createTestItemTree(node: FileTreeNode, isRoot: boolean): TestItem {
     // Prefix the ID to identify it as a directory or a test
-    const itemType = node instanceof QLTestDirectory ? "dir" : "test";
+    const itemType = node instanceof FileTreeDirectory ? "dir" : "test";
     const testItem = this.testController.createTestItem(
       // For the root of a workspace folder, use the full path as the ID. Otherwise, use the node's
       // name as the ID, since it's shorter but still unique.
@@ -242,7 +242,7 @@ export class TestManager extends TestManagerBase {

     for (const childNode of node.children) {
       const childItem = this.createTestItemTree(childNode, false);
-      if (childNode instanceof QLTestFile) {
+      if (childNode instanceof FileTreeLeaf) {
         childItem.range = new Range(0, 0, 0, 0);
       }
       testItem.children.add(childItem);
@@ -433,6 +433,7 @@ export function validateQueryPath(
 export interface QuickEvalContext {
   quickEvalPosition: messages.Position;
   quickEvalText: string;
+  quickEvalCount: boolean;
 }

 /**
@@ -443,6 +444,7 @@ export interface QuickEvalContext {
  */
 export async function getQuickEvalContext(
   range: Range | undefined,
+  isCountOnly: boolean,
 ): Promise<QuickEvalContext> {
   const editor = window.activeTextEditor;
   if (editor === undefined) {
@@ -465,6 +467,7 @@ export async function getQuickEvalContext(
   return {
     quickEvalPosition,
     quickEvalText,
+    quickEvalCount: isCountOnly,
   };
 }

@@ -30,6 +30,7 @@ DataExtensionsEditor.args = {
       "/home/user/vscode-codeql-starter/codeql-custom-queries-java/sql2o/models/sql2o.yml",
   },
   modelFileExists: true,
+  showLlmButton: true,
 },
 initialExternalApiUsages: [
   {
@@ -7,6 +7,43 @@ import {
   VariantAnalysisSubmissionRequest,
 } from "./variant-analysis";
 import { Repository } from "./repository";
+import { Progress } from "vscode";
+import { CancellationToken } from "vscode-jsonrpc";
+
+export async function getCodeSearchRepositories(
+  credentials: Credentials,
+  query: string,
+  progress: Progress<{
+    message?: string | undefined;
+    increment?: number | undefined;
+  }>,
+  token: CancellationToken,
+): Promise<string[]> {
+  let nwos: string[] = [];
+  const octokit = await credentials.getOctokit();
+  for await (const response of octokit.paginate.iterator(
+    octokit.rest.search.repos,
+    {
+      q: query,
+      per_page: 100,
+    },
+  )) {
+    nwos.push(...response.data.map((item) => item.full_name));
+    // calculate progress bar: 80% of the progress bar is used for the code search
+    const totalNumberOfRequests = Math.ceil(response.data.total_count / 100);
+    // Since we have a maximum 10 of requests, we use a fixed increment whenever the totalNumberOfRequests is greater than 10
+    const increment =
+      totalNumberOfRequests < 10 ? 80 / totalNumberOfRequests : 8;
+    progress.report({ increment });
+
+    if (token.isCancellationRequested) {
+      nwos = [];
+      break;
+    }
+  }
+
+  return [...new Set(nwos)];
+}
+
 export async function submitVariantAnalysis(
   credentials: Credentials,
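A side note on the progress arithmetic in `getCodeSearchRepositories` above: 80% of the progress bar is reserved for the paginated code search, and the per-page increment is fixed once ten or more pages are expected. A small standalone sketch of that calculation; the helper name `searchProgressIncrement` is ours, not part of the extension:

    // Sketch: per-page progress increment for a paginated search, assuming
    // pages of 100 results and 80% of the bar reserved for the search.
    function searchProgressIncrement(totalResultCount: number): number {
      const totalNumberOfRequests = Math.ceil(totalResultCount / 100);
      // Fixed increment of 8 once ten or more pages are expected.
      return totalNumberOfRequests < 10 ? 80 / totalNumberOfRequests : 8;
    }

    console.log(searchProgressIncrement(450)); // 5 pages -> 16 per page
    console.log(searchProgressIncrement(5000)); // 50 pages -> 8 per page

With the fixed increment, searches that need far more than ten pages will report more than the reserved 80% in total before the loop finishes.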
@@ -116,12 +116,16 @@ async function generateQueryPack(

   let precompilationOpts: string[] = [];
   if (await cliServer.cliConstraints.supportsQlxRemote()) {
-    const ccache = join(originalPackRoot, ".cache");
-    precompilationOpts = [
-      "--qlx",
-      "--no-default-compilation-cache",
-      `--compilation-cache=${ccache}`,
-    ];
+    if (await cliServer.cliConstraints.usesGlobalCompilationCache()) {
+      precompilationOpts = ["--qlx"];
+    } else {
+      const ccache = join(originalPackRoot, ".cache");
+      precompilationOpts = [
+        "--qlx",
+        "--no-default-compilation-cache",
+        `--compilation-cache=${ccache}`,
+      ];
+    }
   } else {
     precompilationOpts = ["--no-precompile"];
   }
@@ -379,7 +383,6 @@ async function fixPackFile(
   }
   const qlpack = load(await readFile(packPath, "utf8")) as QlPack;

-  qlpack.name = QUERY_PACK_NAME;
   updateDefaultSuite(qlpack, packRelativePath);
   removeWorkspaceRefs(qlpack);

@@ -5,6 +5,8 @@ import {
   getVariantAnalysisRepo,
 } from "./gh-api/gh-api-client";
 import {
+  authentication,
+  AuthenticationSessionsChangeEvent,
   CancellationToken,
   env,
   EventEmitter,
@@ -72,6 +74,11 @@ import {
   REPO_STATES_FILENAME,
   writeRepoStates,
 } from "./repo-states-store";
+import { GITHUB_AUTH_PROVIDER_ID } from "../common/vscode/authentication";
+import { FetchError } from "node-fetch";
+import { extLogger } from "../common";
+
+const maxRetryCount = 3;

 export class VariantAnalysisManager
   extends DisposableObject
@@ -131,6 +138,10 @@ export class VariantAnalysisManager
     this.variantAnalysisResultsManager.onResultLoaded(
       this.onRepoResultLoaded.bind(this),
     );
+
+    this.push(
+      authentication.onDidChangeSessions(this.onDidChangeSessions.bind(this)),
+    );
   }

   getCommands(): VariantAnalysisCommands {
@@ -144,6 +155,8 @@ export class VariantAnalysisManager
         this.monitorVariantAnalysis.bind(this),
       "codeQL.monitorRehydratedVariantAnalysis":
         this.monitorVariantAnalysis.bind(this),
+      "codeQL.monitorReauthenticatedVariantAnalysis":
+        this.monitorVariantAnalysis.bind(this),
       "codeQL.openVariantAnalysisLogs": this.openVariantAnalysisLogs.bind(this),
       "codeQL.openVariantAnalysisView": this.showView.bind(this),
       "codeQL.runVariantAnalysis":
@@ -504,6 +517,38 @@ export class VariantAnalysisManager
       repoStates[repoState.repositoryId] = repoState;
     }

+  private async onDidChangeSessions(
+    event: AuthenticationSessionsChangeEvent,
+  ): Promise<void> {
+    if (event.provider.id !== GITHUB_AUTH_PROVIDER_ID) {
+      return;
+    }
+
+    for (const variantAnalysis of this.variantAnalyses.values()) {
+      if (
+        this.variantAnalysisMonitor.isMonitoringVariantAnalysis(
+          variantAnalysis.id,
+        )
+      ) {
+        continue;
+      }
+
+      if (
+        await isVariantAnalysisComplete(
+          variantAnalysis,
+          this.makeResultDownloadChecker(variantAnalysis),
+        )
+      ) {
+        continue;
+      }
+
+      void this.app.commands.execute(
+        "codeQL.monitorReauthenticatedVariantAnalysis",
+        variantAnalysis,
+      );
+    }
+  }
+
   public async monitorVariantAnalysis(
     variantAnalysis: VariantAnalysis,
   ): Promise<void> {
@@ -572,12 +617,35 @@ export class VariantAnalysisManager
           });
         }
       };
-      await this.variantAnalysisResultsManager.download(
-        variantAnalysis.id,
-        repoTask,
-        this.getVariantAnalysisStorageLocation(variantAnalysis.id),
-        updateRepoStateCallback,
-      );
+      let retry = 0;
+      for (;;) {
+        try {
+          await this.variantAnalysisResultsManager.download(
+            variantAnalysis.id,
+            repoTask,
+            this.getVariantAnalysisStorageLocation(variantAnalysis.id),
+            updateRepoStateCallback,
+          );
+          break;
+        } catch (e) {
+          if (
+            retry++ < maxRetryCount &&
+            e instanceof FetchError &&
+            (e.code === "ETIMEDOUT" || e.code === "ECONNRESET")
+          ) {
+            void extLogger.log(
+              `Timeout while trying to download variant analysis with id: ${
+                variantAnalysis.id
+              }. Error: ${getErrorMessage(e)}. Retrying...`,
+            );
+            continue;
+          }
+          void extLogger.log(
+            `Failed to download variant analysis after ${retry} attempts.`,
+          );
+          throw e;
+        }
+      }
     } catch (e) {
       repoState.downloadStatus =
         VariantAnalysisScannedRepositoryDownloadStatus.Failed;
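One observation about the retry loop added above: since the guard is `retry++ < maxRetryCount` with a post-increment and `maxRetryCount` is 3, a download that keeps failing with `ETIMEDOUT` or `ECONNRESET` is attempted up to four times in total (one initial attempt plus three retries) before the error is rethrown and the existing catch block marks the repository download as failed. A stripped-down sketch of the same pattern; `download` and `isTransient` are placeholders, not extension APIs:

    // Sketch of the retry-on-transient-error loop used above.
    async function withRetries(
      download: () => Promise<void>,
      isTransient: (e: unknown) => boolean,
      maxRetryCount = 3,
    ): Promise<void> {
      let retry = 0;
      for (;;) {
        try {
          await download();
          return; // success
        } catch (e) {
          if (retry++ < maxRetryCount && isTransient(e)) {
            continue; // transient failure: try again
          }
          throw e; // retry budget spent, or a non-transient error
        }
      }
    }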
@@ -1,5 +1,6 @@
 import { env, EventEmitter } from "vscode";
 import { getVariantAnalysis } from "./gh-api/gh-api-client";
+import { RequestError } from "@octokit/request-error";

 import {
   isFinalVariantAnalysisStatus,
@@ -27,6 +28,8 @@ export class VariantAnalysisMonitor extends DisposableObject {
   );
   readonly onVariantAnalysisChange = this._onVariantAnalysisChange.event;

+  private readonly monitoringVariantAnalyses = new Set<number>();
+
   constructor(
     private readonly app: App,
     private readonly shouldCancelMonitor: (
@@ -36,9 +39,37 @@ export class VariantAnalysisMonitor extends DisposableObject {
     super();
   }

+  public isMonitoringVariantAnalysis(variantAnalysisId: number): boolean {
+    return this.monitoringVariantAnalyses.has(variantAnalysisId);
+  }
+
   public async monitorVariantAnalysis(
     variantAnalysis: VariantAnalysis,
   ): Promise<void> {
+    if (this.monitoringVariantAnalyses.has(variantAnalysis.id)) {
+      void extLogger.log(
+        `Already monitoring variant analysis ${variantAnalysis.id}`,
+      );
+      return;
+    }
+
+    this.monitoringVariantAnalyses.add(variantAnalysis.id);
+    try {
+      await this._monitorVariantAnalysis(variantAnalysis);
+    } finally {
+      this.monitoringVariantAnalyses.delete(variantAnalysis.id);
+    }
+  }
+
+  private async _monitorVariantAnalysis(
+    variantAnalysis: VariantAnalysis,
+  ): Promise<void> {
+    const variantAnalysisLabel = `${variantAnalysis.query.name} (${
+      variantAnalysis.query.language
+    }) [${new Date(variantAnalysis.executionStartTime).toLocaleString(
+      env.language,
+    )}]`;
+
     let attemptCount = 0;
     const scannedReposDownloaded: number[] = [];

@@ -61,11 +92,7 @@ export class VariantAnalysisMonitor extends DisposableObject {
       } catch (e) {
         const errorMessage = getErrorMessage(e);

-        const message = `Error while monitoring variant analysis ${
-          variantAnalysis.query.name
-        } (${variantAnalysis.query.language}) [${new Date(
-          variantAnalysis.executionStartTime,
-        ).toLocaleString(env.language)}]: ${errorMessage}`;
+        const message = `Error while monitoring variant analysis ${variantAnalysisLabel}: ${errorMessage}`;

         // If we have already shown this error to the user, don't show it again.
         if (lastErrorShown === errorMessage) {
@@ -75,6 +102,19 @@ export class VariantAnalysisMonitor extends DisposableObject {
           lastErrorShown = errorMessage;
         }

+        if (e instanceof RequestError && e.status === 404) {
+          // We want to show the error message to the user, but we don't want to
+          // keep polling for the variant analysis if it no longer exists.
+          // Therefore, this block is down here rather than at the top of the
+          // catch block.
+          void extLogger.log(
+            `Variant analysis ${variantAnalysisLabel} no longer exists or is no longer accessible, stopping monitoring.`,
+          );
+          // Cancel monitoring on 404, as this probably means the user does not have access to it anymore
+          // e.g. lost access to repo, or repo was deleted
+          return;
+        }
+
         continue;
       }

@@ -1,4 +1,4 @@
-import { appendFile, pathExists } from "fs-extra";
+import { appendFile, pathExists, rm } from "fs-extra";
 import fetch from "node-fetch";
 import { EOL } from "os";
 import { join } from "path";
@@ -82,6 +82,9 @@ export class VariantAnalysisResultsManager extends DisposableObject {

     const zipFilePath = join(resultDirectory, "results.zip");

+    // in case of restarted download delete possible artifact from previous download
+    await rm(zipFilePath, { force: true });
+
     const response = await fetch(repoTask.artifactUrl);

     let responseSize = parseInt(response.headers.get("content-length") || "0");
@@ -59,9 +59,7 @@ export function Compare(_: Record<string, never>): JSX.Element {
   return (
     <>
       <div className="vscode-codeql__compare-header">
-        <div className="vscode-codeql__compare-header-item">
-          Table to compare:
-        </div>
+        <div className="vscode-codeql__compare-header-item">Comparing:</div>
         <CompareSelector
           availableResultSets={comparison.commonResultSetNames}
           currentResultSetName={comparison.currentResultSetName}
@@ -7,7 +7,8 @@ interface Props {
 }

 export default function CompareSelector(props: Props) {
-  return (
+  return props.availableResultSets.length ? (
+    // Handle case where there are shared result sets
     <select
       value={props.currentResultSetName}
       onChange={(e) => props.updateResultSet(e.target.value)}
@@ -18,5 +19,8 @@ export default function CompareSelector(props: Props) {
         </option>
       ))}
     </select>
+  ) : (
+    // Handle case where there are no shared result sets
+    <div>{props.currentResultSetName}</div>
   );
 }
@@ -157,6 +157,14 @@ export function DataExtensionsEditor({
     });
   }, []);

+  const onGenerateFromLlmClick = useCallback(() => {
+    vscode.postMessage({
+      t: "generateExternalApiFromLlm",
+      externalApiUsages,
+      modeledMethods,
+    });
+  }, [externalApiUsages, modeledMethods]);
+
   const onOpenExtensionPackClick = useCallback(() => {
     vscode.postMessage({
       t: "openExtensionPack",
@@ -214,6 +222,14 @@ export function DataExtensionsEditor({
           <VSCodeButton onClick={onGenerateClick}>
             Download and generate
           </VSCodeButton>
+          {viewState?.showLlmButton && (
+            <>
+
+              <VSCodeButton onClick={onGenerateFromLlmClick}>
+                Generate using LLM
+              </VSCodeButton>
+            </>
+          )}
           <br />
           <br />
           <VSCodeDataGrid>
@@ -14,6 +14,9 @@ export type VariantAnalysisActionsProps = {
   onExportResultsClick: () => void;
   copyRepositoryListDisabled?: boolean;
   exportResultsDisabled?: boolean;
+
+  hasSelectedRepositories?: boolean;
+  hasFilteredRepositories?: boolean;
 };

 const Container = styled.div`
@@ -26,6 +29,28 @@ const Button = styled(VSCodeButton)`
   white-space: nowrap;
 `;
+
+const chooseText = ({
+  hasSelectedRepositories,
+  hasFilteredRepositories,
+  normalText,
+  selectedText,
+  filteredText,
+}: {
+  hasSelectedRepositories?: boolean;
+  hasFilteredRepositories?: boolean;
+  normalText: string;
+  selectedText: string;
+  filteredText: string;
+}) => {
+  if (hasSelectedRepositories) {
+    return selectedText;
+  }
+  if (hasFilteredRepositories) {
+    return filteredText;
+  }
+  return normalText;
+};

 export const VariantAnalysisActions = ({
   variantAnalysisStatus,
   onStopQueryClick,
@@ -35,6 +60,8 @@ export const VariantAnalysisActions = ({
   onExportResultsClick,
   copyRepositoryListDisabled,
   exportResultsDisabled,
+  hasSelectedRepositories,
+  hasFilteredRepositories,
 }: VariantAnalysisActionsProps) => {
   return (
     <Container>
@@ -45,14 +72,26 @@ export const VariantAnalysisActions = ({
             onClick={onCopyRepositoryListClick}
             disabled={copyRepositoryListDisabled}
           >
-            Copy repository list
+            {chooseText({
+              hasSelectedRepositories,
+              hasFilteredRepositories,
+              normalText: "Copy repository list",
+              selectedText: "Copy selected repositories as a list",
+              filteredText: "Copy filtered repositories as a list",
+            })}
           </Button>
           <Button
             appearance="primary"
             onClick={onExportResultsClick}
             disabled={exportResultsDisabled}
           >
-            Export results
+            {chooseText({
+              hasSelectedRepositories,
+              hasFilteredRepositories,
+              normalText: "Export results",
+              selectedText: "Export selected results",
+              filteredText: "Export filtered results",
+            })}
           </Button>
         </>
       )}
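For reference, the `chooseText` helper introduced above prefers the "selected" wording, then the "filtered" wording, and otherwise falls back to the plain label, so a selection wins when both flags are set. A usage sketch with the export button's labels (values chosen for illustration only):

    // Both flags set: the "selected" label wins.
    chooseText({
      hasSelectedRepositories: true,
      hasFilteredRepositories: true,
      normalText: "Export results",
      selectedText: "Export selected results",
      filteredText: "Export filtered results",
    }); // -> "Export selected results"

    // Only a filter active: the "filtered" label is used.
    chooseText({
      hasFilteredRepositories: true,
      normalText: "Export results",
      selectedText: "Export selected results",
      filteredText: "Export filtered results",
    }); // -> "Export filtered results"

This matches the expectations in the spec hunks further down in this diff.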
@@ -131,6 +131,13 @@ export const VariantAnalysisHeader = ({
           stopQueryDisabled={!variantAnalysis.actionsWorkflowRunId}
           exportResultsDisabled={!hasDownloadedRepos}
           copyRepositoryListDisabled={!hasReposWithResults}
+          hasFilteredRepositories={
+            variantAnalysis.scannedRepos?.length !==
+            filteredRepositories?.length
+          }
+          hasSelectedRepositories={
+            selectedRepositoryIds && selectedRepositoryIds.length > 0
+          }
         />
       </Row>
       <VariantAnalysisStats
@@ -93,4 +93,32 @@ describe(VariantAnalysisActions.name, () => {

     expect(container.querySelectorAll("vscode-button").length).toEqual(0);
   });
+
+  it("changes the text on the buttons when repositories are selected", async () => {
+    render({
+      variantAnalysisStatus: VariantAnalysisStatus.Succeeded,
+      showResultActions: true,
+      hasSelectedRepositories: true,
+      hasFilteredRepositories: true,
+    });
+
+    expect(screen.getByText("Export selected results")).toBeInTheDocument();
+    expect(
+      screen.getByText("Copy selected repositories as a list"),
+    ).toBeInTheDocument();
+  });
+
+  it("changes the text on the buttons when repositories are filtered", async () => {
+    render({
+      variantAnalysisStatus: VariantAnalysisStatus.Succeeded,
+      showResultActions: true,
+      hasSelectedRepositories: false,
+      hasFilteredRepositories: true,
+    });
+
+    expect(screen.getByText("Export filtered results")).toBeInTheDocument();
+    expect(
+      screen.getByText("Copy filtered repositories as a list"),
+    ).toBeInTheDocument();
+  });
 });
@@ -1,5 +1,5 @@
 [
-  "v2.13.1",
+  "v2.13.3",
   "v2.12.7",
   "v2.11.6",
   "v2.7.6",
@@ -8,6 +8,11 @@ import { testCredentialsWithStub } from "../factories/authentication";
 import { Credentials } from "../../src/common/authentication";
 import { AppCommandManager } from "../../src/common/commands";
 import { createMockCommandManager } from "./commandsMock";
+import type {
+  Event,
+  WorkspaceFolder,
+  WorkspaceFoldersChangeEvent,
+} from "vscode";

 export function createMockApp({
   extensionPath = "/mock/extension/path",
@@ -15,6 +20,8 @@ export function createMockApp({
   globalStoragePath = "/mock/global/storage/path",
   createEventEmitter = <T>() => new MockAppEventEmitter<T>(),
   workspaceState = createMockMemento(),
+  workspaceFolders = [],
+  onDidChangeWorkspaceFolders = jest.fn(),
   credentials = testCredentialsWithStub(),
   commands = createMockCommandManager(),
 }: {
@@ -23,6 +30,8 @@ export function createMockApp({
   globalStoragePath?: string;
   createEventEmitter?: <T>() => AppEventEmitter<T>;
   workspaceState?: Memento;
+  workspaceFolders?: readonly WorkspaceFolder[] | undefined;
+  onDidChangeWorkspaceFolders?: Event<WorkspaceFoldersChangeEvent>;
   credentials?: Credentials;
   commands?: AppCommandManager;
 }): App {
@@ -34,6 +43,8 @@ export function createMockApp({
     workspaceStoragePath,
     globalStoragePath,
     workspaceState,
+    workspaceFolders,
+    onDidChangeWorkspaceFolders,
     createEventEmitter,
     credentials,
     commands,
@@ -52,4 +63,8 @@ export class MockAppEventEmitter<T> implements AppEventEmitter<T> {
   public fire(): void {
     // no-op
   }
+
+  public dispose() {
+    // no-op
+  }
 }
@@ -33,7 +33,6 @@ export function createMockDB(
       datasetUri: databaseUri,
     } as DatabaseContents,
     dbOptions,
-    () => void 0,
   );
 }

@@ -0,0 +1,143 @@
|
|||||||
|
import type { Memento } from "vscode";
|
||||||
|
import { InvocationRateLimiter } from "../../../src/common/invocation-rate-limiter";
|
||||||
|
|
||||||
|
describe("Invocation rate limiter", () => {
|
||||||
|
// 1 January 2020
|
||||||
|
let currentUnixTime = 1577836800;
|
||||||
|
|
||||||
|
function createDate(dateString?: string): Date {
|
||||||
|
if (dateString) {
|
||||||
|
return new Date(dateString);
|
||||||
|
}
|
||||||
|
const numMillisecondsPerSecond = 1000;
|
||||||
|
return new Date(currentUnixTime * numMillisecondsPerSecond);
|
||||||
|
}
|
||||||
|
|
||||||
|
function createInvocationRateLimiter<T>(
|
||||||
|
funcIdentifier: string,
|
||||||
|
func: () => Promise<T>,
|
||||||
|
): InvocationRateLimiter<T> {
|
||||||
|
return new InvocationRateLimiter(
|
||||||
|
new MockMemento(),
|
||||||
|
funcIdentifier,
|
||||||
|
func,
|
||||||
|
(s) => createDate(s),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
class MockMemento implements Memento {
|
||||||
|
keys(): readonly string[] {
|
||||||
|
throw new Error("Method not implemented.");
|
||||||
|
}
|
||||||
|
map = new Map<any, any>();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a value.
|
||||||
|
*
|
||||||
|
* @param key A string.
|
||||||
|
* @param defaultValue A value that should be returned when there is no
|
||||||
|
* value (`undefined`) with the given key.
|
||||||
|
* @return The stored value or the defaultValue.
|
||||||
|
*/
|
||||||
|
get<T>(key: string, defaultValue?: T): T {
|
||||||
|
return this.map.has(key) ? this.map.get(key) : defaultValue;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Store a value. The value must be JSON-stringifyable.
|
||||||
|
*
|
||||||
|
* @param key A string.
|
||||||
|
* @param value A value. MUST not contain cyclic references.
|
||||||
|
*/
|
||||||
|
async update(key: string, value: any): Promise<void> {
|
||||||
|
this.map.set(key, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
it("initially invokes function", async () => {
|
||||||
|
let numTimesFuncCalled = 0;
|
||||||
|
const invocationRateLimiter = createInvocationRateLimiter(
|
||||||
|
"funcid",
|
||||||
|
async () => {
|
||||||
|
numTimesFuncCalled++;
|
||||||
|
},
|
||||||
|
);
|
||||||
|
await invocationRateLimiter.invokeFunctionIfIntervalElapsed(100);
|
||||||
|
expect(numTimesFuncCalled).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("doesn't invoke function again if no time has passed", async () => {
|
||||||
|
let numTimesFuncCalled = 0;
|
||||||
|
const invocationRateLimiter = createInvocationRateLimiter(
|
||||||
|
"funcid",
|
||||||
|
async () => {
|
||||||
|
numTimesFuncCalled++;
|
||||||
|
},
|
||||||
|
);
|
||||||
|
await invocationRateLimiter.invokeFunctionIfIntervalElapsed(100);
|
||||||
|
await invocationRateLimiter.invokeFunctionIfIntervalElapsed(100);
|
||||||
|
expect(numTimesFuncCalled).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("doesn't invoke function again if requested time since last invocation hasn't passed", async () => {
|
||||||
|
let numTimesFuncCalled = 0;
|
||||||
|
const invocationRateLimiter = createInvocationRateLimiter(
|
||||||
|
"funcid",
|
||||||
|
async () => {
|
||||||
|
numTimesFuncCalled++;
|
||||||
|
},
|
||||||
|
);
|
||||||
|
await invocationRateLimiter.invokeFunctionIfIntervalElapsed(100);
|
||||||
|
currentUnixTime += 1;
|
||||||
|
await invocationRateLimiter.invokeFunctionIfIntervalElapsed(2);
|
||||||
|
expect(numTimesFuncCalled).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("invokes function again immediately if requested time since last invocation is 0 seconds", async () => {
|
||||||
|
let numTimesFuncCalled = 0;
|
||||||
|
const invocationRateLimiter = createInvocationRateLimiter(
|
||||||
|
"funcid",
|
||||||
|
async () => {
|
||||||
|
numTimesFuncCalled++;
|
||||||
|
},
|
||||||
|
);
|
||||||
|
await invocationRateLimiter.invokeFunctionIfIntervalElapsed(0);
|
||||||
|
await invocationRateLimiter.invokeFunctionIfIntervalElapsed(0);
|
||||||
|
expect(numTimesFuncCalled).toBe(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("invokes function again after requested time since last invocation has elapsed", async () => {
|
||||||
|
let numTimesFuncCalled = 0;
|
||||||
|
const invocationRateLimiter = createInvocationRateLimiter(
|
||||||
|
"funcid",
|
||||||
|
async () => {
|
||||||
|
numTimesFuncCalled++;
|
||||||
|
},
|
||||||
|
);
|
||||||
|
await invocationRateLimiter.invokeFunctionIfIntervalElapsed(1);
|
||||||
|
currentUnixTime += 1;
|
||||||
|
await invocationRateLimiter.invokeFunctionIfIntervalElapsed(1);
|
||||||
|
expect(numTimesFuncCalled).toBe(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("invokes functions with different rate limiters", async () => {
|
||||||
|
let numTimesFuncACalled = 0;
|
||||||
|
const invocationRateLimiterA = createInvocationRateLimiter(
|
||||||
|
"funcid",
|
||||||
|
async () => {
|
||||||
|
numTimesFuncACalled++;
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let numTimesFuncBCalled = 0;
|
||||||
|
const invocationRateLimiterB = createInvocationRateLimiter(
|
||||||
|
"funcid",
|
||||||
|
async () => {
|
||||||
|
numTimesFuncBCalled++;
|
||||||
|
},
|
||||||
|
);
|
||||||
|
await invocationRateLimiterA.invokeFunctionIfIntervalElapsed(100);
|
||||||
|
await invocationRateLimiterB.invokeFunctionIfIntervalElapsed(100);
|
||||||
|
expect(numTimesFuncACalled).toBe(1);
|
||||||
|
expect(numTimesFuncBCalled).toBe(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -0,0 +1,471 @@
|
|||||||
|
import {
|
||||||
|
compareInputOutput,
|
||||||
|
createAutoModelRequest,
|
||||||
|
parsePredictedClassifications,
|
||||||
|
} from "../../../src/data-extensions-editor/auto-model";
|
||||||
|
import { ExternalApiUsage } from "../../../src/data-extensions-editor/external-api-usage";
|
||||||
|
import { ModeledMethod } from "../../../src/data-extensions-editor/modeled-method";
|
||||||
|
import {
|
||||||
|
ClassificationType,
|
||||||
|
Method,
|
||||||
|
} from "../../../src/data-extensions-editor/auto-model-api";
|
||||||
|
|
||||||
|
describe("createAutoModelRequest", () => {
|
||||||
|
const externalApiUsages: ExternalApiUsage[] = [
|
||||||
|
{
|
||||||
|
signature:
|
||||||
|
"org.springframework.boot.SpringApplication#run(Class,String[])",
|
||||||
|
packageName: "org.springframework.boot",
|
||||||
|
typeName: "SpringApplication",
|
||||||
|
methodName: "run",
|
||||||
|
methodParameters: "(Class,String[])",
|
||||||
|
supported: false,
|
||||||
|
usages: [
|
||||||
|
{
|
||||||
|
label: "run(...)",
|
||||||
|
url: {
|
||||||
|
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/Sql2oExampleApplication.java",
|
||||||
|
startLine: 9,
|
||||||
|
startColumn: 9,
|
||||||
|
endLine: 9,
|
||||||
|
endColumn: 66,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
signature: "org.sql2o.Connection#createQuery(String)",
|
||||||
|
packageName: "org.sql2o",
|
||||||
|
typeName: "Connection",
|
||||||
|
methodName: "createQuery",
|
||||||
|
methodParameters: "(String)",
|
||||||
|
supported: true,
|
||||||
|
usages: [
|
||||||
|
{
|
||||||
|
label: "createQuery(...)",
|
||||||
|
url: {
|
||||||
|
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
|
||||||
|
startLine: 15,
|
||||||
|
startColumn: 13,
|
||||||
|
endLine: 15,
|
||||||
|
endColumn: 56,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: "createQuery(...)",
|
||||||
|
url: {
|
||||||
|
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
|
||||||
|
startLine: 26,
|
||||||
|
startColumn: 13,
|
||||||
|
endLine: 26,
|
||||||
|
endColumn: 39,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
signature: "org.sql2o.Query#executeScalar(Class)",
|
||||||
|
packageName: "org.sql2o",
|
||||||
|
typeName: "Query",
|
||||||
|
methodName: "executeScalar",
|
||||||
|
methodParameters: "(Class)",
|
||||||
|
supported: true,
|
||||||
|
usages: [
|
||||||
|
{
|
||||||
|
label: "executeScalar(...)",
|
||||||
|
url: {
|
||||||
|
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
|
||||||
|
startLine: 15,
|
||||||
|
startColumn: 13,
|
||||||
|
endLine: 15,
|
||||||
|
endColumn: 85,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: "executeScalar(...)",
|
||||||
|
url: {
|
||||||
|
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
|
||||||
|
startLine: 26,
|
||||||
|
startColumn: 13,
|
||||||
|
endLine: 26,
|
||||||
|
endColumn: 68,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
signature: "org.sql2o.Sql2o#open()",
|
||||||
|
packageName: "org.sql2o",
|
||||||
|
typeName: "Sql2o",
|
||||||
|
methodName: "open",
|
||||||
|
methodParameters: "()",
|
||||||
|
supported: true,
|
||||||
|
usages: [
|
||||||
|
{
|
||||||
|
label: "open(...)",
|
||||||
|
url: {
|
||||||
|
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
|
||||||
|
startLine: 14,
|
||||||
|
startColumn: 24,
|
||||||
|
endLine: 14,
|
||||||
|
endColumn: 35,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: "open(...)",
|
||||||
|
url: {
|
||||||
|
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
|
||||||
|
startLine: 25,
|
||||||
|
startColumn: 24,
|
||||||
|
endLine: 25,
|
||||||
|
endColumn: 35,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
signature: "java.io.PrintStream#println(String)",
|
||||||
|
packageName: "java.io",
|
||||||
|
typeName: "PrintStream",
|
||||||
|
methodName: "println",
|
||||||
|
methodParameters: "(String)",
|
||||||
|
supported: true,
|
||||||
|
usages: [
|
||||||
|
{
|
||||||
|
label: "println(...)",
|
||||||
|
url: {
|
||||||
|
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
|
||||||
|
startLine: 29,
|
||||||
|
startColumn: 9,
|
||||||
|
endLine: 29,
|
||||||
|
endColumn: 49,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
signature: "org.sql2o.Sql2o#Sql2o(String,String,String)",
|
||||||
|
packageName: "org.sql2o",
|
||||||
|
typeName: "Sql2o",
|
||||||
|
methodName: "Sql2o",
|
||||||
|
methodParameters: "(String,String,String)",
|
||||||
|
supported: true,
|
||||||
|
usages: [
|
||||||
|
{
|
||||||
|
label: "new Sql2o(...)",
|
||||||
|
url: {
|
||||||
|
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
|
||||||
|
startLine: 10,
|
||||||
|
startColumn: 33,
|
||||||
|
endLine: 10,
|
||||||
|
endColumn: 88,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
signature: "org.sql2o.Sql2o#Sql2o(String)",
|
||||||
|
packageName: "org.sql2o",
|
||||||
|
typeName: "Sql2o",
|
||||||
|
methodName: "Sql2o",
|
||||||
|
methodParameters: "(String)",
|
||||||
|
supported: true,
|
||||||
|
usages: [
|
||||||
|
{
|
||||||
|
label: "new Sql2o(...)",
|
||||||
|
url: {
|
||||||
|
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
|
||||||
|
startLine: 23,
|
||||||
|
startColumn: 23,
|
||||||
|
endLine: 23,
|
||||||
|
endColumn: 36,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const modeledMethods: Record<string, ModeledMethod> = {
|
||||||
|
"org.sql2o.Sql2o#open()": {
|
||||||
|
type: "neutral",
|
||||||
|
kind: "",
|
||||||
|
input: "",
|
||||||
|
output: "",
|
||||||
|
},
|
||||||
|
"org.sql2o.Sql2o#Sql2o(String)": {
|
||||||
|
type: "sink",
|
||||||
|
kind: "jndi-injection",
|
||||||
|
input: "Argument[0]",
|
||||||
|
output: "",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const usages: Record<string, string[]> = {
|
||||||
|
"org.springframework.boot.SpringApplication#run(Class,String[])": [
|
||||||
|
"public class Sql2oExampleApplication {\n public static void main(String[] args) {\n SpringApplication.run(Sql2oExampleApplication.class, args);\n }\n}",
|
||||||
|
],
|
||||||
|
"org.sql2o.Connection#createQuery(String)": [
|
||||||
|
' public String index(@RequestParam("id") String id) {\n try (var con = sql2o.open()) {\n con.createQuery("select 1 where id = " + id).executeScalar(Integer.class);\n }\n\n',
|
||||||
|
'\n try (var con = sql2o.open()) {\n con.createQuery("select 1").executeScalar(Integer.class);\n }\n\n',
|
||||||
|
],
|
||||||
|
"org.sql2o.Query#executeScalar(Class)": [
|
||||||
|
' public String index(@RequestParam("id") String id) {\n try (var con = sql2o.open()) {\n con.createQuery("select 1 where id = " + id).executeScalar(Integer.class);\n }\n\n',
|
||||||
|
'\n try (var con = sql2o.open()) {\n con.createQuery("select 1").executeScalar(Integer.class);\n }\n\n',
|
||||||
|
],
|
||||||
|
"org.sql2o.Sql2o#open()": [
|
||||||
|
' @GetMapping("/")\n public String index(@RequestParam("id") String id) {\n try (var con = sql2o.open()) {\n con.createQuery("select 1 where id = " + id).executeScalar(Integer.class);\n }\n',
|
||||||
|
' Sql2o sql2o = new Sql2o(url);\n\n try (var con = sql2o.open()) {\n con.createQuery("select 1").executeScalar(Integer.class);\n }\n',
|
||||||
|
],
|
||||||
|
"java.io.PrintStream#println(String)": [
|
||||||
|
' }\n\n System.out.println("Connected to " + url);\n\n return "Greetings from Spring Boot!";\n',
|
||||||
|
],
|
||||||
|
"org.sql2o.Sql2o#Sql2o(String,String,String)": [
|
||||||
|
'@RestController\npublic class HelloController {\n private final Sql2o sql2o = new Sql2o("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1","sa", "");\n\n @GetMapping("/")\n',
|
||||||
|
],
|
||||||
|
"org.sql2o.Sql2o#Sql2o(String)": [
|
||||||
|
' @GetMapping("/connect")\n public String connect(@RequestParam("url") String url) {\n Sql2o sql2o = new Sql2o(url);\n\n try (var con = sql2o.open()) {\n',
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
it("creates a matching request", () => {
|
||||||
|
expect(
|
||||||
|
createAutoModelRequest("java", externalApiUsages, modeledMethods, usages),
|
||||||
|
).toEqual({
|
||||||
|
language: "java",
|
||||||
|
samples: [
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Sql2o",
|
||||||
|
name: "Sql2o",
|
||||||
|
signature: "(String)",
|
||||||
|
classification: {
|
||||||
|
type: "CLASSIFICATION_TYPE_SINK",
|
||||||
|
kind: "jndi-injection",
|
||||||
|
explanation: "",
|
||||||
|
},
|
||||||
|
usages: usages["org.sql2o.Sql2o#Sql2o(String)"],
|
||||||
|
input: "Argument[0]",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
candidates: [
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Connection",
|
||||||
|
name: "createQuery",
|
||||||
|
signature: "(String)",
|
||||||
|
usages: usages["org.sql2o.Connection#createQuery(String)"],
|
||||||
|
input: "Argument[0]",
|
||||||
|
classification: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Query",
|
||||||
|
name: "executeScalar",
|
||||||
|
signature: "(Class)",
|
||||||
|
usages: usages["org.sql2o.Query#executeScalar(Class)"],
|
||||||
|
input: "Argument[0]",
|
||||||
|
classification: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "org.springframework.boot",
|
||||||
|
type: "SpringApplication",
|
||||||
|
name: "run",
|
||||||
|
signature: "(Class,String[])",
|
||||||
|
usages:
|
||||||
|
usages[
|
||||||
|
"org.springframework.boot.SpringApplication#run(Class,String[])"
|
||||||
|
],
|
||||||
|
input: "Argument[0]",
|
||||||
|
classification: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "org.springframework.boot",
|
||||||
|
type: "SpringApplication",
|
||||||
|
name: "run",
|
||||||
|
signature: "(Class,String[])",
|
||||||
|
usages:
|
||||||
|
usages[
|
||||||
|
"org.springframework.boot.SpringApplication#run(Class,String[])"
|
||||||
|
],
|
||||||
|
input: "Argument[1]",
|
||||||
|
classification: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "java.io",
|
||||||
|
type: "PrintStream",
|
||||||
|
name: "println",
|
||||||
|
signature: "(String)",
|
||||||
|
usages: usages["java.io.PrintStream#println(String)"],
|
||||||
|
input: "Argument[0]",
|
||||||
|
classification: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Sql2o",
|
||||||
|
name: "Sql2o",
|
||||||
|
signature: "(String,String,String)",
|
||||||
|
usages: usages["org.sql2o.Sql2o#Sql2o(String,String,String)"],
|
||||||
|
input: "Argument[0]",
|
||||||
|
classification: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Sql2o",
|
||||||
|
name: "Sql2o",
|
||||||
|
signature: "(String,String,String)",
|
||||||
|
usages: usages["org.sql2o.Sql2o#Sql2o(String,String,String)"],
|
||||||
|
input: "Argument[1]",
|
||||||
|
classification: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Sql2o",
|
||||||
|
name: "Sql2o",
|
||||||
|
signature: "(String,String,String)",
|
||||||
|
usages: usages["org.sql2o.Sql2o#Sql2o(String,String,String)"],
|
||||||
|
input: "Argument[2]",
|
||||||
|
classification: undefined,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("parsePredictedClassifications", () => {
|
||||||
|
const predictions: Method[] = [
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Sql2o",
|
||||||
|
name: "createQuery",
|
||||||
|
signature: "(String)",
|
||||||
|
usages: ["createQuery(...)", "createQuery(...)"],
|
||||||
|
input: "Argument[0]",
|
||||||
|
classification: {
|
||||||
|
type: ClassificationType.Sink,
|
||||||
|
kind: "sql injection sink",
|
||||||
|
explanation: "",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Sql2o",
|
||||||
|
name: "executeScalar",
|
||||||
|
signature: "(Class)",
|
||||||
|
usages: ["executeScalar(...)", "executeScalar(...)"],
|
||||||
|
input: "Argument[0]",
|
||||||
|
classification: {
|
||||||
|
type: ClassificationType.Neutral,
|
||||||
|
kind: "",
|
||||||
|
explanation: "not a sink",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Sql2o",
|
||||||
|
name: "Sql2o",
|
||||||
|
signature: "(String,String,String)",
|
||||||
|
usages: ["new Sql2o(...)"],
|
||||||
|
input: "Argument[0]",
|
||||||
|
classification: {
|
||||||
|
type: ClassificationType.Neutral,
|
||||||
|
kind: "",
|
||||||
|
explanation: "not a sink",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Sql2o",
|
||||||
|
name: "Sql2o",
|
||||||
|
signature: "(String,String,String)",
|
||||||
|
usages: ["new Sql2o(...)"],
|
||||||
|
input: "Argument[1]",
|
||||||
|
classification: {
|
||||||
|
type: ClassificationType.Sink,
|
||||||
|
kind: "sql injection sink",
|
||||||
|
explanation: "",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
package: "org.sql2o",
|
||||||
|
type: "Sql2o",
|
||||||
|
name: "Sql2o",
|
||||||
|
signature: "(String,String,String)",
|
||||||
|
usages: ["new Sql2o(...)"],
|
||||||
|
input: "Argument[2]",
|
||||||
|
classification: {
|
||||||
|
type: ClassificationType.Sink,
|
||||||
|
kind: "sql injection sink",
|
||||||
|
explanation: "",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
it("correctly parses the output", () => {
|
||||||
|
expect(parsePredictedClassifications(predictions)).toEqual({
|
||||||
|
"org.sql2o.Sql2o#createQuery(String)": {
|
||||||
|
type: "sink",
|
||||||
|
kind: "sql injection sink",
|
||||||
|
input: "Argument[0]",
|
||||||
|
output: "",
|
||||||
|
},
|
||||||
|
"org.sql2o.Sql2o#executeScalar(Class)": {
|
||||||
|
type: "neutral",
|
||||||
|
kind: "",
|
||||||
|
input: "",
|
||||||
|
output: "",
|
||||||
|
},
|
||||||
|
"org.sql2o.Sql2o#Sql2o(String,String,String)": {
|
||||||
|
type: "sink",
|
||||||
|
kind: "sql injection sink",
|
||||||
|
input: "Argument[1]",
|
||||||
|
output: "",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("compareInputOutput", () => {
|
||||||
|
it("with two small numeric arguments", () => {
|
||||||
|
expect(
|
||||||
|
compareInputOutput("Argument[0]", "Argument[1]"),
|
||||||
|
).toBeLessThanOrEqual(-1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("with one larger non-alphabetic argument", () => {
|
||||||
|
expect(
|
||||||
|
compareInputOutput("Argument[10]", "Argument[2]"),
|
||||||
|
).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("with one non-numeric arguments", () => {
|
||||||
|
expect(
|
||||||
|
compareInputOutput("Argument[5]", "Argument[this]"),
|
||||||
|
).toBeLessThanOrEqual(-1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("with two non-numeric arguments", () => {
|
||||||
|
expect(
|
||||||
|
compareInputOutput("ReturnValue", "Argument[this]"),
|
||||||
|
).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("with one unknown argument in the a position", () => {
|
||||||
|
expect(
|
||||||
|
compareInputOutput("FooBar", "Argument[this]"),
|
||||||
|
).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("with one unknown argument in the b position", () => {
|
||||||
|
expect(compareInputOutput("Argument[this]", "FooBar")).toBeLessThanOrEqual(
|
||||||
|
-1,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("with one empty string arguments", () => {
|
||||||
|
expect(compareInputOutput("Argument[5]", "")).toBeLessThanOrEqual(-1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("with two unknown arguments", () => {
|
||||||
|
expect(compareInputOutput("FooBar", "BarFoo")).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -4,14 +4,13 @@ import { DecodedBqrsChunk } from "../../../src/pure/bqrs-cli-types";
|
|||||||
describe("decodeBqrsToExternalApiUsages", () => {
|
describe("decodeBqrsToExternalApiUsages", () => {
|
||||||
const chunk: DecodedBqrsChunk = {
|
const chunk: DecodedBqrsChunk = {
|
||||||
columns: [
|
columns: [
|
||||||
{ name: "apiName", kind: "String" },
|
|
||||||
{ name: "supported", kind: "Boolean" },
|
|
||||||
{ name: "usage", kind: "Entity" },
|
{ name: "usage", kind: "Entity" },
|
||||||
|
{ name: "apiName", kind: "String" },
|
||||||
|
{ kind: "String" },
|
||||||
|
{ kind: "String" },
|
||||||
],
|
],
|
||||||
tuples: [
|
tuples: [
|
||||||
[
|
[
|
||||||
"java.io.PrintStream#println(String)",
|
|
||||||
true,
|
|
||||||
{
|
{
|
||||||
label: "println(...)",
|
label: "println(...)",
|
||||||
url: {
|
url: {
|
||||||
@@ -22,10 +21,11 @@ describe("decodeBqrsToExternalApiUsages", () => {
|
|||||||
endColumn: 49,
|
endColumn: 49,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"java.io.PrintStream#println(String)",
|
||||||
|
"true",
|
||||||
|
"supported",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"org.springframework.boot.SpringApplication#run(Class,String[])",
|
|
||||||
false,
|
|
||||||
{
|
{
|
||||||
label: "run(...)",
|
label: "run(...)",
|
||||||
url: {
|
url: {
|
||||||
@@ -36,10 +36,11 @@ describe("decodeBqrsToExternalApiUsages", () => {
|
|||||||
endColumn: 66,
|
endColumn: 66,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"org.springframework.boot.SpringApplication#run(Class,String[])",
|
||||||
|
"false",
|
||||||
|
"supported",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"org.sql2o.Connection#createQuery(String)",
|
|
||||||
true,
|
|
||||||
{
|
{
|
||||||
label: "createQuery(...)",
|
label: "createQuery(...)",
|
||||||
url: {
|
url: {
|
||||||
@@ -50,10 +51,11 @@ describe("decodeBqrsToExternalApiUsages", () => {
|
|||||||
endColumn: 56,
|
endColumn: 56,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"org.sql2o.Connection#createQuery(String)",
|
||||||
|
"true",
|
||||||
|
"supported",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"org.sql2o.Connection#createQuery(String)",
|
|
||||||
true,
|
|
||||||
{
|
{
|
||||||
label: "createQuery(...)",
|
label: "createQuery(...)",
|
||||||
url: {
|
url: {
|
||||||
@@ -64,10 +66,11 @@ describe("decodeBqrsToExternalApiUsages", () => {
|
|||||||
endColumn: 39,
|
endColumn: 39,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"org.sql2o.Connection#createQuery(String)",
|
||||||
|
"true",
|
||||||
|
"supported",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"org.sql2o.Query#executeScalar(Class)",
|
|
||||||
true,
|
|
||||||
{
|
{
|
||||||
label: "executeScalar(...)",
|
label: "executeScalar(...)",
|
||||||
url: {
|
url: {
|
||||||
@@ -78,10 +81,11 @@ describe("decodeBqrsToExternalApiUsages", () => {
|
|||||||
endColumn: 85,
|
endColumn: 85,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"org.sql2o.Query#executeScalar(Class)",
|
||||||
|
"true",
|
||||||
|
"supported",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"org.sql2o.Query#executeScalar(Class)",
|
|
||||||
true,
|
|
||||||
{
|
{
|
||||||
label: "executeScalar(...)",
|
label: "executeScalar(...)",
|
||||||
url: {
|
url: {
|
||||||
@@ -92,10 +96,11 @@ describe("decodeBqrsToExternalApiUsages", () => {
|
|||||||
endColumn: 68,
|
endColumn: 68,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"org.sql2o.Query#executeScalar(Class)",
|
||||||
|
"true",
|
||||||
|
"supported",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"org.sql2o.Sql2o#open()",
|
|
||||||
true,
|
|
||||||
{
|
{
|
||||||
label: "open(...)",
|
label: "open(...)",
|
||||||
url: {
|
url: {
|
||||||
@@ -106,10 +111,11 @@ describe("decodeBqrsToExternalApiUsages", () => {
|
|||||||
endColumn: 35,
|
endColumn: 35,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"org.sql2o.Sql2o#open()",
|
||||||
|
"true",
|
||||||
|
"supported",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"org.sql2o.Sql2o#open()",
|
|
||||||
true,
|
|
||||||
{
|
{
|
||||||
label: "open(...)",
|
label: "open(...)",
|
||||||
url: {
|
url: {
|
||||||
@@ -120,10 +126,11 @@ describe("decodeBqrsToExternalApiUsages", () => {
|
|||||||
endColumn: 35,
|
endColumn: 35,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"org.sql2o.Sql2o#open()",
|
||||||
|
"true",
|
||||||
|
"supported",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"org.sql2o.Sql2o#Sql2o(String,String,String)",
|
|
||||||
true,
|
|
||||||
{
|
{
|
||||||
label: "new Sql2o(...)",
|
label: "new Sql2o(...)",
|
||||||
url: {
|
url: {
|
||||||
@@ -134,10 +141,11 @@ describe("decodeBqrsToExternalApiUsages", () => {
|
|||||||
endColumn: 88,
|
endColumn: 88,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"org.sql2o.Sql2o#Sql2o(String,String,String)",
|
||||||
|
"true",
|
||||||
|
"supported",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"org.sql2o.Sql2o#Sql2o(String)",
|
|
||||||
true,
|
|
||||||
{
|
{
|
||||||
label: "new Sql2o(...)",
|
label: "new Sql2o(...)",
|
||||||
url: {
|
url: {
|
||||||
@@ -148,6 +156,9 @@ describe("decodeBqrsToExternalApiUsages", () => {
|
|||||||
endColumn: 36,
|
endColumn: 36,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"org.sql2o.Sql2o#Sql2o(String)",
|
||||||
|
"true",
|
||||||
|
"supported",
|
||||||
],
|
],
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -241,6 +241,113 @@ describe("db config store", () => {
 configStore.dispose();
 });
 
+it("should add unique remote repositories to the correct list", async () => {
+// Initial set up
+const dbConfig = createDbConfig({
+remoteLists: [
+{
+name: "list1",
+repositories: ["owner/repo1"],
+},
+],
+});
+
+const configStore = await initializeConfig(dbConfig, configPath, app);
+expect(
+configStore.getConfig().value.databases.variantAnalysis
+.repositoryLists[0],
+).toEqual({
+name: "list1",
+repositories: ["owner/repo1"],
+});
+
+// Add
+const response = await configStore.addRemoteReposToList(
+["owner/repo1", "owner/repo2"],
+"list1",
+);
+
+// Read the config file
+const updatedDbConfig = (await readJSON(configPath)) as DbConfig;
+
+// Check that the config file has been updated
+const updatedRemoteDbs = updatedDbConfig.databases.variantAnalysis;
+expect(updatedRemoteDbs.repositories).toHaveLength(0);
+expect(updatedRemoteDbs.repositoryLists).toHaveLength(1);
+expect(updatedRemoteDbs.repositoryLists[0]).toEqual({
+name: "list1",
+repositories: ["owner/repo1", "owner/repo2"],
+});
+expect(response).toEqual([]);
+
+configStore.dispose();
+});
+
+it("should add no more than 1000 repositories to a remote list when adding multiple repos", async () => {
+// Initial set up
+const dbConfig = createDbConfig({
+remoteLists: [
+{
+name: "list1",
+repositories: [],
+},
+],
+});
+
+const configStore = await initializeConfig(dbConfig, configPath, app);
+
+// Add
+const response = await configStore.addRemoteReposToList(
+[...Array(1001).keys()].map((i) => `owner/db${i}`),
+"list1",
+);
+
+// Read the config file
+const updatedDbConfig = (await readJSON(configPath)) as DbConfig;
+
+// Check that the config file has been updated
+const updatedRemoteDbs = updatedDbConfig.databases.variantAnalysis;
+expect(updatedRemoteDbs.repositories).toHaveLength(0);
+expect(updatedRemoteDbs.repositoryLists).toHaveLength(1);
+expect(updatedRemoteDbs.repositoryLists[0].repositories).toHaveLength(
+1000,
+);
+expect(response).toEqual(["owner/db1000"]);
+
+configStore.dispose();
+});
+
+it("should add no more than 1000 repositories to a remote list when adding one repo", async () => {
+// Initial set up
+const dbConfig = createDbConfig({
+remoteLists: [
+{
+name: "list1",
+repositories: [...Array(1000).keys()].map((i) => `owner/db${i}`),
+},
+],
+});
+
+const configStore = await initializeConfig(dbConfig, configPath, app);
+
+// Add
+const reponse = await configStore.addRemoteRepo("owner/db1000", "list1");
+
+// Read the config file
+const updatedDbConfig = (await readJSON(configPath)) as DbConfig;
+
+// Check that the config file has been updated
+const updatedRemoteDbs = updatedDbConfig.databases.variantAnalysis;
+expect(updatedRemoteDbs.repositories).toHaveLength(0);
+expect(updatedRemoteDbs.repositoryLists).toHaveLength(1);
+expect(updatedRemoteDbs.repositoryLists[0].repositories).toHaveLength(
+1000,
+);
+expect(reponse).toEqual(["owner/db1000"]);
+
+configStore.dispose();
+});
+
 it("should add a remote owner", async () => {
 // Initial set up
 const dbConfig = createDbConfig();
@@ -88,6 +88,73 @@ describe("db manager", () => {
 ).toEqual("owner2/repo2");
 });
 
+it("should add new remote repos to a user defined list", async () => {
+const dbConfig: DbConfig = createDbConfig({
+remoteLists: [
+{
+name: "my-list-1",
+repositories: ["owner1/repo1"],
+},
+],
+});
+
+await saveDbConfig(dbConfig);
+
+await dbManager.addNewRemoteReposToList(["owner2/repo2"], "my-list-1");
+
+const dbConfigFileContents = await readDbConfigDirectly();
+expect(
+dbConfigFileContents.databases.variantAnalysis.repositoryLists.length,
+).toBe(1);
+
+expect(
+dbConfigFileContents.databases.variantAnalysis.repositoryLists[0],
+).toEqual({
+name: "my-list-1",
+repositories: ["owner1/repo1", "owner2/repo2"],
+});
+});
+
+it("should return truncated repos when adding multiple repos to a user defined list", async () => {
+const dbConfig: DbConfig = createDbConfig({
+remoteLists: [
+{
+name: "my-list-1",
+repositories: [...Array(1000).keys()].map((i) => `owner/db${i}`),
+},
+],
+});
+
+await saveDbConfig(dbConfig);
+
+const response = await dbManager.addNewRemoteReposToList(
+["owner2/repo2"],
+"my-list-1",
+);
+
+expect(response).toEqual(["owner2/repo2"]);
+});
+
+it("should return truncated repos when adding one repo to a user defined list", async () => {
+const dbConfig: DbConfig = createDbConfig({
+remoteLists: [
+{
+name: "my-list-1",
+repositories: [...Array(1000).keys()].map((i) => `owner/db${i}`),
+},
+],
+});
+
+await saveDbConfig(dbConfig);
+
+const response = await dbManager.addNewRemoteRepo(
+"owner2/repo2",
+"my-list-1",
+);
+
+expect(response).toEqual(["owner2/repo2"]);
+});
+
 it("should add a new remote repo to a user defined list", async () => {
 const dbConfig: DbConfig = createDbConfig({
 remoteLists: [
@@ -62,12 +62,17 @@ describe("getDbItemActions", () => {
 expect(actions.length).toEqual(0);
 });
 
-it("should set canBeSelected, canBeRemoved and canBeRenamed for remote user defined db list", () => {
+it("should set canBeSelected, canBeRemoved, canBeRenamed and canImportCodeSearch for remote user defined db list", () => {
 const dbItem = createRemoteUserDefinedListDbItem();
 
 const actions = getDbItemActions(dbItem);
 
-expect(actions).toEqual(["canBeSelected", "canBeRemoved", "canBeRenamed"]);
+expect(actions).toEqual([
+"canBeSelected",
+"canBeRemoved",
+"canBeRenamed",
+"canImportCodeSearch",
+]);
 });
 
 it("should not set canBeSelected for remote user defined db list that is already selected", () => {
@@ -1,4 +1,5 @@
 import * as ghApiClient from "../../../../src/variant-analysis/gh-api/gh-api-client";
+import { RequestError } from "@octokit/request-error";
 import { VariantAnalysisMonitor } from "../../../../src/variant-analysis/variant-analysis-monitor";
 import {
 VariantAnalysis as VariantAnalysisApiResponse,
@@ -297,6 +298,55 @@ describe("Variant Analysis Monitor", () => {
 expect(mockEecuteCommand).not.toBeCalled();
 });
 });
+
+describe("when a 404 is returned", () => {
+let showAndLogWarningMessageSpy: jest.SpiedFunction<
+typeof helpers.showAndLogWarningMessage
+>;
+
+beforeEach(async () => {
+showAndLogWarningMessageSpy = jest
+.spyOn(helpers, "showAndLogWarningMessage")
+.mockResolvedValue(undefined);
+
+const scannedRepos = createMockScannedRepos([
+"pending",
+"in_progress",
+"in_progress",
+"in_progress",
+"pending",
+"pending",
+]);
+mockApiResponse = createMockApiResponse("in_progress", scannedRepos);
+mockGetVariantAnalysis.mockResolvedValueOnce(mockApiResponse);
+
+mockGetVariantAnalysis.mockRejectedValueOnce(
+new RequestError("Not Found", 404, {
+request: {
+method: "GET",
+url: "",
+headers: {},
+},
+response: {
+status: 404,
+headers: {},
+url: "",
+data: {},
+},
+}),
+);
+});
+
+it("should stop requesting the variant analysis", async () => {
+await variantAnalysisMonitor.monitorVariantAnalysis(variantAnalysis);
+
+expect(mockGetVariantAnalysis).toHaveBeenCalledTimes(2);
+expect(showAndLogWarningMessageSpy).toHaveBeenCalledTimes(1);
+expect(showAndLogWarningMessageSpy).toHaveBeenCalledWith(
+expect.stringMatching(/not found/i),
+);
+});
+});
 });
 
 function limitNumberOfAttemptsToMonitor() {
@@ -28,7 +28,7 @@ import {
 QueryRunner,
 } from "../../../src/query-server/query-runner";
 import { SELECT_QUERY_NAME } from "../../../src/language-support";
-import { LocalQueries } from "../../../src/local-queries";
+import { LocalQueries, QuickEvalType } from "../../../src/local-queries";
 import { QueryResultType } from "../../../src/pure/new-messages";
 import { createVSCodeCommandManager } from "../../../src/common/vscode/commands";
 import {
@@ -45,7 +45,7 @@ async function compileAndRunQuery(
 mode: DebugMode,
 appCommands: AppCommandManager,
 localQueries: LocalQueries,
-quickEval: boolean,
+quickEval: QuickEvalType,
 queryUri: Uri,
 progress: ProgressCallback,
 token: CancellationToken,
@@ -184,7 +184,7 @@ describeWithCodeQL()("Queries", () => {
 mode,
 appCommandManager,
 localQueries,
-false,
+QuickEvalType.None,
 Uri.file(queryUsingExtensionPath),
 progress,
 token,
@@ -218,7 +218,7 @@ describeWithCodeQL()("Queries", () => {
 mode,
 appCommandManager,
 localQueries,
-false,
+QuickEvalType.None,
 Uri.file(queryPath),
 progress,
 token,
@@ -238,7 +238,7 @@ describeWithCodeQL()("Queries", () => {
 mode,
 appCommandManager,
 localQueries,
-false,
+QuickEvalType.None,
 Uri.file(queryPath),
 progress,
 token,
@@ -15,6 +15,7 @@ import {
 import { KeyType, resolveQueries } from "../../../src/language-support";
 import { faker } from "@faker-js/faker";
 import { getActivatedExtension } from "../global.helper";
+import { BaseLogger } from "../../../src/common";
 
 /**
 * Perform proper integration tests by running the CLI
@@ -23,10 +24,14 @@ describe("Use cli", () => {
 let cli: CodeQLCliServer;
 let supportedLanguages: string[];
+
+let logSpy: jest.SpiedFunction<BaseLogger["log"]>;
 
 beforeEach(async () => {
 const extension = await getActivatedExtension();
 cli = extension.cliServer;
 supportedLanguages = await cli.getSupportedLanguages();
+
+logSpy = jest.spyOn(cli.logger, "log");
 });
 
 if (process.env.CLI_VERSION && process.env.CLI_VERSION !== "nightly") {
@@ -42,6 +47,23 @@ describe("Use cli", () => {
 expect(result).toEqual(["-J-Xmx4096M", "--off-heap-ram=4096"]);
 });
+
+describe("silent logging", () => {
+it("should log command output", async () => {
+const queryDir = getOnDiskWorkspaceFolders()[0];
+await cli.resolveQueries(queryDir);
+
+expect(logSpy).toHaveBeenCalled();
+});
+
+it("shouldn't log command output if the `silent` flag is set", async () => {
+const queryDir = getOnDiskWorkspaceFolders()[0];
+const silent = true;
+await cli.resolveQueries(queryDir, silent);
+
+expect(logSpy).not.toHaveBeenCalled();
+});
+});
 
 itWithCodeQL()("should resolve query packs", async () => {
 const qlpacks = await cli.resolveQlpacks(getOnDiskWorkspaceFolders());
 // Depending on the version of the CLI, the qlpacks may have different names
@@ -546,9 +546,7 @@ describe("SkeletonQueryWizard", () => {
 dateAdded: 123,
 } as FullDatabaseOptions);
 
-jest
-.spyOn(mockDbItem, "error", "get")
-.mockReturnValue(asError("database go boom!"));
+mockDbItem.error = asError("database go boom!");
 
 const sortedList =
 await SkeletonQueryWizard.sortDatabaseItemsByDateAdded([
@@ -222,6 +222,7 @@ describe("Variant Analysis Manager", () => {
 it("should run a remote query that is part of a qlpack", async () => {
 await doVariantAnalysisTest({
 queryPath: "data-remote-qlpack/in-pack.ql",
+expectedPackName: "github/remote-query-pack",
 filesThatExist: ["in-pack.ql", "lib.qll"],
 filesThatDoNotExist: [],
 qlxFilesThatExist: ["in-pack.qlx"],
@@ -231,6 +232,7 @@ describe("Variant Analysis Manager", () => {
 it("should run a remote query that is not part of a qlpack", async () => {
 await doVariantAnalysisTest({
 queryPath: "data-remote-no-qlpack/in-pack.ql",
+expectedPackName: "codeql-remote/query",
 filesThatExist: ["in-pack.ql"],
 filesThatDoNotExist: ["lib.qll", "not-in-pack.ql"],
 qlxFilesThatExist: ["in-pack.qlx"],
@@ -240,6 +242,7 @@ describe("Variant Analysis Manager", () => {
 it("should run a remote query that is nested inside a qlpack", async () => {
 await doVariantAnalysisTest({
 queryPath: "data-remote-qlpack-nested/subfolder/in-pack.ql",
+expectedPackName: "github/remote-query-pack",
 filesThatExist: ["subfolder/in-pack.ql", "otherfolder/lib.qll"],
 filesThatDoNotExist: ["subfolder/not-in-pack.ql"],
 qlxFilesThatExist: ["subfolder/in-pack.qlx"],
@@ -256,6 +259,7 @@ describe("Variant Analysis Manager", () => {
 await cli.setUseExtensionPacks(true);
 await doVariantAnalysisTest({
 queryPath: "data-remote-qlpack-nested/subfolder/in-pack.ql",
+expectedPackName: "github/remote-query-pack",
 filesThatExist: [
 "subfolder/in-pack.ql",
 "otherfolder/lib.qll",
@@ -273,12 +277,14 @@ describe("Variant Analysis Manager", () => {
 
 async function doVariantAnalysisTest({
 queryPath,
+expectedPackName,
 filesThatExist,
 qlxFilesThatExist,
 filesThatDoNotExist,
 dependenciesToCheck = ["codeql/javascript-all"],
 }: {
 queryPath: string;
+expectedPackName: string;
 filesThatExist: string[];
 qlxFilesThatExist: string[];
 filesThatDoNotExist: string[];
@@ -332,7 +338,7 @@ describe("Variant Analysis Manager", () => {
 const qlpackContents = load(
 packFS.fileContents(packFileName).toString("utf-8"),
 );
-expect(qlpackContents.name).toEqual("codeql-remote/query");
+expect(qlpackContents.name).toEqual(expectedPackName);
 expect(qlpackContents.version).toEqual("0.0.0");
 expect(qlpackContents.dependencies?.["codeql/javascript-all"]).toEqual(
 "*",
@@ -349,7 +349,12 @@ describe("db panel rendering nodes", () => {
 expect(item.tooltip).toBeUndefined();
 expect(item.iconPath).toBeUndefined();
 expect(item.collapsibleState).toBe(TreeItemCollapsibleState.Collapsed);
-checkDbItemActions(item, ["canBeSelected", "canBeRenamed", "canBeRemoved"]);
+checkDbItemActions(item, [
+"canBeSelected",
+"canBeRenamed",
+"canBeRemoved",
+"canImportCodeSearch",
+]);
 expect(item.children).toBeTruthy();
 expect(item.children.length).toBe(repos.length);
 
@@ -26,7 +26,7 @@ export function run() {
 
 it("should allow ql files to be quick-evaled", async () => {
 await showQlDocument("query.ql");
-const q = await getQuickEvalContext(undefined);
+const q = await getQuickEvalContext(undefined, false);
 expect(
 q.quickEvalPosition.fileName.endsWith(
 join("ql-vscode", "test", "data", "query.ql"),
@@ -36,7 +36,7 @@ export function run() {
 
 it("should allow qll files to be quick-evaled", async () => {
 await showQlDocument("library.qll");
-const q = await getQuickEvalContext(undefined);
+const q = await getQuickEvalContext(undefined, false);
 expect(
 q.quickEvalPosition.fileName.endsWith(
 join("ql-vscode", "test", "data", "library.qll"),
@@ -55,7 +55,7 @@ export function run() {
 
 it("should reject non-ql[l] files when running a quick eval", async () => {
 await showQlDocument("textfile.txt");
-await expect(getQuickEvalContext(undefined)).rejects.toThrow(
+await expect(getQuickEvalContext(undefined, false)).rejects.toThrow(
 "The selected resource is not a CodeQL file",
 );
 });
@@ -9,7 +9,6 @@ import {
 DatabaseItemImpl,
 DatabaseManager,
 DatabaseResolver,
-findSourceArchive,
 FullDatabaseOptions,
 } from "../../../src/databases/local-databases";
 import { Logger } from "../../../src/common";
@@ -32,6 +31,7 @@ import {
 mockDbOptions,
 sourceLocationUri,
 } from "../../factories/databases/databases";
+import { findSourceArchive } from "../../../src/databases/local-databases/database-resolver";
 
 describe("local databases", () => {
 let databaseManager: DatabaseManager;
@@ -327,7 +327,7 @@ describe("local databases", () => {
 mockDbOptions(),
 Uri.parse("file:/sourceArchive-uri/"),
 );
-(db as any)._contents.sourceArchiveUri = undefined;
+(db as any).contents.sourceArchiveUri = undefined;
 expect(() => db.resolveSourceFile("abc")).toThrowError(
 "Scheme is missing",
 );
@@ -339,7 +339,7 @@ describe("local databases", () => {
 mockDbOptions(),
 Uri.parse("file:/sourceArchive-uri/"),
 );
-(db as any)._contents.sourceArchiveUri = undefined;
+(db as any).contents.sourceArchiveUri = undefined;
 expect(() => db.resolveSourceFile("http://abc")).toThrowError(
 "Invalid uri scheme",
 );
@@ -352,7 +352,7 @@ describe("local databases", () => {
 mockDbOptions(),
 Uri.parse("file:/sourceArchive-uri/"),
 );
-(db as any)._contents.sourceArchiveUri = undefined;
+(db as any).contents.sourceArchiveUri = undefined;
 const resolved = db.resolveSourceFile(undefined);
 expect(resolved.toString(true)).toBe(dbLocationUri(dir).toString(true));
 });
@@ -363,7 +363,7 @@ describe("local databases", () => {
 mockDbOptions(),
 Uri.parse("file:/sourceArchive-uri/"),
 );
-(db as any)._contents.sourceArchiveUri = undefined;
+(db as any).contents.sourceArchiveUri = undefined;
 const resolved = db.resolveSourceFile("file:");
 expect(resolved.toString()).toBe("file:///");
 });
@@ -0,0 +1,208 @@
import {
  EventEmitter,
  FileSystemWatcher,
  Uri,
  WorkspaceFoldersChangeEvent,
  workspace,
} from "vscode";
import { CodeQLCliServer } from "../../../../src/codeql-cli/cli";
import { QueryDiscovery } from "../../../../src/queries-panel/query-discovery";
import { createMockApp } from "../../../__mocks__/appMock";
import { mockedObject } from "../../utils/mocking.helpers";
import { basename, join, sep } from "path";

describe("QueryDiscovery", () => {
  beforeEach(() => {
    expect(workspace.workspaceFolders?.length).toEqual(1);
  });

  describe("queries", () => {
    it("should return empty list when no QL files are present", async () => {
      const resolveQueries = jest.fn().mockResolvedValue([]);
      const cli = mockedObject<CodeQLCliServer>({
        resolveQueries,
      });

      const discovery = new QueryDiscovery(createMockApp({}), cli);
      await discovery.refresh();
      const queries = discovery.queries;

      expect(queries).toEqual([]);
      expect(resolveQueries).toHaveBeenCalledTimes(1);
    });

    it("should organise query files into directories", async () => {
      const workspaceRoot = workspace.workspaceFolders![0].uri.fsPath;
      const cli = mockedObject<CodeQLCliServer>({
        resolveQueries: jest
          .fn()
          .mockResolvedValue([
            join(workspaceRoot, "dir1/query1.ql"),
            join(workspaceRoot, "dir2/query2.ql"),
            join(workspaceRoot, "query3.ql"),
          ]),
      });

      const discovery = new QueryDiscovery(createMockApp({}), cli);
      await discovery.refresh();
      const queries = discovery.queries;
      expect(queries).toBeDefined();

      expect(queries![0].children.length).toEqual(3);
      expect(queries![0].children[0].name).toEqual("dir1");
      expect(queries![0].children[0].children.length).toEqual(1);
      expect(queries![0].children[0].children[0].name).toEqual("query1.ql");
      expect(queries![0].children[1].name).toEqual("dir2");
      expect(queries![0].children[1].children.length).toEqual(1);
      expect(queries![0].children[1].children[0].name).toEqual("query2.ql");
      expect(queries![0].children[2].name).toEqual("query3.ql");
    });

    it("should collapse directories containing only a single element", async () => {
      const workspaceRoot = workspace.workspaceFolders![0].uri.fsPath;
      const cli = mockedObject<CodeQLCliServer>({
        resolveQueries: jest
          .fn()
          .mockResolvedValue([
            join(workspaceRoot, "dir1/query1.ql"),
            join(workspaceRoot, "dir1/dir2/dir3/dir3/query2.ql"),
          ]),
      });

      const discovery = new QueryDiscovery(createMockApp({}), cli);
      await discovery.refresh();
      const queries = discovery.queries;
      expect(queries).toBeDefined();

      expect(queries![0].children.length).toEqual(1);
      expect(queries![0].children[0].name).toEqual("dir1");
      expect(queries![0].children[0].children.length).toEqual(2);
      expect(queries![0].children[0].children[0].name).toEqual(
        "dir2 / dir3 / dir3",
      );
      expect(queries![0].children[0].children[0].children.length).toEqual(1);
      expect(queries![0].children[0].children[0].children[0].name).toEqual(
        "query2.ql",
      );
      expect(queries![0].children[0].children[1].name).toEqual("query1.ql");
    });

    it("calls resolveQueries once for each workspace folder", async () => {
      const workspaceRoots = [
        `${sep}workspace1`,
        `${sep}workspace2`,
        `${sep}workspace3`,
      ];
      jest.spyOn(workspace, "workspaceFolders", "get").mockReturnValueOnce(
        workspaceRoots.map((root, index) => ({
          uri: Uri.file(root),
          name: basename(root),
          index,
        })),
      );

      const resolveQueries = jest.fn().mockImplementation((queryDir) => {
        const workspaceIndex = workspaceRoots.indexOf(queryDir);
        if (workspaceIndex === -1) {
          throw new Error("Unexpected workspace");
        }
        return Promise.resolve([
          join(queryDir, `query${workspaceIndex + 1}.ql`),
        ]);
      });
      const cli = mockedObject<CodeQLCliServer>({
        resolveQueries,
      });

      const discovery = new QueryDiscovery(createMockApp({}), cli);
      await discovery.refresh();
      const queries = discovery.queries;
      expect(queries).toBeDefined();

      expect(queries!.length).toEqual(3);
      expect(queries![0].children[0].name).toEqual("query1.ql");
      expect(queries![1].children[0].name).toEqual("query2.ql");
      expect(queries![2].children[0].name).toEqual("query3.ql");

      expect(resolveQueries).toHaveBeenCalledTimes(3);
    });
  });

  describe("onDidChangeQueries", () => {
    it("should fire onDidChangeQueries when a watcher fires", async () => {
      const onWatcherDidChangeEvent = new EventEmitter<Uri>();
      const watcher: FileSystemWatcher = {
        ignoreCreateEvents: false,
        ignoreChangeEvents: false,
        ignoreDeleteEvents: false,
        onDidCreate: onWatcherDidChangeEvent.event,
        onDidChange: onWatcherDidChangeEvent.event,
        onDidDelete: onWatcherDidChangeEvent.event,
        dispose: () => undefined,
      };
      const createFileSystemWatcherSpy = jest.spyOn(
        workspace,
        "createFileSystemWatcher",
      );
      createFileSystemWatcherSpy.mockReturnValue(watcher);

      const workspaceRoot = workspace.workspaceFolders![0].uri.fsPath;
      const cli = mockedObject<CodeQLCliServer>({
        resolveQueries: jest
          .fn()
          .mockResolvedValue([join(workspaceRoot, "query1.ql")]),
      });

      const discovery = new QueryDiscovery(
        createMockApp({
          createEventEmitter: () => new EventEmitter(),
        }),
        cli,
      );

      const onDidChangeQueriesSpy = jest.fn();
      discovery.onDidChangeQueries(onDidChangeQueriesSpy);

      await discovery.refresh();

      expect(createFileSystemWatcherSpy).toHaveBeenCalledTimes(2);
      expect(onDidChangeQueriesSpy).toHaveBeenCalledTimes(1);

      onWatcherDidChangeEvent.fire(workspace.workspaceFolders![0].uri);

      await discovery.waitForCurrentRefresh();

      expect(onDidChangeQueriesSpy).toHaveBeenCalledTimes(2);
    });
  });

  describe("onDidChangeWorkspaceFolders", () => {
    it("should refresh when workspace folders change", async () => {
      const onDidChangeWorkspaceFoldersEvent =
        new EventEmitter<WorkspaceFoldersChangeEvent>();

      const discovery = new QueryDiscovery(
        createMockApp({
          createEventEmitter: () => new EventEmitter(),
          onDidChangeWorkspaceFolders: onDidChangeWorkspaceFoldersEvent.event,
        }),
        mockedObject<CodeQLCliServer>({
          resolveQueries: jest.fn().mockResolvedValue([]),
        }),
      );

      const onDidChangeQueriesSpy = jest.fn();
      discovery.onDidChangeQueries(onDidChangeQueriesSpy);

      await discovery.refresh();

      expect(onDidChangeQueriesSpy).toHaveBeenCalledTimes(1);

      onDidChangeWorkspaceFoldersEvent.fire({ added: [], removed: [] });

      await discovery.waitForCurrentRefresh();

      expect(onDidChangeQueriesSpy).toHaveBeenCalledTimes(2);
    });
  });
});
@@ -0,0 +1,93 @@
import { EventEmitter } from "vscode";
import {
  FileTreeDirectory,
  FileTreeLeaf,
} from "../../../../src/common/file-tree-nodes";
import {
  QueryDiscoverer,
  QueryTreeDataProvider,
} from "../../../../src/queries-panel/query-tree-data-provider";

describe("QueryTreeDataProvider", () => {
  describe("getChildren", () => {
    it("returns no children when queries is undefined", async () => {
      const dataProvider = new QueryTreeDataProvider({
        queries: undefined,
        onDidChangeQueries: jest.fn(),
      });

      expect(dataProvider.getChildren()).toEqual([]);
    });

    it("returns no children when there are no queries", async () => {
      const dataProvider = new QueryTreeDataProvider({
        queries: [],
        onDidChangeQueries: jest.fn(),
      });

      expect(dataProvider.getChildren()).toEqual([]);
    });

    it("converts FileTreeNode to QueryTreeViewItem", async () => {
      const dataProvider = new QueryTreeDataProvider({
        queries: [
          new FileTreeDirectory("dir1", "dir1", [
            new FileTreeDirectory("dir1/dir2", "dir2", [
              new FileTreeLeaf("dir1/dir2/file1", "file1"),
              new FileTreeLeaf("dir1/dir2/file1", "file2"),
            ]),
          ]),
          new FileTreeDirectory("dir3", "dir3", [
            new FileTreeLeaf("dir3/file3", "file3"),
          ]),
        ],
        onDidChangeQueries: jest.fn(),
      });

      expect(dataProvider.getChildren().length).toEqual(2);

      expect(dataProvider.getChildren()[0].label).toEqual("dir1");
      expect(dataProvider.getChildren()[0].children.length).toEqual(1);
      expect(dataProvider.getChildren()[0].children[0].label).toEqual("dir2");
      expect(dataProvider.getChildren()[0].children[0].children.length).toEqual(
        2,
      );
      expect(
        dataProvider.getChildren()[0].children[0].children[0].label,
      ).toEqual("file1");
      expect(
        dataProvider.getChildren()[0].children[0].children[1].label,
      ).toEqual("file2");

      expect(dataProvider.getChildren()[1].label).toEqual("dir3");
      expect(dataProvider.getChildren()[1].children.length).toEqual(1);
      expect(dataProvider.getChildren()[1].children[0].label).toEqual("file3");
    });
  });

  describe("onDidChangeQueries", () => {
    it("should update tree when the queries change", async () => {
      const onDidChangeQueriesEmitter = new EventEmitter<void>();
      const queryDiscoverer: QueryDiscoverer = {
        queries: [
          new FileTreeDirectory("dir1", "dir1", [
            new FileTreeLeaf("dir1/file1", "file1"),
          ]),
        ],
        onDidChangeQueries: onDidChangeQueriesEmitter.event,
      };

      const dataProvider = new QueryTreeDataProvider(queryDiscoverer);
      expect(dataProvider.getChildren().length).toEqual(1);

      queryDiscoverer.queries?.push(
        new FileTreeDirectory("dir2", "dir2", [
          new FileTreeLeaf("dir2/file2", "file2"),
        ]),
      );
      onDidChangeQueriesEmitter.fire();

      expect(dataProvider.getChildren().length).toEqual(2);
    });
  });
});