Merge branch 'main' into dbartol/new-test-ui
4117 changed lines in extensions/ql-vscode/package-lock.json (generated; diff suppressed because it is too large)
@@ -362,7 +362,7 @@
"title": "CodeQL: Quick Query"
},
{
"command": "codeQL.createSkeletonQuery",
"command": "codeQL.createQuery",
"title": "CodeQL: Create Query"
},
{
@@ -1328,6 +1328,10 @@
{
"command": "codeQL.mockGitHubApiServer.unloadScenario",
"when": "config.codeQL.mockGitHubApiServer.enabled && codeQL.mockGitHubApiServer.scenarioLoaded"
},
{
"command": "codeQL.createQuery",
"when": "config.codeQL.canary"
}
],
"editor/context": [
@@ -1500,7 +1504,7 @@
"@storybook/addon-essentials": "^6.5.17-alpha.0",
"@storybook/addon-interactions": "^6.5.17-alpha.0",
"@storybook/addon-links": "^6.5.17-alpha.0",
"@storybook/builder-webpack5": "^6.5.17-alpha.0",
"@storybook/builder-webpack5": "^7.0.4",
"@storybook/manager-webpack5": "^6.5.17-alpha.0",
"@storybook/react": "^6.5.17-alpha.0",
"@storybook/testing-library": "^0.0.13",

@@ -107,6 +107,21 @@ export type MlModelInfo = {
/** The expected output of `codeql resolve ml-models`. */
export type MlModelsInfo = { models: MlModelInfo[] };

/** Information about a data extension predicate, as resolved by `codeql resolve extensions`. */
export type DataExtensionResult = {
predicate: string;
file: string;
index: number;
};

/** The expected output of `codeql resolve extensions`. */
export type ResolveExtensionsResult = {
models: MlModelInfo[];
data: {
[path: string]: DataExtensionResult[];
};
};

/**
* The expected output of `codeql resolve qlref`.
*/
@@ -1197,6 +1212,29 @@ export class CodeQLCliServer implements Disposable {
);
}

/**
* Gets information about available extensions
* @param suite The suite to resolve.
* @param additionalPacks A list of directories to search for qlpacks.
* @returns An object containing the list of models and extensions
*/
async resolveExtensions(
suite: string,
additionalPacks: string[],
): Promise<ResolveExtensionsResult> {
const args = this.getAdditionalPacksArg(additionalPacks);
args.push(suite);

return this.runJsonCodeQlCliCommand<ResolveExtensionsResult>(
["resolve", "extensions"],
args,
"Resolving extensions",
{
addFormat: false,
},
);
}

/**
* Gets information about the available languages.
* @returns A dictionary mapping language name to the directory it comes from

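As a rough illustration of how the new resolveExtensions method and the ResolveExtensionsResult type are meant to be consumed (this mirrors the pickModelFile helper added later in this diff; the wrapper function and its name are hypothetical, not part of the change):

// Sketch: collect the model files that a given extension pack contributes.
// `cliServer` is assumed to be an initialised CodeQLCliServer instance.
async function listModelFiles(
  cliServer: CodeQLCliServer,
  extensionPackPath: string,
  additionalPacks: string[],
): Promise<Set<string>> {
  const result: ResolveExtensionsResult = await cliServer.resolveExtensions(
    extensionPackPath,
    additionalPacks,
  );
  const modelFiles = new Set<string>();
  // `data` is keyed by pack path; each entry lists the resolved data
  // extension predicates and the file each one lives in.
  for (const extension of result.data[extensionPackPath] ?? []) {
    modelFiles.add(extension.file);
  }
  return modelFiles;
}
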
@@ -13,16 +13,51 @@ import type {
} from "../variant-analysis/shared/variant-analysis";

// A command function matching the signature that VS Code calls when
// a command on a selection is invoked.
export type SelectionCommandFunction<Item> = (
singleItem: Item,
multiSelect: Item[],
// a command is invoked from the title bar of a TreeView with
// canSelectMany set to true.
//
// It is possible to get any combination of singleItem and multiSelect
// to be undefined. This is because it is possible to click a title bar
// option without interacting with any individual items first, or even
// when there are no items present at all.
// If both singleItem and multiSelect are defined, then singleItem will
// be contained within multiSelect.
export type TreeViewTitleMultiSelectionCommandFunction<Item> = (
singleItem: Item | undefined,
multiSelect: Item[] | undefined,
) => Promise<void>;

// A command function matching the signature that VS Code calls when
// a command on a selection is invoked when canSelectMany is false.
export type SingleSelectionCommandFunction<Item> = (
// a command is invoked from a context menu on a TreeView with
// canSelectMany set to true.
//
// singleItem will always be defined and corresponds to the item that
// was hovered or right-clicked. If precisely one item was selected then
// multiSelect will be undefined. If more than one item is selected then
// multiSelect will contain all selected items, including singleItem.
export type TreeViewContextMultiSelectionCommandFunction<Item> = (
singleItem: Item,
multiSelect: Item[] | undefined,
) => Promise<void>;

// A command function matching the signature that VS Code calls when
// a command is invoked from a context menu on a TreeView with
// canSelectMany set to false.
//
// It is guaranteed that precisely one item will be selected.
export type TreeViewContextSingleSelectionCommandFunction<Item> = (
singleItem: Item,
) => Promise<void>;

// A command function matching the signature that VS Code calls when
// a command is invoked from a context menu on the file explorer.
//
// singleItem corresponds to the item that was right-clicked.
// multiSelect will always been defined and non-empty and contains
// all selected items, including singleItem.
export type ExplorerSelectionCommandFunction<Item> = (
singleItem: Item,
multiSelect: Item[],
) => Promise<void>;

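These signatures push the undefined-handling onto each handler. A minimal sketch of a title-bar handler written against TreeViewTitleMultiSelectionCommandFunction follows; the handler name and its body are illustrative only and assume QueryHistoryInfo is in scope:

// Sketch of a title-bar handler: both arguments may be undefined, so the
// body normalises them into a plain array before doing any work.
const removeSelectedItems: TreeViewTitleMultiSelectionCommandFunction<
  QueryHistoryInfo
> = async (singleItem, multiSelect) => {
  const items = multiSelect ?? (singleItem ? [singleItem] : []);
  if (items.length === 0) {
    return; // invoked with nothing selected and no item interacted with
  }
  // ... act on `items`; when both arguments are defined, singleItem is
  // already contained in multiSelect, so no deduplication is needed.
};
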
/**
@@ -94,12 +129,12 @@ export type LocalQueryCommands = {
"codeQL.runQueryOnMultipleDatabasesContextEditor": (
uri?: Uri,
) => Promise<void>;
"codeQL.runQueries": SelectionCommandFunction<Uri>;
"codeQL.runQueries": ExplorerSelectionCommandFunction<Uri>;
"codeQL.quickEval": (uri: Uri) => Promise<void>;
"codeQL.quickEvalContextEditor": (uri: Uri) => Promise<void>;
"codeQL.codeLensQuickEval": (uri: Uri, range: Range) => Promise<void>;
"codeQL.quickQuery": () => Promise<void>;
"codeQL.createSkeletonQuery": () => Promise<void>;
"codeQL.createQuery": () => Promise<void>;
};

export type ResultsViewCommands = {
@@ -119,28 +154,28 @@ export type QueryHistoryCommands = {
"codeQLQueryHistory.sortByCount": () => Promise<void>;

// Commands in the context menu or in the hover menu
"codeQLQueryHistory.openQueryTitleMenu": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.openQueryContextMenu": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.removeHistoryItemTitleMenu": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.removeHistoryItemContextMenu": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.removeHistoryItemContextInline": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.renameItem": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.compareWith": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.showEvalLog": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.showEvalLogSummary": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.showEvalLogViewer": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.showQueryLog": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.showQueryText": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.openQueryDirectory": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.cancel": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.exportResults": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.viewCsvResults": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.viewCsvAlerts": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.viewSarifAlerts": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.viewDil": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.itemClicked": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.openOnGithub": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.copyRepoList": SelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.openQueryTitleMenu": TreeViewTitleMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.openQueryContextMenu": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.removeHistoryItemTitleMenu": TreeViewTitleMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.removeHistoryItemContextMenu": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.removeHistoryItemContextInline": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.renameItem": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.compareWith": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.showEvalLog": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.showEvalLogSummary": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.showEvalLogViewer": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.showQueryLog": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.showQueryText": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.openQueryDirectory": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.cancel": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.exportResults": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.viewCsvResults": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.viewCsvAlerts": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.viewSarifAlerts": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.viewDil": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.itemClicked": TreeViewTitleMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.openOnGithub": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;
"codeQLQueryHistory.copyRepoList": TreeViewContextMultiSelectionCommandFunction<QueryHistoryInfo>;

// Commands in the command palette
"codeQL.exportSelectedVariantAnalysisResults": () => Promise<void>;
@@ -173,11 +208,11 @@ export type LocalDatabasesCommands = {
) => Promise<void>;

// Database panel selection commands
"codeQLDatabases.removeDatabase": SelectionCommandFunction<DatabaseItem>;
"codeQLDatabases.upgradeDatabase": SelectionCommandFunction<DatabaseItem>;
"codeQLDatabases.renameDatabase": SelectionCommandFunction<DatabaseItem>;
"codeQLDatabases.openDatabaseFolder": SelectionCommandFunction<DatabaseItem>;
"codeQLDatabases.addDatabaseSource": SelectionCommandFunction<DatabaseItem>;
"codeQLDatabases.removeDatabase": TreeViewContextMultiSelectionCommandFunction<DatabaseItem>;
"codeQLDatabases.upgradeDatabase": TreeViewContextMultiSelectionCommandFunction<DatabaseItem>;
"codeQLDatabases.renameDatabase": TreeViewContextMultiSelectionCommandFunction<DatabaseItem>;
"codeQLDatabases.openDatabaseFolder": TreeViewContextMultiSelectionCommandFunction<DatabaseItem>;
"codeQLDatabases.addDatabaseSource": TreeViewContextMultiSelectionCommandFunction<DatabaseItem>;

// Codespace template commands
"codeQL.setDefaultTourDatabase": () => Promise<void>;
@@ -222,11 +257,11 @@ export type DatabasePanelCommands = {
"codeQLVariantAnalysisRepositories.addNewList": () => Promise<void>;
"codeQLVariantAnalysisRepositories.setupControllerRepository": () => Promise<void>;

"codeQLVariantAnalysisRepositories.setSelectedItem": SingleSelectionCommandFunction<DbTreeViewItem>;
"codeQLVariantAnalysisRepositories.setSelectedItemContextMenu": SingleSelectionCommandFunction<DbTreeViewItem>;
"codeQLVariantAnalysisRepositories.openOnGitHubContextMenu": SingleSelectionCommandFunction<DbTreeViewItem>;
"codeQLVariantAnalysisRepositories.renameItemContextMenu": SingleSelectionCommandFunction<DbTreeViewItem>;
"codeQLVariantAnalysisRepositories.removeItemContextMenu": SingleSelectionCommandFunction<DbTreeViewItem>;
"codeQLVariantAnalysisRepositories.setSelectedItem": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
"codeQLVariantAnalysisRepositories.setSelectedItemContextMenu": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
"codeQLVariantAnalysisRepositories.openOnGitHubContextMenu": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
"codeQLVariantAnalysisRepositories.renameItemContextMenu": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
"codeQLVariantAnalysisRepositories.removeItemContextMenu": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
};

export type AstCfgCommands = {

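For context, command records with these shapes are ultimately registered with VS Code. The real extension routes them through its own command manager, so the loop below is only a sketch of the underlying vscode API, with an illustrative function name:

import { commands, Disposable } from "vscode";

// Hand each entry of a command record to VS Code; the returned Disposables
// should be pushed onto the extension context's subscriptions.
function registerCommandRecord(
  record: Record<string, (...args: any[]) => Promise<unknown>>,
): Disposable[] {
  return Object.entries(record).map(([commandId, handler]) =>
    commands.registerCommand(commandId, handler),
  );
}
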
@@ -8,6 +8,8 @@ import { ensureDir } from "fs-extra";
import { join } from "path";
import { App } from "../common/app";
import { showAndLogErrorMessage } from "../helpers";
import { withProgress } from "../progress";
import { pickExtensionPackModelFile } from "./extension-pack-picker";

export class DataExtensionsEditorModule {
private readonly queryStorageDir: string;
@@ -49,31 +51,46 @@ export class DataExtensionsEditorModule {

public getCommands(): DataExtensionsEditorCommands {
return {
"codeQL.openDataExtensionsEditor": async () => {
const db = this.databaseManager.currentDatabaseItem;
if (!db) {
void showAndLogErrorMessage("No database selected");
return;
}
"codeQL.openDataExtensionsEditor": async () =>
withProgress(
async (progress) => {
const db = this.databaseManager.currentDatabaseItem;
if (!db) {
void showAndLogErrorMessage("No database selected");
return;
}

if (!(await this.cliServer.cliConstraints.supportsQlpacksKind())) {
void showAndLogErrorMessage(
`This feature requires CodeQL CLI version ${CliVersionConstraint.CLI_VERSION_WITH_QLPACKS_KIND.format()} or later.`,
);
return;
}
if (!(await this.cliServer.cliConstraints.supportsQlpacksKind())) {
void showAndLogErrorMessage(
`This feature requires CodeQL CLI version ${CliVersionConstraint.CLI_VERSION_WITH_QLPACKS_KIND.format()} or later.`,
);
return;
}

const view = new DataExtensionsEditorView(
this.ctx,
this.app,
this.databaseManager,
this.cliServer,
this.queryRunner,
this.queryStorageDir,
db,
);
await view.openView();
},
const modelFile = await pickExtensionPackModelFile(
this.cliServer,
progress,
);
if (!modelFile) {
return;
}

const view = new DataExtensionsEditorView(
this.ctx,
this.app,
this.databaseManager,
this.cliServer,
this.queryRunner,
this.queryStorageDir,
db,
modelFile,
);
await view.openView();
},
{
title: "Opening Data Extensions Editor",
},
),
};
}

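The command above now reports progress through the extension's ProgressCallback. A minimal sketch of that reporting pattern, matching the shape of the calls made in extension-pack-picker.ts further down in this diff (the step count, messages, and function name here are illustrative):

import { ProgressCallback } from "../progress";

// Report a fixed number of steps through the callback that withProgress
// supplies to its task.
async function doWorkWithProgress(progress: ProgressCallback): Promise<void> {
  const maxStep = 2;
  progress({ message: "Resolving extension packs...", step: 1, maxStep });
  // ... first unit of work ...
  progress({ message: "Choosing model file...", step: 2, maxStep });
  // ... second unit of work ...
}

Passed as the task to withProgress, these updates surface in the notification created with the "Opening Data Extensions Editor" title.
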
@@ -1,7 +1,6 @@
import {
CancellationTokenSource,
ExtensionContext,
Uri,
ViewColumn,
window,
workspace,
@@ -15,8 +14,8 @@ import {
import { ProgressUpdate } from "../progress";
import { QueryRunner } from "../queryRunner";
import {
showAndLogErrorMessage,
showAndLogExceptionWithTelemetry,
showAndLogWarningMessage,
} from "../helpers";
import { extLogger } from "../common";
import { readFile, writeFile } from "fs-extra";
@@ -61,6 +60,7 @@ export class DataExtensionsEditorView extends AbstractWebview<
private readonly queryRunner: QueryRunner,
private readonly queryStorageDir: string,
private readonly databaseItem: DatabaseItem,
private readonly modelFilename: string,
) {
super(ctx);
}
@@ -148,35 +148,27 @@ export class DataExtensionsEditorView extends AbstractWebview<
externalApiUsages: ExternalApiUsage[],
modeledMethods: Record<string, ModeledMethod>,
): Promise<void> {
const modelFilename = this.calculateModelFilename();
if (!modelFilename) {
return;
}

const yaml = createDataExtensionYaml(externalApiUsages, modeledMethods);

await writeFile(modelFilename, yaml);
await writeFile(this.modelFilename, yaml);

void extLogger.log(`Saved data extension YAML to ${modelFilename}`);
void extLogger.log(`Saved data extension YAML to ${this.modelFilename}`);
}

protected async loadExistingModeledMethods(): Promise<void> {
const modelFilename = this.calculateModelFilename();
if (!modelFilename) {
return;
}

try {
const yaml = await readFile(modelFilename, "utf8");
const yaml = await readFile(this.modelFilename, "utf8");

const data = loadYaml(yaml, {
filename: modelFilename,
filename: this.modelFilename,
});

const existingModeledMethods = loadDataExtensionYaml(data);

if (!existingModeledMethods) {
void showAndLogWarningMessage("Failed to parse data extension YAML.");
void showAndLogErrorMessage(
`Failed to parse data extension YAML ${this.modelFilename}.`,
);
return;
}

@@ -185,7 +177,11 @@ export class DataExtensionsEditorView extends AbstractWebview<
modeledMethods: existingModeledMethods,
});
} catch (e: unknown) {
void extLogger.log(`Unable to read data extension YAML: ${e}`);
void showAndLogErrorMessage(
`Unable to read data extension YAML ${
this.modelFilename
}: ${getErrorMessage(e)}`,
);
}
}

@@ -218,7 +214,6 @@ export class DataExtensionsEditorView extends AbstractWebview<
const bqrsChunk = await readQueryResults({
cliServer: this.cliServer,
bqrsPath: queryResult.outputDir.bqrsPath,
logger: extLogger,
});
if (!bqrsChunk) {
await this.clearProgress();
@@ -243,7 +238,7 @@ export class DataExtensionsEditorView extends AbstractWebview<
void showAndLogExceptionWithTelemetry(
redactableError(
asError(err),
)`Failed to load external APi usages: ${getErrorMessage(err)}`,
)`Failed to load external API usages: ${getErrorMessage(err)}`,
);
}
}
@@ -365,17 +360,4 @@ export class DataExtensionsEditorView extends AbstractWebview<
message: "",
});
}

private calculateModelFilename(): string | undefined {
const workspaceFolder = getQlSubmoduleFolder();
if (!workspaceFolder) {
return;
}

return Uri.joinPath(
workspaceFolder.uri,
"java/ql/lib/ext",
`${this.databaseItem.name.replaceAll("/", ".")}.model.yml`,
).fsPath;
}
}

@@ -0,0 +1,45 @@
{
"type": "object",
"properties": {
"extensions": {
"type": "array",
"items": {
"type": "object",
"required": ["addsTo", "data"],
"properties": {
"addsTo": {
"type": "object",
"required": ["pack", "extensible"],
"properties": {
"pack": {
"type": "string"
},
"extensible": {
"type": "string"
}
}
},
"data": {
"type": "array",
"items": {
"type": "array",
"items": {
"oneOf": [
{
"type": "string"
},
{
"type": "boolean"
},
{
"type": "number"
}
]
}
}
}
}
}
}
}
}
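To make the schema concrete, here is a hedged sketch of a payload it accepts and of the Ajv wiring that yaml.ts (further down in this diff) uses to compile it; the pack name, predicate name, and data row below are made-up placeholders, not values from the change:

import Ajv from "ajv";
import * as dataSchemaJson from "./data-schema.json";

const ajv = new Ajv({ allErrors: true });
const validate = ajv.compile(dataSchemaJson);

// A minimal object that satisfies the schema: one extension block whose
// `data` rows may mix strings, booleans, and numbers.
const sample = {
  extensions: [
    {
      addsTo: { pack: "example/java-extensions", extensible: "exampleModel" },
      data: [["com.example.Widget", "draw", true, 0]],
    },
  ],
};

validate(sample);
console.log(validate.errors ?? "valid");
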
@@ -0,0 +1,118 @@
import { relative, sep } from "path";
import { window } from "vscode";
import { CodeQLCliServer } from "../cli";
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage } from "../helpers";
import { ProgressCallback } from "../progress";

const maxStep = 3;

export async function pickExtensionPackModelFile(
cliServer: Pick<CodeQLCliServer, "resolveQlpacks" | "resolveExtensions">,
progress: ProgressCallback,
): Promise<string | undefined> {
const extensionPackPath = await pickExtensionPack(cliServer, progress);
if (!extensionPackPath) {
return;
}

const modelFile = await pickModelFile(cliServer, progress, extensionPackPath);
if (!modelFile) {
return;
}

return modelFile;
}

async function pickExtensionPack(
cliServer: Pick<CodeQLCliServer, "resolveQlpacks">,
progress: ProgressCallback,
): Promise<string | undefined> {
progress({
message: "Resolving extension packs...",
step: 1,
maxStep,
});

// Get all existing extension packs in the workspace
const additionalPacks = getOnDiskWorkspaceFolders();
const extensionPacks = await cliServer.resolveQlpacks(additionalPacks, true);
const options = Object.keys(extensionPacks).map((pack) => ({
label: pack,
extensionPack: pack,
}));

progress({
message: "Choosing extension pack...",
step: 2,
maxStep,
});

const extensionPackOption = await window.showQuickPick(options, {
title: "Select extension pack to use",
});
if (!extensionPackOption) {
return undefined;
}

const extensionPackPaths = extensionPacks[extensionPackOption.extensionPack];
if (extensionPackPaths.length !== 1) {
void showAndLogErrorMessage(
`Extension pack ${extensionPackOption.extensionPack} could not be resolved to a single location`,
{
fullMessage: `Extension pack ${
extensionPackOption.extensionPack
} could not be resolved to a single location. Found ${
extensionPackPaths.length
} locations: ${extensionPackPaths.join(", ")}.`,
},
);
return undefined;
}

return extensionPackPaths[0];
}

async function pickModelFile(
cliServer: Pick<CodeQLCliServer, "resolveExtensions">,
progress: ProgressCallback,
extensionPackPath: string,
): Promise<string | undefined> {
// Find the existing model files in the extension pack
const additionalPacks = getOnDiskWorkspaceFolders();
const extensions = await cliServer.resolveExtensions(
extensionPackPath,
additionalPacks,
);

const modelFiles = new Set<string>();

if (extensionPackPath in extensions.data) {
for (const extension of extensions.data[extensionPackPath]) {
modelFiles.add(extension.file);
}
}

const fileOptions: Array<{ label: string; file: string }> = [];
for (const file of modelFiles) {
fileOptions.push({
label: relative(extensionPackPath, file).replaceAll(sep, "/"),
file,
});
}

progress({
message: "Choosing model file...",
step: 3,
maxStep,
});

const fileOption = await window.showQuickPick(fileOptions, {
title: "Select model file to use",
});

if (!fileOption) {
return;
}

return fileOption.file;
}
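Both pickers above rely on the same VS Code quick-pick idiom: showQuickPick resolves to the chosen option object itself, so extra payload fields (extensionPack, file) ride along with the label. A generic sketch of that idiom, with illustrative names:

import { window } from "vscode";

// Resolve to the payload attached to whichever option the user picks,
// or undefined if the quick pick is cancelled.
async function pickOne<T>(
  items: Array<{ label: string; payload: T }>,
  title: string,
): Promise<T | undefined> {
  const chosen = await window.showQuickPick(items, { title });
  return chosen?.payload;
}
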
@@ -3,12 +3,16 @@ import { qlpackOfDatabase } from "../contextual/queryResolver";
|
||||
import { file } from "tmp-promise";
|
||||
import { writeFile } from "fs-extra";
|
||||
import { dump as dumpYaml } from "js-yaml";
|
||||
import { getOnDiskWorkspaceFolders } from "../helpers";
|
||||
import {
|
||||
getOnDiskWorkspaceFolders,
|
||||
showAndLogExceptionWithTelemetry,
|
||||
} from "../helpers";
|
||||
import { Logger, TeeLogger } from "../common";
|
||||
import { CancellationToken } from "vscode";
|
||||
import { CodeQLCliServer } from "../cli";
|
||||
import { DatabaseItem } from "../local-databases";
|
||||
import { ProgressCallback } from "../progress";
|
||||
import { redactableError } from "../pure/errors";
|
||||
|
||||
export type RunQueryOptions = {
|
||||
cliServer: Pick<CodeQLCliServer, "resolveQlpacks" | "resolveQueriesInSuite">;
|
||||
@@ -92,18 +96,16 @@ export async function runQuery({
|
||||
export type GetResultsOptions = {
|
||||
cliServer: Pick<CodeQLCliServer, "bqrsInfo" | "bqrsDecode">;
|
||||
bqrsPath: string;
|
||||
logger: Logger;
|
||||
};
|
||||
|
||||
export async function readQueryResults({
|
||||
cliServer,
|
||||
bqrsPath,
|
||||
logger,
|
||||
}: GetResultsOptions) {
|
||||
const bqrsInfo = await cliServer.bqrsInfo(bqrsPath);
|
||||
if (bqrsInfo["result-sets"].length !== 1) {
|
||||
void logger.log(
|
||||
`Expected exactly one result set, got ${bqrsInfo["result-sets"].length}`,
|
||||
void showAndLogExceptionWithTelemetry(
|
||||
redactableError`Expected exactly one result set, got ${bqrsInfo["result-sets"].length}`,
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
@@ -6,11 +6,16 @@ import { CodeQLCliServer } from "../cli";
|
||||
import { TeeLogger } from "../common";
|
||||
import { extensiblePredicateDefinitions } from "./yaml";
|
||||
import { ProgressCallback } from "../progress";
|
||||
import { getOnDiskWorkspaceFolders } from "../helpers";
|
||||
import {
|
||||
getOnDiskWorkspaceFolders,
|
||||
showAndLogExceptionWithTelemetry,
|
||||
} from "../helpers";
|
||||
import {
|
||||
ModeledMethodType,
|
||||
ModeledMethodWithSignature,
|
||||
} from "./modeled-method";
|
||||
import { redactableError } from "../pure/errors";
|
||||
import { QueryResultType } from "../pure/new-messages";
|
||||
|
||||
type FlowModelOptions = {
|
||||
cliServer: CodeQLCliServer;
|
||||
@@ -67,13 +72,21 @@ async function getModeledMethodsFromFlow(
|
||||
token,
|
||||
new TeeLogger(queryRunner.logger, queryRun.outputDir.logPath),
|
||||
);
|
||||
if (queryResult.resultType !== QueryResultType.SUCCESS) {
|
||||
void showAndLogExceptionWithTelemetry(
|
||||
redactableError`Failed to run ${queryName} query: ${
|
||||
queryResult.message ?? "No message"
|
||||
}`,
|
||||
);
|
||||
return [];
|
||||
}
|
||||
|
||||
const bqrsPath = queryResult.outputDir.bqrsPath;
|
||||
|
||||
const bqrsInfo = await cliServer.bqrsInfo(bqrsPath);
|
||||
if (bqrsInfo["result-sets"].length !== 1) {
|
||||
throw new Error(
|
||||
`Expected exactly one result set, got ${bqrsInfo["result-sets"].length}`,
|
||||
void showAndLogExceptionWithTelemetry(
|
||||
redactableError`Expected exactly one result set, got ${bqrsInfo["result-sets"].length} for ${queryName}`,
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import Ajv from "ajv";
|
||||
|
||||
import { ExternalApiUsage } from "./external-api-usage";
|
||||
import {
|
||||
ModeledMethod,
|
||||
@@ -5,6 +7,11 @@ import {
|
||||
ModeledMethodWithSignature,
|
||||
} from "./modeled-method";
|
||||
|
||||
import * as dataSchemaJson from "./data-schema.json";
|
||||
|
||||
const ajv = new Ajv({ allErrors: true });
|
||||
const dataSchemaValidate = ajv.compile(dataSchemaJson);
|
||||
|
||||
type ExternalApiUsageByType = {
|
||||
externalApiUsage: ExternalApiUsage;
|
||||
modeledMethod: ModeledMethod;
|
||||
@@ -191,8 +198,14 @@ ${extensions.join("\n")}`;
|
||||
export function loadDataExtensionYaml(
|
||||
data: any,
|
||||
): Record<string, ModeledMethod> | undefined {
|
||||
if (typeof data !== "object") {
|
||||
return undefined;
|
||||
dataSchemaValidate(data);
|
||||
|
||||
if (dataSchemaValidate.errors) {
|
||||
throw new Error(
|
||||
`Invalid data extension YAML: ${dataSchemaValidate.errors
|
||||
.map((error) => `${error.instancePath} ${error.message}`)
|
||||
.join(", ")}`,
|
||||
);
|
||||
}
|
||||
|
||||
const extensions = data.extensions;
|
||||
@@ -204,19 +217,8 @@ export function loadDataExtensionYaml(
|
||||
|
||||
for (const extension of extensions) {
|
||||
const addsTo = extension.addsTo;
|
||||
if (typeof addsTo !== "object") {
|
||||
continue;
|
||||
}
|
||||
|
||||
const extensible = addsTo.extensible;
|
||||
if (typeof extensible !== "string") {
|
||||
continue;
|
||||
}
|
||||
|
||||
const data = extension.data;
|
||||
if (!Array.isArray(data)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const definition = Object.values(extensiblePredicateDefinitions).find(
|
||||
(definition) => definition.extensiblePredicate === extensible,
|
||||
|
||||
@@ -238,7 +238,7 @@ export class LocalQueries extends DisposableObject {
|
||||
"codeQL.quickEvalContextEditor": this.quickEval.bind(this),
|
||||
"codeQL.codeLensQuickEval": this.codeLensQuickEval.bind(this),
|
||||
"codeQL.quickQuery": this.quickQuery.bind(this),
|
||||
"codeQL.createSkeletonQuery": this.createSkeletonQuery.bind(this),
|
||||
"codeQL.createQuery": this.createSkeletonQuery.bind(this),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -267,7 +267,7 @@ export class LocalQueries extends DisposableObject {
|
||||
);
|
||||
}
|
||||
|
||||
private async runQueries(_: Uri | undefined, multi: Uri[]): Promise<void> {
|
||||
private async runQueries(_: unknown, multi: Uri[]): Promise<void> {
|
||||
await withProgress(
|
||||
async (progress, token) => {
|
||||
const maxQueryCount = MAX_QUERIES.getValue() as number;
|
||||
@@ -381,6 +381,8 @@ export class LocalQueries extends DisposableObject {
|
||||
await withProgress(
|
||||
async (progress: ProgressCallback, token: CancellationToken) => {
|
||||
const credentials = isCanary() ? this.app.credentials : undefined;
|
||||
const contextStoragePath =
|
||||
this.app.workspaceStoragePath || this.app.globalStoragePath;
|
||||
const skeletonQueryWizard = new SkeletonQueryWizard(
|
||||
this.cliServer,
|
||||
progress,
|
||||
@@ -388,6 +390,7 @@ export class LocalQueries extends DisposableObject {
|
||||
extLogger,
|
||||
this.databaseManager,
|
||||
token,
|
||||
contextStoragePath,
|
||||
);
|
||||
await skeletonQueryWizard.execute();
|
||||
},
|
||||
|
||||
@@ -39,10 +39,7 @@ import {
|
||||
QueryStatus,
|
||||
variantAnalysisStatusToQueryStatus,
|
||||
} from "../query-status";
|
||||
import {
|
||||
readQueryHistoryFromFile,
|
||||
writeQueryHistoryToFile,
|
||||
} from "./store/query-history-store";
|
||||
import { readQueryHistoryFromFile, writeQueryHistoryToFile } from "./store";
|
||||
import { pathExists } from "fs-extra";
|
||||
import { CliVersionConstraint } from "../cli";
|
||||
import { HistoryItemLabelProvider } from "./history-item-label-provider";
|
||||
@@ -402,8 +399,8 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
|
||||
async handleOpenQuery(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
singleItem: QueryHistoryInfo | undefined,
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
): Promise<void> {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -465,8 +462,8 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
|
||||
async handleRemoveHistoryItem(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[] = [],
|
||||
singleItem: QueryHistoryInfo | undefined,
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -566,14 +563,14 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleRenameItem(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
): Promise<void> {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
multiSelect,
|
||||
);
|
||||
|
||||
if (!this.assertSingleQuery(finalMultiSelect)) {
|
||||
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -595,7 +592,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleCompareWith(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -633,8 +630,8 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
|
||||
async handleItemClicked(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[] = [],
|
||||
singleItem: QueryHistoryInfo | undefined,
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -668,7 +665,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleShowQueryLog(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
// Local queries only
|
||||
if (!this.assertSingleQuery(multiSelect) || singleItem?.t !== "local") {
|
||||
@@ -709,7 +706,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleOpenQueryDirectory(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -783,7 +780,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleShowEvalLog(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -811,7 +808,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleShowEvalLogSummary(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -849,7 +846,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleShowEvalLogViewer(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -889,7 +886,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleCancel(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -954,7 +951,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleViewSarifAlerts(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -988,7 +985,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleViewCsvResults(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -1016,7 +1013,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleViewCsvAlerts(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -1044,7 +1041,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleViewDil(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -1071,7 +1068,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleOpenOnGithub(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -1096,7 +1093,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleCopyRepoList(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -1120,7 +1117,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
async handleExportResults(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
): Promise<void> {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(
|
||||
singleItem,
|
||||
@@ -1295,10 +1292,10 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
* @param multiSelect a multi-select or undefined if no items are selected
|
||||
*/
|
||||
private determineSelection(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
singleItem: QueryHistoryInfo | undefined,
|
||||
multiSelect: QueryHistoryInfo[] | undefined,
|
||||
): {
|
||||
finalSingleItem: QueryHistoryInfo;
|
||||
finalSingleItem: QueryHistoryInfo | undefined;
|
||||
finalMultiSelect: QueryHistoryInfo[];
|
||||
} {
|
||||
if (!singleItem && !multiSelect?.[0]) {
|
||||
@@ -1325,7 +1322,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
return {
|
||||
finalSingleItem: singleItem,
|
||||
finalMultiSelect: multiSelect,
|
||||
finalMultiSelect: multiSelect || [],
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -1,105 +0,0 @@
|
||||
import {
|
||||
LocalQueryInfo,
|
||||
CompletedQueryInfo,
|
||||
InitialQueryInfo,
|
||||
} from "../../query-results";
|
||||
import { QueryEvaluationInfo } from "../../run-queries-shared";
|
||||
import { QueryHistoryInfo } from "../query-history-info";
|
||||
import { VariantAnalysisHistoryItem } from "../variant-analysis-history-item";
|
||||
import {
|
||||
CompletedQueryInfoData,
|
||||
QueryEvaluationInfoData,
|
||||
InitialQueryInfoData,
|
||||
LocalQueryDataItem,
|
||||
} from "./local-query-data-item";
|
||||
import { QueryHistoryDataItem } from "./query-history-data";
|
||||
|
||||
// Maps Query History Data Models to Domain Models
|
||||
|
||||
export function mapQueryHistoryToDomainModels(
|
||||
queries: QueryHistoryDataItem[],
|
||||
): QueryHistoryInfo[] {
|
||||
return queries.map((d) => {
|
||||
if (d.t === "variant-analysis") {
|
||||
const query: VariantAnalysisHistoryItem = d;
|
||||
return query;
|
||||
} else if (d.t === "local") {
|
||||
return mapLocalQueryDataItemToDomainModel(d);
|
||||
}
|
||||
|
||||
throw Error(
|
||||
`Unexpected or corrupted query history file. Unknown query history item: ${JSON.stringify(
|
||||
d,
|
||||
)}`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function mapLocalQueryDataItemToDomainModel(
|
||||
localQuery: LocalQueryDataItem,
|
||||
): LocalQueryInfo {
|
||||
return new LocalQueryInfo(
|
||||
mapInitialQueryInfoDataToDomainModel(localQuery.initialInfo),
|
||||
undefined,
|
||||
localQuery.failureReason,
|
||||
localQuery.completedQuery &&
|
||||
mapCompletedQueryInfoDataToDomainModel(localQuery.completedQuery),
|
||||
localQuery.evalLogLocation,
|
||||
localQuery.evalLogSummaryLocation,
|
||||
localQuery.jsonEvalLogSummaryLocation,
|
||||
localQuery.evalLogSummarySymbolsLocation,
|
||||
);
|
||||
}
|
||||
|
||||
function mapCompletedQueryInfoDataToDomainModel(
|
||||
completedQuery: CompletedQueryInfoData,
|
||||
): CompletedQueryInfo {
|
||||
return new CompletedQueryInfo(
|
||||
mapQueryEvaluationInfoDataToDomainModel(completedQuery.query),
|
||||
{
|
||||
runId: completedQuery.result.runId,
|
||||
queryId: completedQuery.result.queryId,
|
||||
resultType: completedQuery.result.resultType,
|
||||
evaluationTime: completedQuery.result.evaluationTime,
|
||||
message: completedQuery.result.message,
|
||||
logFileLocation: completedQuery.result.logFileLocation,
|
||||
},
|
||||
completedQuery.logFileLocation,
|
||||
completedQuery.successful ?? completedQuery.sucessful,
|
||||
completedQuery.message,
|
||||
completedQuery.interpretedResultsSortState,
|
||||
completedQuery.resultCount,
|
||||
completedQuery.sortedResultsInfo,
|
||||
);
|
||||
}
|
||||
|
||||
function mapInitialQueryInfoDataToDomainModel(
|
||||
initialInfo: InitialQueryInfoData,
|
||||
): InitialQueryInfo {
|
||||
return {
|
||||
userSpecifiedLabel: initialInfo.userSpecifiedLabel,
|
||||
queryText: initialInfo.queryText,
|
||||
isQuickQuery: initialInfo.isQuickQuery,
|
||||
isQuickEval: initialInfo.isQuickEval,
|
||||
quickEvalPosition: initialInfo.quickEvalPosition,
|
||||
queryPath: initialInfo.queryPath,
|
||||
databaseInfo: {
|
||||
databaseUri: initialInfo.databaseInfo.databaseUri,
|
||||
name: initialInfo.databaseInfo.name,
|
||||
},
|
||||
start: new Date(initialInfo.start),
|
||||
id: initialInfo.id,
|
||||
};
|
||||
}
|
||||
|
||||
function mapQueryEvaluationInfoDataToDomainModel(
|
||||
evaluationInfo: QueryEvaluationInfoData,
|
||||
): QueryEvaluationInfo {
|
||||
return new QueryEvaluationInfo(
|
||||
evaluationInfo.querySaveDir,
|
||||
evaluationInfo.dbItemPath,
|
||||
evaluationInfo.databaseHasMetadataFile,
|
||||
evaluationInfo.quickEvalPosition,
|
||||
evaluationInfo.metadata,
|
||||
);
|
||||
}
|
||||
@@ -1,90 +0,0 @@
|
||||
import { assertNever } from "../../pure/helpers-pure";
|
||||
import { LocalQueryInfo, InitialQueryInfo } from "../../query-results";
|
||||
import { QueryEvaluationInfo } from "../../run-queries-shared";
|
||||
import { QueryHistoryInfo } from "../query-history-info";
|
||||
import {
|
||||
LocalQueryDataItem,
|
||||
InitialQueryInfoData,
|
||||
QueryEvaluationInfoData,
|
||||
} from "./local-query-data-item";
|
||||
import { QueryHistoryDataItem } from "./query-history-data";
|
||||
import { VariantAnalysisDataItem } from "./variant-analysis-data-item";
|
||||
|
||||
// Maps Query History Domain Models to Data Models
|
||||
|
||||
export function mapQueryHistoryToDataModels(
|
||||
queries: QueryHistoryInfo[],
|
||||
): QueryHistoryDataItem[] {
|
||||
return queries.map((q) => {
|
||||
if (q.t === "variant-analysis") {
|
||||
const query: VariantAnalysisDataItem = q;
|
||||
return query;
|
||||
} else if (q.t === "local") {
|
||||
return mapLocalQueryInfoToDataModel(q);
|
||||
} else {
|
||||
assertNever(q);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function mapLocalQueryInfoToDataModel(
|
||||
query: LocalQueryInfo,
|
||||
): LocalQueryDataItem {
|
||||
return {
|
||||
initialInfo: mapInitialQueryInfoToDataModel(query.initialInfo),
|
||||
t: "local",
|
||||
evalLogLocation: query.evalLogLocation,
|
||||
evalLogSummaryLocation: query.evalLogSummaryLocation,
|
||||
jsonEvalLogSummaryLocation: query.jsonEvalLogSummaryLocation,
|
||||
evalLogSummarySymbolsLocation: query.evalLogSummarySymbolsLocation,
|
||||
failureReason: query.failureReason,
|
||||
completedQuery: query.completedQuery && {
|
||||
query: mapQueryEvaluationInfoToDataModel(query.completedQuery.query),
|
||||
result: {
|
||||
runId: query.completedQuery.result.runId,
|
||||
queryId: query.completedQuery.result.queryId,
|
||||
resultType: query.completedQuery.result.resultType,
|
||||
evaluationTime: query.completedQuery.result.evaluationTime,
|
||||
message: query.completedQuery.result.message,
|
||||
logFileLocation: query.completedQuery.result.logFileLocation,
|
||||
},
|
||||
logFileLocation: query.completedQuery.logFileLocation,
|
||||
successful: query.completedQuery.successful,
|
||||
message: query.completedQuery.message,
|
||||
resultCount: query.completedQuery.resultCount,
|
||||
sortedResultsInfo: query.completedQuery.sortedResultsInfo,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function mapInitialQueryInfoToDataModel(
|
||||
localQueryInitialInfo: InitialQueryInfo,
|
||||
): InitialQueryInfoData {
|
||||
return {
|
||||
userSpecifiedLabel: localQueryInitialInfo.userSpecifiedLabel,
|
||||
queryText: localQueryInitialInfo.queryText,
|
||||
isQuickQuery: localQueryInitialInfo.isQuickQuery,
|
||||
isQuickEval: localQueryInitialInfo.isQuickEval,
|
||||
quickEvalPosition: localQueryInitialInfo.quickEvalPosition,
|
||||
queryPath: localQueryInitialInfo.queryPath,
|
||||
databaseInfo: {
|
||||
databaseUri: localQueryInitialInfo.databaseInfo.databaseUri,
|
||||
name: localQueryInitialInfo.databaseInfo.name,
|
||||
},
|
||||
start: localQueryInitialInfo.start,
|
||||
id: localQueryInitialInfo.id,
|
||||
};
|
||||
}
|
||||
|
||||
function mapQueryEvaluationInfoToDataModel(
|
||||
queryEvaluationInfo: QueryEvaluationInfo,
|
||||
): QueryEvaluationInfoData {
|
||||
return {
|
||||
querySaveDir: queryEvaluationInfo.querySaveDir,
|
||||
dbItemPath: queryEvaluationInfo.dbItemPath,
|
||||
databaseHasMetadataFile: queryEvaluationInfo.databaseHasMetadataFile,
|
||||
quickEvalPosition: queryEvaluationInfo.quickEvalPosition,
|
||||
metadata: queryEvaluationInfo.metadata,
|
||||
resultsPaths: queryEvaluationInfo.resultsPaths,
|
||||
};
|
||||
}
|
||||
1 extensions/ql-vscode/src/query-history/store/index.ts (new file)
@@ -0,0 +1 @@
|
||||
export * from "./query-history-store";
|
||||
@@ -1,100 +0,0 @@
|
||||
export interface LocalQueryDataItem {
|
||||
initialInfo: InitialQueryInfoData;
|
||||
t: "local";
|
||||
evalLogLocation?: string;
|
||||
evalLogSummaryLocation?: string;
|
||||
jsonEvalLogSummaryLocation?: string;
|
||||
evalLogSummarySymbolsLocation?: string;
|
||||
completedQuery?: CompletedQueryInfoData;
|
||||
failureReason?: string;
|
||||
}
|
||||
|
||||
export interface InitialQueryInfoData {
|
||||
userSpecifiedLabel?: string;
|
||||
queryText: string;
|
||||
isQuickQuery: boolean;
|
||||
isQuickEval: boolean;
|
||||
quickEvalPosition?: PositionData;
|
||||
queryPath: string;
|
||||
databaseInfo: DatabaseInfoData;
|
||||
start: Date;
|
||||
id: string;
|
||||
}
|
||||
|
||||
interface DatabaseInfoData {
|
||||
name: string;
|
||||
databaseUri: string;
|
||||
}
|
||||
|
||||
interface PositionData {
|
||||
line: number;
|
||||
column: number;
|
||||
endLine: number;
|
||||
endColumn: number;
|
||||
fileName: string;
|
||||
}
|
||||
|
||||
export interface CompletedQueryInfoData {
|
||||
query: QueryEvaluationInfoData;
|
||||
message?: string;
|
||||
successful?: boolean;
|
||||
|
||||
// There once was a typo in the data model, which is why we need to support both
|
||||
sucessful?: boolean;
|
||||
result: EvaluationResultData;
|
||||
logFileLocation?: string;
|
||||
resultCount: number;
|
||||
sortedResultsInfo: Record<string, SortedResultSetInfo>;
|
||||
interpretedResultsSortState?: InterpretedResultsSortState;
|
||||
}
|
||||
|
||||
interface InterpretedResultsSortState {
|
||||
sortBy: InterpretedResultsSortColumn;
|
||||
sortDirection: SortDirection;
|
||||
}
|
||||
|
||||
type InterpretedResultsSortColumn = "alert-message";
|
||||
|
||||
interface SortedResultSetInfo {
|
||||
resultsPath: string;
|
||||
sortState: RawResultsSortState;
|
||||
}
|
||||
|
||||
interface RawResultsSortState {
|
||||
columnIndex: number;
|
||||
sortDirection: SortDirection;
|
||||
}
|
||||
|
||||
enum SortDirection {
|
||||
asc,
|
||||
desc,
|
||||
}
|
||||
|
||||
interface EvaluationResultData {
|
||||
runId: number;
|
||||
queryId: number;
|
||||
resultType: number;
|
||||
evaluationTime: number;
|
||||
message?: string;
|
||||
logFileLocation?: string;
|
||||
}
|
||||
|
||||
export interface QueryEvaluationInfoData {
|
||||
querySaveDir: string;
|
||||
dbItemPath: string;
|
||||
databaseHasMetadataFile: boolean;
|
||||
quickEvalPosition?: PositionData;
|
||||
metadata?: QueryMetadataData;
|
||||
resultsPaths: {
|
||||
resultsPath: string;
|
||||
interpretedResultsPath: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface QueryMetadataData {
|
||||
name?: string;
|
||||
description?: string;
|
||||
id?: string;
|
||||
kind?: string;
|
||||
scored?: string;
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
// Contains models and consts for the data we want to store in the query history store.
|
||||
// Changes to these models should be done carefully and account for backwards compatibility of data.
|
||||
|
||||
import { LocalQueryDataItem } from "./local-query-data-item";
|
||||
import { VariantAnalysisDataItem } from "./variant-analysis-data-item";
|
||||
|
||||
export const ALLOWED_QUERY_HISTORY_VERSIONS = [1, 2];
|
||||
|
||||
export interface QueryHistoryData {
|
||||
version: number;
|
||||
queries: QueryHistoryDataItem[];
|
||||
}
|
||||
|
||||
export type QueryHistoryDataItem = LocalQueryDataItem | VariantAnalysisDataItem;
|
||||
@@ -0,0 +1,140 @@
|
||||
import { assertNever } from "../../pure/helpers-pure";
|
||||
import {
|
||||
LocalQueryInfo,
|
||||
InitialQueryInfo,
|
||||
CompletedQueryInfo,
|
||||
} from "../../query-results";
|
||||
import { QueryEvaluationInfo } from "../../run-queries-shared";
|
||||
import { QueryHistoryInfo } from "../query-history-info";
|
||||
import {
|
||||
QueryHistoryLocalQueryDto,
|
||||
InitialQueryInfoDto,
|
||||
QueryEvaluationInfoDto,
|
||||
CompletedQueryInfoDto,
|
||||
SortedResultSetInfoDto,
|
||||
SortDirectionDto,
|
||||
} from "./query-history-local-query-dto";
|
||||
import { QueryHistoryItemDto } from "./query-history-dto";
|
||||
import { QueryHistoryVariantAnalysisDto } from "./query-history-variant-analysis-dto";
|
||||
import {
|
||||
RawResultsSortState,
|
||||
SortDirection,
|
||||
SortedResultSetInfo,
|
||||
} from "../../pure/interface-types";
|
||||
|
||||
export function mapQueryHistoryToDto(
|
||||
queries: QueryHistoryInfo[],
|
||||
): QueryHistoryItemDto[] {
|
||||
return queries.map((q) => {
|
||||
if (q.t === "variant-analysis") {
|
||||
const query: QueryHistoryVariantAnalysisDto = q;
|
||||
return query;
|
||||
} else if (q.t === "local") {
|
||||
return mapLocalQueryInfoToDto(q);
|
||||
} else {
|
||||
assertNever(q);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function mapLocalQueryInfoToDto(
|
||||
query: LocalQueryInfo,
|
||||
): QueryHistoryLocalQueryDto {
|
||||
return {
|
||||
initialInfo: mapInitialQueryInfoToDto(query.initialInfo),
|
||||
t: "local",
|
||||
evalLogLocation: query.evalLogLocation,
|
||||
evalLogSummaryLocation: query.evalLogSummaryLocation,
|
||||
jsonEvalLogSummaryLocation: query.jsonEvalLogSummaryLocation,
|
||||
evalLogSummarySymbolsLocation: query.evalLogSummarySymbolsLocation,
|
||||
failureReason: query.failureReason,
|
||||
completedQuery:
|
||||
query.completedQuery && mapCompletedQueryToDto(query.completedQuery),
|
||||
};
|
||||
}
|
||||
|
||||
function mapCompletedQueryToDto(
|
||||
query: CompletedQueryInfo,
|
||||
): CompletedQueryInfoDto {
|
||||
const sortedResults = Object.fromEntries(
|
||||
Object.entries(query.sortedResultsInfo).map(([key, value]) => {
|
||||
return [key, mapSortedResultSetInfoToDto(value)];
|
||||
}),
|
||||
);
|
||||
|
||||
return {
|
||||
query: mapQueryEvaluationInfoToDto(query.query),
|
||||
result: {
|
||||
runId: query.result.runId,
|
||||
queryId: query.result.queryId,
|
||||
resultType: query.result.resultType,
|
||||
evaluationTime: query.result.evaluationTime,
|
||||
message: query.result.message,
|
||||
      logFileLocation: query.result.logFileLocation,
    },
    logFileLocation: query.logFileLocation,
    successful: query.successful,
    message: query.message,
    resultCount: query.resultCount,
    sortedResultsInfo: sortedResults,
  };
}

function mapSortDirectionToDto(sortDirection: SortDirection): SortDirectionDto {
  switch (sortDirection) {
    case SortDirection.asc:
      return SortDirectionDto.asc;
    case SortDirection.desc:
      return SortDirectionDto.desc;
  }
}

function mapRawResultsSortStateToDto(
  sortState: RawResultsSortState,
): SortedResultSetInfoDto["sortState"] {
  return {
    columnIndex: sortState.columnIndex,
    sortDirection: mapSortDirectionToDto(sortState.sortDirection),
  };
}

function mapSortedResultSetInfoToDto(
  resultSet: SortedResultSetInfo,
): SortedResultSetInfoDto {
  return {
    resultsPath: resultSet.resultsPath,
    sortState: mapRawResultsSortStateToDto(resultSet.sortState),
  };
}

function mapInitialQueryInfoToDto(
  localQueryInitialInfo: InitialQueryInfo,
): InitialQueryInfoDto {
  return {
    userSpecifiedLabel: localQueryInitialInfo.userSpecifiedLabel,
    queryText: localQueryInitialInfo.queryText,
    isQuickQuery: localQueryInitialInfo.isQuickQuery,
    isQuickEval: localQueryInitialInfo.isQuickEval,
    quickEvalPosition: localQueryInitialInfo.quickEvalPosition,
    queryPath: localQueryInitialInfo.queryPath,
    databaseInfo: {
      databaseUri: localQueryInitialInfo.databaseInfo.databaseUri,
      name: localQueryInitialInfo.databaseInfo.name,
    },
    start: localQueryInitialInfo.start,
    id: localQueryInitialInfo.id,
  };
}

function mapQueryEvaluationInfoToDto(
  queryEvaluationInfo: QueryEvaluationInfo,
): QueryEvaluationInfoDto {
  return {
    querySaveDir: queryEvaluationInfo.querySaveDir,
    dbItemPath: queryEvaluationInfo.dbItemPath,
    databaseHasMetadataFile: queryEvaluationInfo.databaseHasMetadataFile,
    quickEvalPosition: queryEvaluationInfo.quickEvalPosition,
    metadata: queryEvaluationInfo.metadata,
    resultsPaths: queryEvaluationInfo.resultsPaths,
  };
}
@@ -0,0 +1,163 @@
import {
  LocalQueryInfo,
  CompletedQueryInfo,
  InitialQueryInfo,
} from "../../query-results";
import { QueryEvaluationInfo } from "../../run-queries-shared";
import { QueryHistoryInfo } from "../query-history-info";
import { VariantAnalysisHistoryItem } from "../variant-analysis-history-item";
import {
  CompletedQueryInfoDto,
  QueryEvaluationInfoDto,
  InitialQueryInfoDto,
  QueryHistoryLocalQueryDto,
  SortDirectionDto,
  InterpretedResultsSortStateDto,
  SortedResultSetInfoDto,
  RawResultsSortStateDto,
} from "./query-history-local-query-dto";
import { QueryHistoryItemDto } from "./query-history-dto";
import {
  InterpretedResultsSortState,
  RawResultsSortState,
  SortDirection,
  SortedResultSetInfo,
} from "../../pure/interface-types";

export function mapQueryHistoryToDomainModel(
  queries: QueryHistoryItemDto[],
): QueryHistoryInfo[] {
  return queries.map((d) => {
    if (d.t === "variant-analysis") {
      const query: VariantAnalysisHistoryItem = d;
      return query;
    } else if (d.t === "local") {
      return mapLocalQueryItemToDomainModel(d);
    }

    throw Error(
      `Unexpected or corrupted query history file. Unknown query history item: ${JSON.stringify(
        d,
      )}`,
    );
  });
}

function mapLocalQueryItemToDomainModel(
  localQuery: QueryHistoryLocalQueryDto,
): LocalQueryInfo {
  return new LocalQueryInfo(
    mapInitialQueryInfoToDomainModel(localQuery.initialInfo),
    undefined,
    localQuery.failureReason,
    localQuery.completedQuery &&
      mapCompletedQueryInfoToDomainModel(localQuery.completedQuery),
    localQuery.evalLogLocation,
    localQuery.evalLogSummaryLocation,
    localQuery.jsonEvalLogSummaryLocation,
    localQuery.evalLogSummarySymbolsLocation,
  );
}

function mapCompletedQueryInfoToDomainModel(
  completedQuery: CompletedQueryInfoDto,
): CompletedQueryInfo {
  const sortState =
    completedQuery.interpretedResultsSortState &&
    mapSortStateToDomainModel(completedQuery.interpretedResultsSortState);

  const sortedResults = Object.fromEntries(
    Object.entries(completedQuery.sortedResultsInfo).map(([key, value]) => {
      return [key, mapSortedResultSetInfoToDomainModel(value)];
    }),
  );

  return new CompletedQueryInfo(
    mapQueryEvaluationInfoToDomainModel(completedQuery.query),
    {
      runId: completedQuery.result.runId,
      queryId: completedQuery.result.queryId,
      resultType: completedQuery.result.resultType,
      evaluationTime: completedQuery.result.evaluationTime,
      message: completedQuery.result.message,
      logFileLocation: completedQuery.result.logFileLocation,
    },
    completedQuery.logFileLocation,
    completedQuery.successful ?? completedQuery.sucessful,
    completedQuery.message,
    sortState,
    completedQuery.resultCount,
    sortedResults,
  );
}

function mapInitialQueryInfoToDomainModel(
  initialInfo: InitialQueryInfoDto,
): InitialQueryInfo {
  return {
    userSpecifiedLabel: initialInfo.userSpecifiedLabel,
    queryText: initialInfo.queryText,
    isQuickQuery: initialInfo.isQuickQuery,
    isQuickEval: initialInfo.isQuickEval,
    quickEvalPosition: initialInfo.quickEvalPosition,
    queryPath: initialInfo.queryPath,
    databaseInfo: {
      databaseUri: initialInfo.databaseInfo.databaseUri,
      name: initialInfo.databaseInfo.name,
    },
    start: new Date(initialInfo.start),
    id: initialInfo.id,
  };
}

function mapQueryEvaluationInfoToDomainModel(
  evaluationInfo: QueryEvaluationInfoDto,
): QueryEvaluationInfo {
  return new QueryEvaluationInfo(
    evaluationInfo.querySaveDir,
    evaluationInfo.dbItemPath,
    evaluationInfo.databaseHasMetadataFile,
    evaluationInfo.quickEvalPosition,
    evaluationInfo.metadata,
  );
}

function mapSortDirectionToDomainModel(
  sortDirection: SortDirectionDto,
): SortDirection {
  switch (sortDirection) {
    case SortDirectionDto.asc:
      return SortDirection.asc;
    case SortDirectionDto.desc:
      return SortDirection.desc;
  }
}

function mapSortStateToDomainModel(
  sortState: InterpretedResultsSortStateDto,
): InterpretedResultsSortState {
  return {
    sortBy: sortState.sortBy,
    sortDirection: mapSortDirectionToDomainModel(sortState.sortDirection),
  };
}

function mapSortedResultSetInfoToDomainModel(
  sortedResultSetInfo: SortedResultSetInfoDto,
): SortedResultSetInfo {
  return {
    resultsPath: sortedResultSetInfo.resultsPath,
    sortState: mapRawResultsSortStateToDomainModel(
      sortedResultSetInfo.sortState,
    ),
  };
}

function mapRawResultsSortStateToDomainModel(
  sortState: RawResultsSortStateDto,
): RawResultsSortState {
  return {
    columnIndex: sortState.columnIndex,
    sortDirection: mapSortDirectionToDomainModel(sortState.sortDirection),
  };
}
@@ -0,0 +1,14 @@
// Contains models and consts for the data we want to store in the query history store.
// Changes to these models should be done carefully and account for backwards compatibility of data.

import { QueryHistoryLocalQueryDto } from "./query-history-local-query-dto";
import { QueryHistoryVariantAnalysisDto } from "./query-history-variant-analysis-dto";

export interface QueryHistoryDto {
  version: number;
  queries: QueryHistoryItemDto[];
}

export type QueryHistoryItemDto =
  | QueryHistoryLocalQueryDto
  | QueryHistoryVariantAnalysisDto;
@@ -0,0 +1,103 @@
// Contains models and consts for the data we want to store in the query history store.
// Changes to these models should be done carefully and account for backwards compatibility of data.

export interface QueryHistoryLocalQueryDto {
  initialInfo: InitialQueryInfoDto;
  t: "local";
  evalLogLocation?: string;
  evalLogSummaryLocation?: string;
  jsonEvalLogSummaryLocation?: string;
  evalLogSummarySymbolsLocation?: string;
  completedQuery?: CompletedQueryInfoDto;
  failureReason?: string;
}

export interface InitialQueryInfoDto {
  userSpecifiedLabel?: string;
  queryText: string;
  isQuickQuery: boolean;
  isQuickEval: boolean;
  quickEvalPosition?: PositionDto;
  queryPath: string;
  databaseInfo: DatabaseInfoDto;
  start: Date;
  id: string;
}

interface DatabaseInfoDto {
  name: string;
  databaseUri: string;
}

interface PositionDto {
  line: number;
  column: number;
  endLine: number;
  endColumn: number;
  fileName: string;
}

export interface CompletedQueryInfoDto {
  query: QueryEvaluationInfoDto;
  message?: string;
  successful?: boolean;

  // There once was a typo in the data model, which is why we need to support both
  sucessful?: boolean;
  result: EvaluationResultDto;
  logFileLocation?: string;
  resultCount: number;
  sortedResultsInfo: Record<string, SortedResultSetInfoDto>;
  interpretedResultsSortState?: InterpretedResultsSortStateDto;
}

export interface InterpretedResultsSortStateDto {
  sortBy: InterpretedResultsSortColumnDto;
  sortDirection: SortDirectionDto;
}

type InterpretedResultsSortColumnDto = "alert-message";

export interface SortedResultSetInfoDto {
  resultsPath: string;
  sortState: RawResultsSortStateDto;
}

export interface RawResultsSortStateDto {
  columnIndex: number;
  sortDirection: SortDirectionDto;
}

export enum SortDirectionDto {
  asc,
  desc,
}

interface EvaluationResultDto {
  runId: number;
  queryId: number;
  resultType: number;
  evaluationTime: number;
  message?: string;
  logFileLocation?: string;
}

export interface QueryEvaluationInfoDto {
  querySaveDir: string;
  dbItemPath: string;
  databaseHasMetadataFile: boolean;
  quickEvalPosition?: PositionDto;
  metadata?: QueryMetadataDto;
  resultsPaths: {
    resultsPath: string;
    interpretedResultsPath: string;
  };
}

interface QueryMetadataDto {
  name?: string;
  description?: string;
  id?: string;
  kind?: string;
  scored?: string;
}
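
// Illustrative note (not part of this change): the legacy `sucessful` field above is only
// consulted when the correctly spelled `successful` field is absent, as the DTO mapper does.
// A minimal sketch, where `record` stands for a deserialized CompletedQueryInfoDto:
//   const successful = record.successful ?? record.sucessful; // prefer new spelling, fall back to the old typo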
@@ -10,13 +10,11 @@ import {
} from "../../pure/helpers-pure";
import { QueryHistoryInfo } from "../query-history-info";
import { redactableError } from "../../pure/errors";
import {
  ALLOWED_QUERY_HISTORY_VERSIONS,
  QueryHistoryData,
  QueryHistoryDataItem,
} from "./query-history-data";
import { mapQueryHistoryToDomainModels } from "./data-mapper";
import { mapQueryHistoryToDataModels } from "./domain-mapper";
import { QueryHistoryDto, QueryHistoryItemDto } from "./query-history-dto";
import { mapQueryHistoryToDomainModel } from "./query-history-dto-mapper";
import { mapQueryHistoryToDto } from "./query-history-domain-mapper";

const ALLOWED_QUERY_HISTORY_VERSIONS = [1, 2];

export async function readQueryHistoryFromFile(
  fsPath: string,
@@ -26,7 +24,7 @@ export async function readQueryHistoryFromFile(
    return [];
  }

  const obj: QueryHistoryData = await readJson(fsPath, {
  const obj: QueryHistoryDto = await readJson(fsPath, {
    encoding: "utf8",
  });

@@ -40,21 +38,21 @@ export async function readQueryHistoryFromFile(
  const queries = obj.queries;
  // Remove remote queries, which are not supported anymore.
  const parsedQueries = queries.filter(
    (q: QueryHistoryDataItem | { t: "remote" }) => q.t !== "remote",
    (q: QueryHistoryItemDto | { t: "remote" }) => q.t !== "remote",
  );

  // Map the data models to the domain models.
  const domainModels: QueryHistoryInfo[] =
    mapQueryHistoryToDomainModels(parsedQueries);
    mapQueryHistoryToDomainModel(parsedQueries);

  // filter out queries that have been deleted on disk
  // Filter out queries that have been deleted on disk
  // most likely another workspace has deleted them because the
  // queries aged out.
  const filteredDomainModels: Promise<QueryHistoryInfo[]> = asyncFilter(
    domainModels,
    async (q) => {
      if (q.t === "variant-analysis") {
        // the query history store doesn't know where variant analysises are
        // The query history store doesn't know where variant analysises are
        // stored so we need to assume here that they exist. We check later
        // to see if they exist on disk.
        return true;
@@ -72,7 +70,7 @@ export async function readQueryHistoryFromFile(
        fullMessage: `Error loading query history.\n${getErrorStack(e)}`,
      },
    );
    // since the query history is invalid, it should be deleted so this error does not happen on next startup.
    // Since the query history is invalid, it should be deleted so this error does not happen on next startup.
    await remove(fsPath);
    return [];
  }
@@ -95,13 +93,13 @@ export async function writeQueryHistoryToFile(
  if (!(await pathExists(fsPath))) {
    await mkdir(dirname(fsPath), { recursive: true });
  }
  // remove incomplete local queries since they cannot be recreated on restart
  // Remove incomplete local queries since they cannot be recreated on restart
  const filteredQueries = queries.filter((q) =>
    q.t === "local" ? q.completedQuery !== undefined : true,
  );

  // map domain model queries to data model
  const queryHistoryData = mapQueryHistoryToDataModels(filteredQueries);
  // Map domain model queries to data model
  const queryHistoryData = mapQueryHistoryToDto(filteredQueries);

  const data = JSON.stringify(
    {

@@ -1,3 +1,6 @@
// Contains models and consts for the data we want to store in the query history store.
// Changes to these models should be done carefully and account for backwards compatibility of data.

import { QueryLanguage } from "../../common/query-language";
import { QueryStatus } from "../../query-status";
import {
@@ -6,20 +9,19 @@ import {
  VariantAnalysisStatus,
} from "../../variant-analysis/shared/variant-analysis";

// Data Model for Variant Analysis Query History Items
// All data points are modelled, except enums.

export interface VariantAnalysisDataItem {
export interface QueryHistoryVariantAnalysisDto {
  readonly t: "variant-analysis";
  failureReason?: string;
  resultCount?: number;
  status: QueryStatus;
  completed: boolean;
  variantAnalysis: VariantAnalysisQueryHistoryData;
  variantAnalysis: VariantAnalysisQueryHistoryDto;
  userSpecifiedLabel?: string;
}

export interface VariantAnalysisQueryHistoryData {
export interface VariantAnalysisQueryHistoryDto {
  id: number;
  controllerRepo: {
    id: number;
@@ -44,11 +46,11 @@ export interface VariantAnalysisQueryHistoryData {
  completedAt?: string;
  actionsWorkflowRunId?: number;
  failureReason?: VariantAnalysisFailureReason;
  scannedRepos?: VariantAnalysisScannedRepositoryData[];
  skippedRepos?: VariantAnalysisSkippedRepositoriesData;
  scannedRepos?: VariantAnalysisScannedRepositoryDto[];
  skippedRepos?: VariantAnalysisSkippedRepositoriesDto;
}

export interface VariantAnalysisScannedRepositoryData {
export interface VariantAnalysisScannedRepositoryDto {
  repository: {
    id: number;
    fullName: string;
@@ -62,19 +64,19 @@ export interface VariantAnalysisScannedRepositoryData {
  failureMessage?: string;
}

export interface VariantAnalysisSkippedRepositoriesData {
  accessMismatchRepos?: VariantAnalysisSkippedRepositoryGroupData;
  notFoundRepos?: VariantAnalysisSkippedRepositoryGroupData;
  noCodeqlDbRepos?: VariantAnalysisSkippedRepositoryGroupData;
  overLimitRepos?: VariantAnalysisSkippedRepositoryGroupData;
export interface VariantAnalysisSkippedRepositoriesDto {
  accessMismatchRepos?: VariantAnalysisSkippedRepositoryGroupDto;
  notFoundRepos?: VariantAnalysisSkippedRepositoryGroupDto;
  noCodeqlDbRepos?: VariantAnalysisSkippedRepositoryGroupDto;
  overLimitRepos?: VariantAnalysisSkippedRepositoryGroupDto;
}

export interface VariantAnalysisSkippedRepositoryGroupData {
export interface VariantAnalysisSkippedRepositoryGroupDto {
  repositoryCount: number;
  repositories: VariantAnalysisSkippedRepositoryData[];
  repositories: VariantAnalysisSkippedRepositoryDto[];
}

export interface VariantAnalysisSkippedRepositoryData {
export interface VariantAnalysisSkippedRepositoryDto {
  id?: number;
  fullName: string;
  private?: boolean;
@@ -1,4 +1,4 @@
import { join } from "path";
import { join, dirname } from "path";
import { CancellationToken, Uri, workspace, window as Window } from "vscode";
import { CodeQLCliServer } from "./cli";
import { OutputChannelLogger } from "./common";
@@ -8,8 +8,8 @@ import { askForLanguage, isFolderAlreadyInWorkspace } from "./helpers";
import { getErrorMessage } from "./pure/helpers-pure";
import { QlPackGenerator } from "./qlpack-generator";
import { DatabaseItem, DatabaseManager } from "./local-databases";
import * as databaseFetcher from "./databaseFetcher";
import { ProgressCallback, UserCancellationException } from "./progress";
import { askForGitHubRepo, downloadGitHubDatabase } from "./databaseFetcher";

type QueryLanguagesToDatabaseMap = Record<string, string>;

@@ -27,7 +27,7 @@ export const QUERY_LANGUAGE_TO_DATABASE_REPO: QueryLanguagesToDatabaseMap = {
export class SkeletonQueryWizard {
  private language: string | undefined;
  private fileName = "example.ql";
  private storagePath: string | undefined;
  private qlPackStoragePath: string | undefined;

  constructor(
    private readonly cliServer: CodeQLCliServer,
@@ -36,6 +36,7 @@ export class SkeletonQueryWizard {
    private readonly extLogger: OutputChannelLogger,
    private readonly databaseManager: DatabaseManager,
    private readonly token: CancellationToken,
    private readonly databaseStoragePath: string | undefined,
  ) {}

  private get folderName() {
@@ -49,7 +50,7 @@ export class SkeletonQueryWizard {
      return;
    }

    this.storagePath = this.getFirstStoragePath();
    this.qlPackStoragePath = this.getFirstStoragePath();

    const skeletonPackAlreadyExists = isFolderAlreadyInWorkspace(
      this.folderName,
@@ -68,22 +69,9 @@ export class SkeletonQueryWizard {
    }

    // open a query file
    await this.openExampleFile();
  }

  private async openExampleFile() {
    if (this.folderName === undefined || this.storagePath === undefined) {
      throw new Error("Path to folder is undefined");
    }

    const queryFileUri = Uri.file(
      join(this.storagePath, this.folderName, this.fileName),
    );

    try {
      void workspace.openTextDocument(queryFileUri).then((doc) => {
        void Window.showTextDocument(doc);
      });
      await this.openExampleFile();
    } catch (e: unknown) {
      void this.extLogger.log(
        `Could not open example query file: ${getErrorMessage(e)}`,
@@ -91,6 +79,20 @@ export class SkeletonQueryWizard {
    }
  }

  private async openExampleFile() {
    if (this.folderName === undefined || this.qlPackStoragePath === undefined) {
      throw new Error("Path to folder is undefined");
    }

    const queryFileUri = Uri.file(
      join(this.qlPackStoragePath, this.folderName, this.fileName),
    );

    void workspace.openTextDocument(queryFileUri).then((doc) => {
      void Window.showTextDocument(doc);
    });
  }

  public getFirstStoragePath() {
    const workspaceFolders = workspace.workspaceFolders;

@@ -99,15 +101,16 @@ export class SkeletonQueryWizard {
    }

    const firstFolder = workspaceFolders[0];
    const firstFolderFsPath = firstFolder.uri.fsPath;

    // For the vscode-codeql-starter repo, the first folder will be a ql pack
    // so we need to get the parent folder
    if (firstFolder.uri.path.includes("codeql-custom-queries")) {
      // slice off the last part of the path and return the parent folder
      return firstFolder.uri.path.split("/").slice(0, -1).join("/");
    if (firstFolderFsPath.includes("codeql-custom-queries")) {
      // return the parent folder
      return dirname(firstFolderFsPath);
    } else {
      // if the first folder is not a ql pack, then we are in a normal workspace
      return firstFolder.uri.path;
      return firstFolderFsPath;
    }
  }
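
// Illustrative sketch of the parent-folder logic above (not part of this change; the sample path is made up):
//   import { dirname, join } from "path";
//   // In a vscode-codeql-starter checkout the first workspace folder is itself a QL pack,
//   // so generated packs are stored next to it rather than inside it.
//   const firstFolderFsPath = join("vscode-codeql-starter", "codeql-custom-queries-cpp");
//   dirname(firstFolderFsPath); // => "vscode-codeql-starter"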
@@ -115,7 +118,7 @@ export class SkeletonQueryWizard {
    this.progress({
      message: "Choose language",
      step: 1,
      maxStep: 1,
      maxStep: 3,
    });

    return await askForLanguage(this.cliServer, false);
@@ -129,7 +132,7 @@ export class SkeletonQueryWizard {
    this.progress({
      message: "Creating skeleton QL pack around query",
      step: 2,
      maxStep: 2,
      maxStep: 3,
    });

    try {
@@ -137,7 +140,7 @@ export class SkeletonQueryWizard {
        this.folderName,
        this.language as QueryLanguage,
        this.cliServer,
        this.storagePath,
        this.qlPackStoragePath,
      );

      await qlPackGenerator.generate();
@@ -157,7 +160,7 @@ export class SkeletonQueryWizard {
      message:
        "Skeleton query pack already exists. Creating additional query example file.",
      step: 2,
      maxStep: 2,
      maxStep: 3,
    });

    try {
@@ -165,7 +168,7 @@ export class SkeletonQueryWizard {
        this.folderName,
        this.language as QueryLanguage,
        this.cliServer,
        this.storagePath,
        this.qlPackStoragePath,
      );

      this.fileName = await this.determineNextFileName(this.folderName);
@@ -178,22 +181,26 @@ export class SkeletonQueryWizard {
  }

  private async determineNextFileName(folderName: string): Promise<string> {
    if (this.storagePath === undefined) {
      throw new Error("Workspace storage path is undefined");
    if (this.qlPackStoragePath === undefined) {
      throw new Error("QL Pack storage path is undefined");
    }

    const folderUri = Uri.file(join(this.storagePath, folderName));
    const folderUri = Uri.file(join(this.qlPackStoragePath, folderName));
    const files = await workspace.fs.readDirectory(folderUri);
    const qlFiles = files.filter(([filename, _fileType]) =>
      filename.match(/example[0-9]*.ql/),
      filename.match(/^example[0-9]*\.ql$/),
    );

    return `example${qlFiles.length + 1}.ql`;
  }

  private async downloadDatabase() {
    if (this.storagePath === undefined) {
      throw new Error("Workspace storage path is undefined");
    if (this.qlPackStoragePath === undefined) {
      throw new Error("QL Pack storage path is undefined");
    }

    if (this.databaseStoragePath === undefined) {
      throw new Error("Database storage path is undefined");
    }

    if (this.language === undefined) {
@@ -207,19 +214,16 @@ export class SkeletonQueryWizard {
    });

    const githubRepoNwo = QUERY_LANGUAGE_TO_DATABASE_REPO[this.language];
    const chosenRepo = await databaseFetcher.askForGitHubRepo(
      undefined,
      githubRepoNwo,
    );
    const chosenRepo = await askForGitHubRepo(undefined, githubRepoNwo);

    if (!chosenRepo) {
      throw new UserCancellationException("No GitHub repository provided");
    }

    await databaseFetcher.downloadGitHubDatabase(
    await downloadGitHubDatabase(
      chosenRepo,
      this.databaseManager,
      this.storagePath,
      this.databaseStoragePath,
      this.credentials,
      this.progress,
      this.token,
@@ -233,8 +237,8 @@ export class SkeletonQueryWizard {
      throw new Error("Language is undefined");
    }

    if (this.storagePath === undefined) {
      throw new Error("Workspace storage path is undefined");
    if (this.qlPackStoragePath === undefined) {
      throw new Error("QL Pack storage path is undefined");
    }

    const databaseNwo = QUERY_LANGUAGE_TO_DATABASE_REPO[this.language];
@@ -273,8 +277,12 @@ export class SkeletonQueryWizard {
  ): Promise<DatabaseItem | undefined> {
    const dbItems = databaseItems || [];
    const dbs = dbItems.filter(
      (db) => db.language === language && db.name === databaseNwo,
      (db) =>
        db.language === language &&
        db.name === databaseNwo &&
        db.error === undefined,
    );

    if (dbs.length === 0) {
      return undefined;
    }
@@ -286,7 +294,9 @@ export class SkeletonQueryWizard {
    databaseItems: readonly DatabaseItem[],
  ): Promise<DatabaseItem | undefined> {
    const dbItems = databaseItems || [];
    const dbs = dbItems.filter((db) => db.language === language);
    const dbs = dbItems.filter(
      (db) => db.language === language && db.error === undefined,
    );
    if (dbs.length === 0) {
      return undefined;
    }

@@ -0,0 +1 @@
export * from "./repo-states-store";
@@ -0,0 +1,46 @@
import { assertNever } from "../../pure/helpers-pure";
import {
  VariantAnalysisScannedRepositoryState,
  VariantAnalysisScannedRepositoryDownloadStatus,
} from "../shared/variant-analysis";
import {
  VariantAnalysisScannedRepositoryStateDto,
  VariantAnalysisScannedRepositoryDownloadDto,
} from "./repo-states-dto";

export function mapRepoStatesToDomainModel(
  repoStates: Record<number, VariantAnalysisScannedRepositoryStateDto>,
): Record<number, VariantAnalysisScannedRepositoryState> {
  return Object.fromEntries(
    Object.entries(repoStates).map(([key, value]) => {
      return [key, mapRepoStateToDomainModel(value)];
    }),
  );
}

function mapRepoStateToDomainModel(
  repoState: VariantAnalysisScannedRepositoryStateDto,
): VariantAnalysisScannedRepositoryState {
  return {
    repositoryId: repoState.repositoryId,
    downloadStatus: mapDownloadStatusToDomainModel(repoState.downloadStatus),
    downloadPercentage: repoState.downloadPercentage,
  };
}

function mapDownloadStatusToDomainModel(
  downloadedStatus: VariantAnalysisScannedRepositoryDownloadDto,
) {
  switch (downloadedStatus) {
    case VariantAnalysisScannedRepositoryDownloadDto.Pending:
      return VariantAnalysisScannedRepositoryDownloadStatus.Pending;
    case VariantAnalysisScannedRepositoryDownloadDto.InProgress:
      return VariantAnalysisScannedRepositoryDownloadStatus.InProgress;
    case VariantAnalysisScannedRepositoryDownloadDto.Succeeded:
      return VariantAnalysisScannedRepositoryDownloadStatus.Succeeded;
    case VariantAnalysisScannedRepositoryDownloadDto.Failed:
      return VariantAnalysisScannedRepositoryDownloadStatus.Failed;
    default:
      assertNever(downloadedStatus);
  }
}
@@ -0,0 +1,46 @@
import { assertNever } from "../../pure/helpers-pure";
import {
  VariantAnalysisScannedRepositoryDownloadStatus,
  VariantAnalysisScannedRepositoryState,
} from "../shared/variant-analysis";
import {
  VariantAnalysisScannedRepositoryDownloadDto,
  VariantAnalysisScannedRepositoryStateDto,
} from "./repo-states-dto";

export function mapRepoStatesToDto(
  repoStates: Record<number, VariantAnalysisScannedRepositoryState>,
): Record<number, VariantAnalysisScannedRepositoryStateDto> {
  return Object.fromEntries(
    Object.entries(repoStates).map(([key, value]) => {
      return [key, mapRepoStateToDto(value)];
    }),
  );
}

function mapRepoStateToDto(
  repoState: VariantAnalysisScannedRepositoryState,
): VariantAnalysisScannedRepositoryStateDto {
  return {
    repositoryId: repoState.repositoryId,
    downloadStatus: mapDownloadStatusToDto(repoState.downloadStatus),
    downloadPercentage: repoState.downloadPercentage,
  };
}

function mapDownloadStatusToDto(
  downloadedStatus: VariantAnalysisScannedRepositoryDownloadStatus,
) {
  switch (downloadedStatus) {
    case VariantAnalysisScannedRepositoryDownloadStatus.Pending:
      return VariantAnalysisScannedRepositoryDownloadDto.Pending;
    case VariantAnalysisScannedRepositoryDownloadStatus.InProgress:
      return VariantAnalysisScannedRepositoryDownloadDto.InProgress;
    case VariantAnalysisScannedRepositoryDownloadStatus.Succeeded:
      return VariantAnalysisScannedRepositoryDownloadDto.Succeeded;
    case VariantAnalysisScannedRepositoryDownloadStatus.Failed:
      return VariantAnalysisScannedRepositoryDownloadDto.Failed;
    default:
      assertNever(downloadedStatus);
  }
}
@@ -0,0 +1,12 @@
export interface VariantAnalysisScannedRepositoryStateDto {
  repositoryId: number;
  downloadStatus: VariantAnalysisScannedRepositoryDownloadDto;
  downloadPercentage?: number;
}

export enum VariantAnalysisScannedRepositoryDownloadDto {
  Pending = "pending",
  InProgress = "inProgress",
  Succeeded = "succeeded",
  Failed = "failed",
}
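
// A minimal sketch (not part of this change; the sample record is made up) of how the two repo-state
// mappers above pair up at the persistence boundary:
//   const domain: Record<number, VariantAnalysisScannedRepositoryState> = {
//     42: {
//       repositoryId: 42,
//       downloadStatus: VariantAnalysisScannedRepositoryDownloadStatus.Succeeded,
//       downloadPercentage: 100,
//     },
//   };
//   const persisted = mapRepoStatesToDto(domain);            // what gets written to repo_states.json
//   const restored = mapRepoStatesToDomainModel(persisted);  // deep-equals `domain` on the next read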
@@ -1,8 +1,8 @@
import { outputJson, readJson } from "fs-extra";
import { VariantAnalysisScannedRepositoryState } from "../shared/variant-analysis";
import { VariantAnalysisScannedRepositoryStateData } from "./repo-states-data-types";
import { mapRepoStateToData } from "./repo-states-to-data-mapper";
import { mapRepoStateToDomain } from "./repo-states-to-domain-mapper";
import { VariantAnalysisScannedRepositoryStateDto } from "./repo-states-dto";
import { mapRepoStatesToDto } from "./repo-states-dto-mapper";
import { mapRepoStatesToDomainModel } from "./repo-states-domain-mapper";

export const REPO_STATES_FILENAME = "repo_states.json";

@@ -10,13 +10,7 @@ export async function writeRepoStates(
  storagePath: string,
  repoStates: Record<number, VariantAnalysisScannedRepositoryState>,
): Promise<void> {
  // Map from repoStates Domain type to the repoStates Data type
  const repoStatesData = Object.fromEntries(
    Object.entries(repoStates).map(([key, value]) => {
      return [key, mapRepoStateToData(value)];
    }),
  );

  const repoStatesData = mapRepoStatesToDto(repoStates);
  await outputJson(storagePath, repoStatesData);
}

@@ -26,15 +20,10 @@ export async function readRepoStates(
  try {
    const repoStatesData: Record<
      number,
      VariantAnalysisScannedRepositoryStateData
      VariantAnalysisScannedRepositoryStateDto
    > = await readJson(storagePath);

    // Map from repoStates Data type to the repoStates Domain type
    const repoStates = Object.fromEntries(
      Object.entries(repoStatesData).map(([key, value]) => {
        return [key, mapRepoStateToDomain(value)];
      }),
    );
    const repoStates = mapRepoStatesToDomainModel(repoStatesData);

    return repoStates;
  } catch (e) {
@@ -0,0 +1 @@
export * from "./repo-tasks-store";
@@ -4,12 +4,12 @@ import {
  VariantAnalysisRepoStatus,
} from "../shared/variant-analysis";
import {
  VariantAnalysisRepositoryTaskData,
  VariantAnalysisRepoStatusData,
} from "./repo-task-data-types";
  VariantAnalysisRepositoryTaskDto,
  VariantAnalysisRepoStatusDto,
} from "./repo-tasks-dto";

export function mapRepoTaskToDomain(
  repoTask: VariantAnalysisRepositoryTaskData,
export function mapRepoTaskToDomainModel(
  repoTask: VariantAnalysisRepositoryTaskDto,
): VariantAnalysisRepositoryTask {
  return {
    repository: {
@@ -17,7 +17,9 @@ export function mapRepoTaskToDomain(
      fullName: repoTask.repository.fullName,
      private: repoTask.repository.private,
    },
    analysisStatus: mapRepoTaskAnalysisStatusToDomain(repoTask.analysisStatus),
    analysisStatus: mapRepoTaskAnalysisStatusToDomainModel(
      repoTask.analysisStatus,
    ),
    resultCount: repoTask.resultCount,
    artifactSizeInBytes: repoTask.artifactSizeInBytes,
    failureMessage: repoTask.failureMessage,
@@ -27,21 +29,21 @@ export function mapRepoTaskToDomain(
  };
}

function mapRepoTaskAnalysisStatusToDomain(
  analysisStatus: VariantAnalysisRepoStatusData,
function mapRepoTaskAnalysisStatusToDomainModel(
  analysisStatus: VariantAnalysisRepoStatusDto,
): VariantAnalysisRepoStatus {
  switch (analysisStatus) {
    case VariantAnalysisRepoStatusData.Pending:
    case VariantAnalysisRepoStatusDto.Pending:
      return VariantAnalysisRepoStatus.Pending;
    case VariantAnalysisRepoStatusData.InProgress:
    case VariantAnalysisRepoStatusDto.InProgress:
      return VariantAnalysisRepoStatus.InProgress;
    case VariantAnalysisRepoStatusData.Succeeded:
    case VariantAnalysisRepoStatusDto.Succeeded:
      return VariantAnalysisRepoStatus.Succeeded;
    case VariantAnalysisRepoStatusData.Failed:
    case VariantAnalysisRepoStatusDto.Failed:
      return VariantAnalysisRepoStatus.Failed;
    case VariantAnalysisRepoStatusData.Canceled:
    case VariantAnalysisRepoStatusDto.Canceled:
      return VariantAnalysisRepoStatus.Canceled;
    case VariantAnalysisRepoStatusData.TimedOut:
    case VariantAnalysisRepoStatusDto.TimedOut:
      return VariantAnalysisRepoStatus.TimedOut;
    default:
      assertNever(analysisStatus);
@@ -4,20 +4,20 @@ import {
  VariantAnalysisRepoStatus,
} from "../shared/variant-analysis";
import {
  VariantAnalysisRepositoryTaskData,
  VariantAnalysisRepoStatusData,
} from "./repo-task-data-types";
  VariantAnalysisRepositoryTaskDto,
  VariantAnalysisRepoStatusDto,
} from "./repo-tasks-dto";

export function mapRepoTaskToData(
export function mapRepoTaskToDto(
  repoTask: VariantAnalysisRepositoryTask,
): VariantAnalysisRepositoryTaskData {
): VariantAnalysisRepositoryTaskDto {
  return {
    repository: {
      id: repoTask.repository.id,
      fullName: repoTask.repository.fullName,
      private: repoTask.repository.private,
    },
    analysisStatus: mapRepoTaskAnalysisStatusToData(repoTask.analysisStatus),
    analysisStatus: mapRepoTaskAnalysisStatusToDto(repoTask.analysisStatus),
    resultCount: repoTask.resultCount,
    artifactSizeInBytes: repoTask.artifactSizeInBytes,
    failureMessage: repoTask.failureMessage,
@@ -27,22 +27,22 @@ export function mapRepoTaskToData(
  };
}

function mapRepoTaskAnalysisStatusToData(
function mapRepoTaskAnalysisStatusToDto(
  analysisStatus: VariantAnalysisRepoStatus,
): VariantAnalysisRepoStatusData {
): VariantAnalysisRepoStatusDto {
  switch (analysisStatus) {
    case VariantAnalysisRepoStatus.Pending:
      return VariantAnalysisRepoStatusData.Pending;
      return VariantAnalysisRepoStatusDto.Pending;
    case VariantAnalysisRepoStatus.InProgress:
      return VariantAnalysisRepoStatusData.InProgress;
      return VariantAnalysisRepoStatusDto.InProgress;
    case VariantAnalysisRepoStatus.Succeeded:
      return VariantAnalysisRepoStatusData.Succeeded;
      return VariantAnalysisRepoStatusDto.Succeeded;
    case VariantAnalysisRepoStatus.Failed:
      return VariantAnalysisRepoStatusData.Failed;
      return VariantAnalysisRepoStatusDto.Failed;
    case VariantAnalysisRepoStatus.Canceled:
      return VariantAnalysisRepoStatusData.Canceled;
      return VariantAnalysisRepoStatusDto.Canceled;
    case VariantAnalysisRepoStatus.TimedOut:
      return VariantAnalysisRepoStatusData.TimedOut;
      return VariantAnalysisRepoStatusDto.TimedOut;
    default:
      assertNever(analysisStatus);
  }
@@ -1,6 +1,6 @@
export interface VariantAnalysisRepositoryTaskData {
  repository: RepositoryData;
  analysisStatus: VariantAnalysisRepoStatusData;
export interface VariantAnalysisRepositoryTaskDto {
  repository: RepositoryDto;
  analysisStatus: VariantAnalysisRepoStatusDto;
  resultCount?: number;
  artifactSizeInBytes?: number;
  failureMessage?: string;
@@ -9,13 +9,13 @@ export interface VariantAnalysisRepositoryTaskData {
  artifactUrl?: string;
}

interface RepositoryData {
interface RepositoryDto {
  id: number;
  fullName: string;
  private: boolean;
}

export enum VariantAnalysisRepoStatusData {
export enum VariantAnalysisRepoStatusDto {
  Pending = "pending",
  InProgress = "inProgress",
  Succeeded = "succeeded",
@@ -1,8 +1,8 @@
import { outputJson, readJson } from "fs-extra";
import { join } from "path";
import { VariantAnalysisRepositoryTask } from "../shared/variant-analysis";
import { mapRepoTaskToData } from "./repo-task-to-data-mapper";
import { mapRepoTaskToDomain } from "./repo-task-to-domain-mapper";
import { mapRepoTaskToDto } from "./repo-tasks-dto-mapper";
import { mapRepoTaskToDomainModel } from "./repo-tasks-domain-mapper";

export const REPO_TASK_FILENAME = "repo_task.json";

@@ -10,7 +10,7 @@ export async function writeRepoTask(
  storageDirectory: string,
  repoTask: VariantAnalysisRepositoryTask,
): Promise<void> {
  const repoTaskData = mapRepoTaskToData(repoTask);
  const repoTaskData = mapRepoTaskToDto(repoTask);
  await outputJson(join(storageDirectory, REPO_TASK_FILENAME), repoTaskData);
}

@@ -20,5 +20,5 @@ export async function readRepoTask(
  const repoTaskData = await readJson(
    join(storageDirectory, REPO_TASK_FILENAME),
  );
  return mapRepoTaskToDomain(repoTaskData);
  return mapRepoTaskToDomainModel(repoTaskData);
}
@@ -1,12 +0,0 @@
export interface VariantAnalysisScannedRepositoryStateData {
  repositoryId: number;
  downloadStatus: VariantAnalysisScannedRepositoryDownloadData;
  downloadPercentage?: number;
}

export enum VariantAnalysisScannedRepositoryDownloadData {
  Pending = "pending",
  InProgress = "inProgress",
  Succeeded = "succeeded",
  Failed = "failed",
}
@@ -1,36 +0,0 @@
import { assertNever } from "../../pure/helpers-pure";
import {
  VariantAnalysisScannedRepositoryDownloadStatus,
  VariantAnalysisScannedRepositoryState,
} from "../shared/variant-analysis";
import {
  VariantAnalysisScannedRepositoryDownloadData,
  VariantAnalysisScannedRepositoryStateData,
} from "./repo-states-data-types";

export function mapRepoStateToData(
  repoState: VariantAnalysisScannedRepositoryState,
): VariantAnalysisScannedRepositoryStateData {
  return {
    repositoryId: repoState.repositoryId,
    downloadStatus: processDownloadStatus(repoState.downloadStatus),
    downloadPercentage: repoState.downloadPercentage,
  };
}

function processDownloadStatus(
  downloadedStatus: VariantAnalysisScannedRepositoryDownloadStatus,
) {
  switch (downloadedStatus) {
    case VariantAnalysisScannedRepositoryDownloadStatus.Pending:
      return VariantAnalysisScannedRepositoryDownloadData.Pending;
    case VariantAnalysisScannedRepositoryDownloadStatus.InProgress:
      return VariantAnalysisScannedRepositoryDownloadData.InProgress;
    case VariantAnalysisScannedRepositoryDownloadStatus.Succeeded:
      return VariantAnalysisScannedRepositoryDownloadData.Succeeded;
    case VariantAnalysisScannedRepositoryDownloadStatus.Failed:
      return VariantAnalysisScannedRepositoryDownloadData.Failed;
    default:
      assertNever(downloadedStatus);
  }
}
@@ -1,36 +0,0 @@
import { assertNever } from "../../pure/helpers-pure";
import {
  VariantAnalysisScannedRepositoryState,
  VariantAnalysisScannedRepositoryDownloadStatus,
} from "../shared/variant-analysis";
import {
  VariantAnalysisScannedRepositoryStateData,
  VariantAnalysisScannedRepositoryDownloadData,
} from "./repo-states-data-types";

export function mapRepoStateToDomain(
  repoState: VariantAnalysisScannedRepositoryStateData,
): VariantAnalysisScannedRepositoryState {
  return {
    repositoryId: repoState.repositoryId,
    downloadStatus: processDownloadStatus(repoState.downloadStatus),
    downloadPercentage: repoState.downloadPercentage,
  };
}

function processDownloadStatus(
  downloadedStatus: VariantAnalysisScannedRepositoryDownloadData,
) {
  switch (downloadedStatus) {
    case VariantAnalysisScannedRepositoryDownloadData.Pending:
      return VariantAnalysisScannedRepositoryDownloadStatus.Pending;
    case VariantAnalysisScannedRepositoryDownloadData.InProgress:
      return VariantAnalysisScannedRepositoryDownloadStatus.InProgress;
    case VariantAnalysisScannedRepositoryDownloadData.Succeeded:
      return VariantAnalysisScannedRepositoryDownloadStatus.Succeeded;
    case VariantAnalysisScannedRepositoryDownloadData.Failed:
      return VariantAnalysisScannedRepositoryDownloadStatus.Failed;
    default:
      assertNever(downloadedStatus);
  }
}
@@ -71,7 +71,7 @@ import {
  readRepoStates,
  REPO_STATES_FILENAME,
  writeRepoStates,
} from "./store/repo-states-store";
} from "./repo-states-store";

export class VariantAnalysisManager
  extends DisposableObject

@@ -17,7 +17,7 @@ import {
import { DisposableObject, DisposeHandler } from "../pure/disposable-object";
import { EventEmitter } from "vscode";
import { unzipFile } from "../pure/zip";
import { readRepoTask, writeRepoTask } from "./store/repo-task-store";
import { readRepoTask, writeRepoTask } from "./repo-tasks-store";

type CacheKey = `${number}/${string}`;

@@ -149,14 +149,14 @@ describe("loadDataExtensionYaml", () => {
  });

  it("returns undefined if given a string", () => {
    const data = loadDataExtensionYaml(`extensions:
    expect(() =>
      loadDataExtensionYaml(`extensions:
- addsTo:
pack: codeql/java-all
extensible: sinkModel
data:
- ["org.sql2o","Connection",true,"createQuery","(String)","","Argument[0]","sql","manual"]
`);

    expect(data).toBeUndefined();
`),
    ).toThrow("Invalid data extension YAML: must be object");
  });
});

@@ -48,7 +48,7 @@ import { mockedObject } from "../../utils/mocking.helpers";
import {
  REPO_STATES_FILENAME,
  writeRepoStates,
} from "../../../../src/variant-analysis/store/repo-states-store";
} from "../../../../src/variant-analysis/repo-states-store";

// up to 3 minutes per test
jest.setTimeout(3 * 60 * 1000);

@@ -20,8 +20,9 @@ import {
} from "../../../src/local-databases";
import * as databaseFetcher from "../../../src/databaseFetcher";
import { createMockDB } from "../../factories/databases/databases";
import { asError } from "../../../src/pure/helpers-pure";

jest.setTimeout(40_000);
jest.setTimeout(80_000);

describe("SkeletonQueryWizard", () => {
  let mockCli: CodeQLCliServer;
@@ -83,11 +84,11 @@ describe("SkeletonQueryWizard", () => {
    jest.spyOn(workspace, "workspaceFolders", "get").mockReturnValue([
      {
        name: `codespaces-codeql`,
        uri: { path: storagePath },
        uri: { fsPath: storagePath },
      },
      {
        name: "/second/folder/path",
        uri: { path: storagePath },
        uri: { fsPath: storagePath },
      },
    ] as WorkspaceFolder[]);

@@ -114,6 +115,7 @@ describe("SkeletonQueryWizard", () => {
      extLogger,
      mockDatabaseManager,
      token,
      storagePath,
    );

    askForGitHubRepoSpy = jest
@@ -244,6 +246,7 @@ describe("SkeletonQueryWizard", () => {
        extLogger,
        mockDatabaseManagerWithItems,
        token,
        storagePath,
      );
    });

@@ -305,8 +308,8 @@ describe("SkeletonQueryWizard", () => {
    it("should return the first workspace folder", async () => {
      jest.spyOn(workspace, "workspaceFolders", "get").mockReturnValue([
        {
          name: "codeql-custom-queries-cpp",
          uri: { path: "codespaces-codeql" },
          name: "codespaces-codeql",
          uri: { fsPath: "codespaces-codeql" },
        },
      ] as WorkspaceFolder[]);

@@ -317,6 +320,7 @@ describe("SkeletonQueryWizard", () => {
        extLogger,
        mockDatabaseManager,
        token,
        storagePath,
      );

      expect(wizard.getFirstStoragePath()).toEqual("codespaces-codeql");
@@ -327,11 +331,21 @@ describe("SkeletonQueryWizard", () => {
      jest.spyOn(workspace, "workspaceFolders", "get").mockReturnValue([
        {
          name: "codeql-custom-queries-cpp",
          uri: { path: "vscode-codeql-starter/codeql-custom-queries-cpp" },
          uri: {
            fsPath: join(
              "vscode-codeql-starter",
              "codeql-custom-queries-cpp",
            ),
          },
        },
        {
          name: "codeql-custom-queries-csharp",
          uri: { path: "vscode-codeql-starter/codeql-custom-queries-csharp" },
          uri: {
            fsPath: join(
              "vscode-codeql-starter",
              "codeql-custom-queries-csharp",
            ),
          },
        },
      ] as WorkspaceFolder[]);

@@ -342,6 +356,7 @@ describe("SkeletonQueryWizard", () => {
        extLogger,
        mockDatabaseManager,
        token,
        storagePath,
      );

      expect(wizard.getFirstStoragePath()).toEqual("vscode-codeql-starter");
@@ -352,8 +367,15 @@ describe("SkeletonQueryWizard", () => {
  describe("findDatabaseItemByNwo", () => {
    describe("when the item exists", () => {
      it("should return the database item", async () => {
        const mockDbItem = createMockDB(dir);
        const mockDbItem2 = createMockDB(dir);
        const mockDbItem = createMockDB(dir, {
          language: "ruby",
          dateAdded: 123,
        } as FullDatabaseOptions);
        const mockDbItem2 = createMockDB(dir, {
          language: "javascript",
        } as FullDatabaseOptions);

        jest.spyOn(mockDbItem, "name", "get").mockReturnValue("mock-name");

        const databaseItem = await wizard.findDatabaseItemByNwo(
          mockDbItem.language,
@@ -361,8 +383,40 @@ describe("SkeletonQueryWizard", () => {
          [mockDbItem, mockDbItem2],
        );

        expect(databaseItem!.language).toEqual(mockDbItem.language);
        expect(databaseItem!.name).toEqual(mockDbItem.name);
        expect(JSON.stringify(databaseItem)).toEqual(
          JSON.stringify(mockDbItem),
        );
      });

      it("should ignore databases with errors", async () => {
        const mockDbItem = createMockDB(dir, {
          language: "ruby",
          dateAdded: 123,
        } as FullDatabaseOptions);
        const mockDbItem2 = createMockDB(dir, {
          language: "javascript",
        } as FullDatabaseOptions);
        const mockDbItem3 = createMockDB(dir, {
          language: "ruby",
          dateAdded: 345,
        } as FullDatabaseOptions);

        jest.spyOn(mockDbItem, "name", "get").mockReturnValue("mock-name");
        jest.spyOn(mockDbItem3, "name", "get").mockReturnValue(mockDbItem.name);

        jest
          .spyOn(mockDbItem, "error", "get")
          .mockReturnValue(asError("database go boom!"));

        const databaseItem = await wizard.findDatabaseItemByNwo(
          mockDbItem.language,
          mockDbItem.name,
          [mockDbItem, mockDbItem2, mockDbItem3],
        );

        expect(JSON.stringify(databaseItem)).toEqual(
          JSON.stringify(mockDbItem3),
        );
      });
    });

@@ -399,6 +453,32 @@ describe("SkeletonQueryWizard", () => {

        expect(databaseItem).toEqual(mockDbItem);
      });

      it("should ignore databases with errors", async () => {
        const mockDbItem = createMockDB(dir, {
          language: "ruby",
        } as FullDatabaseOptions);
        const mockDbItem2 = createMockDB(dir, {
          language: "javascript",
        } as FullDatabaseOptions);
        const mockDbItem3 = createMockDB(dir, {
          language: "ruby",
        } as FullDatabaseOptions);

        jest
          .spyOn(mockDbItem, "error", "get")
          .mockReturnValue(asError("database go boom!"));

        const databaseItem = await wizard.findDatabaseItemByLanguage("ruby", [
          mockDbItem,
          mockDbItem2,
          mockDbItem3,
        ]);

        expect(JSON.stringify(databaseItem)).toEqual(
          JSON.stringify(mockDbItem3),
        );
      });
    });

    describe("when the item doesn't exist", () => {

@@ -0,0 +1,207 @@
import { QuickPickItem, window } from "vscode";

import { pickExtensionPackModelFile } from "../../../../src/data-extensions-editor/extension-pack-picker";
import { QlpacksInfo, ResolveExtensionsResult } from "../../../../src/cli";
import * as helpers from "../../../../src/helpers";

describe("pickExtensionPackModelFile", () => {
  const qlPacks = {
    "my-extension-pack": ["/a/b/c/my-extension-pack"],
    "another-extension-pack": ["/a/b/c/another-extension-pack"],
  };
  const extensions = {
    models: [],
    data: {
      "/a/b/c/my-extension-pack": [
        {
          file: "/a/b/c/my-extension-pack/models/model.yml",
          index: 0,
          predicate: "sinkModel",
        },
      ],
    },
  };

  const progress = jest.fn();
  let showQuickPickSpy: jest.SpiedFunction<typeof window.showQuickPick>;

  beforeEach(() => {
    showQuickPickSpy = jest
      .spyOn(window, "showQuickPick")
      .mockRejectedValue(new Error("Unexpected call to showQuickPick"));
  });

  it("allows choosing an existing extension pack and model file", async () => {
    const cliServer = mockCliServer(qlPacks, extensions);

    showQuickPickSpy.mockResolvedValueOnce({
      label: "my-extension-pack",
      extensionPack: "my-extension-pack",
    } as QuickPickItem);
    showQuickPickSpy.mockResolvedValueOnce({
      label: "models/model.yml",
      file: "/a/b/c/my-extension-pack/models/model.yml",
    } as QuickPickItem);

    expect(await pickExtensionPackModelFile(cliServer, progress)).toEqual(
      "/a/b/c/my-extension-pack/models/model.yml",
    );
    expect(showQuickPickSpy).toHaveBeenCalledTimes(2);
    expect(showQuickPickSpy).toHaveBeenCalledWith(
      [
        {
          label: "my-extension-pack",
          extensionPack: "my-extension-pack",
        },
        {
          label: "another-extension-pack",
          extensionPack: "another-extension-pack",
        },
      ],
      {
        title: expect.any(String),
      },
    );
    expect(showQuickPickSpy).toHaveBeenCalledWith(
      [
        {
          label: "models/model.yml",
          file: "/a/b/c/my-extension-pack/models/model.yml",
        },
      ],
      {
        title: expect.any(String),
      },
    );
    expect(cliServer.resolveQlpacks).toHaveBeenCalledTimes(1);
    expect(cliServer.resolveQlpacks).toHaveBeenCalledWith([], true);
    expect(cliServer.resolveExtensions).toHaveBeenCalledTimes(1);
    expect(cliServer.resolveExtensions).toHaveBeenCalledWith(
      "/a/b/c/my-extension-pack",
      [],
    );
  });

  it("allows cancelling the extension pack prompt", async () => {
    const cliServer = mockCliServer(qlPacks, extensions);

    showQuickPickSpy.mockResolvedValueOnce(undefined);

    expect(await pickExtensionPackModelFile(cliServer, progress)).toEqual(
      undefined,
    );
    expect(cliServer.resolveQlpacks).toHaveBeenCalled();
    expect(cliServer.resolveExtensions).not.toHaveBeenCalled();
  });

  it("does not show any options when there are no extension packs", async () => {
    const cliServer = mockCliServer({}, { models: [], data: {} });

    showQuickPickSpy.mockResolvedValueOnce(undefined);

    expect(await pickExtensionPackModelFile(cliServer, progress)).toEqual(
      undefined,
    );
    expect(showQuickPickSpy).toHaveBeenCalledTimes(1);
    expect(showQuickPickSpy).toHaveBeenCalledWith([], {
      title: expect.any(String),
    });
    expect(cliServer.resolveQlpacks).toHaveBeenCalled();
    expect(cliServer.resolveExtensions).not.toHaveBeenCalled();
  });

  it("shows an error when an extension pack resolves to more than 1 location", async () => {
    const showAndLogErrorMessageSpy = jest.spyOn(
      helpers,
      "showAndLogErrorMessage",
    );

    const cliServer = mockCliServer(
      {
        "my-extension-pack": [
          "/a/b/c/my-extension-pack",
          "/a/b/c/my-extension-pack2",
        ],
      },
      { models: [], data: {} },
    );

    showQuickPickSpy.mockResolvedValueOnce({
      label: "my-extension-pack",
      extensionPack: "my-extension-pack",
    } as QuickPickItem);

    expect(await pickExtensionPackModelFile(cliServer, progress)).toEqual(
      undefined,
    );
    expect(showAndLogErrorMessageSpy).toHaveBeenCalledTimes(1);
    expect(showAndLogErrorMessageSpy).toHaveBeenCalledWith(
      expect.stringMatching(/could not be resolved to a single location/),
      expect.anything(),
    );
    expect(showQuickPickSpy).toHaveBeenCalledTimes(1);
    expect(cliServer.resolveQlpacks).toHaveBeenCalled();
    expect(cliServer.resolveExtensions).not.toHaveBeenCalled();
  });

  it("allows cancelling the model file prompt", async () => {
    const cliServer = mockCliServer(qlPacks, extensions);

    showQuickPickSpy.mockResolvedValueOnce({
      label: "my-extension-pack",
      extensionPack: "my-extension-pack",
    } as QuickPickItem);
    showQuickPickSpy.mockResolvedValueOnce(undefined);

    expect(await pickExtensionPackModelFile(cliServer, progress)).toEqual(
      undefined,
    );
    expect(cliServer.resolveQlpacks).toHaveBeenCalled();
    expect(cliServer.resolveExtensions).toHaveBeenCalled();
  });

  it("does not show any options when there are no model files", async () => {
    const cliServer = mockCliServer(qlPacks, { models: [], data: {} });

    showQuickPickSpy.mockResolvedValueOnce({
      label: "my-extension-pack",
      extensionPack: "my-extension-pack",
    } as QuickPickItem);
    showQuickPickSpy.mockResolvedValueOnce(undefined);

    expect(await pickExtensionPackModelFile(cliServer, progress)).toEqual(
      undefined,
    );
    expect(showQuickPickSpy).toHaveBeenCalledTimes(2);
    expect(showQuickPickSpy).toHaveBeenCalledWith(
      [
        {
          label: "my-extension-pack",
          extensionPack: "my-extension-pack",
        },
        {
          label: "another-extension-pack",
          extensionPack: "another-extension-pack",
        },
      ],
      {
        title: expect.any(String),
      },
    );
    expect(showQuickPickSpy).toHaveBeenCalledWith([], {
      title: expect.any(String),
    });
    expect(cliServer.resolveQlpacks).toHaveBeenCalled();
    expect(cliServer.resolveExtensions).toHaveBeenCalled();
  });
});

function mockCliServer(
  qlpacks: QlpacksInfo,
  extensions: ResolveExtensionsResult,
) {
  return {
    resolveQlpacks: jest.fn().mockResolvedValue(qlpacks),
    resolveExtensions: jest.fn().mockResolvedValue(extensions),
  };
}
@@ -10,6 +10,8 @@ import { file } from "tmp-promise";
import { QueryResultType } from "../../../../src/pure/new-messages";
import { readFile } from "fs-extra";
import { load } from "js-yaml";
import * as helpers from "../../../../src/helpers";
import { RedactableError } from "../../../../src/pure/errors";

function createMockUri(path = "/a/b/c/foo"): Uri {
  return {
@@ -127,17 +129,27 @@ describe("readQueryResults", () => {
      bqrsDecode: jest.fn(),
    },
    bqrsPath: "/tmp/results.bqrs",
    logger: createMockLogger(),
  };

  let showAndLogExceptionWithTelemetrySpy: jest.SpiedFunction<
    typeof helpers.showAndLogExceptionWithTelemetry
  >;

  beforeEach(() => {
    showAndLogExceptionWithTelemetrySpy = jest.spyOn(
      helpers,
      "showAndLogExceptionWithTelemetry",
    );
  });

  it("returns undefined when there are no results", async () => {
    options.cliServer.bqrsInfo.mockResolvedValue({
      "result-sets": [],
    });

    expect(await readQueryResults(options)).toBeUndefined();
    expect(options.logger.log).toHaveBeenCalledWith(
      expect.stringMatching(/Expected exactly one result set/),
    expect(showAndLogExceptionWithTelemetrySpy).toHaveBeenCalledWith(
      expect.any(RedactableError),
    );
  });

@@ -166,8 +178,8 @@ describe("readQueryResults", () => {
    });

    expect(await readQueryResults(options)).toBeUndefined();
    expect(options.logger.log).toHaveBeenCalledWith(
      expect.stringMatching(/Expected exactly one result set/),
    expect(showAndLogExceptionWithTelemetrySpy).toHaveBeenCalledWith(
      expect.any(RedactableError),
    );
  });

@@ -132,7 +132,10 @@ describe("Variant Analyses and QueryHistoryManager", () => {
    await qhm.readQueryHistory();

    // Remove the first variant analysis
    await qhm.handleRemoveHistoryItem(qhm.treeDataProvider.allHistory[0]);
    await qhm.handleRemoveHistoryItem(
      qhm.treeDataProvider.allHistory[0],
      undefined,
    );

    // Add it back to the history
    qhm.addQuery(rawQueryHistory[0]);
Block a user