Merge remote-tracking branch 'origin/main' into koesie10/use-query-in-extension
extensions/ql-vscode/package-lock.json (generated, 4131 lines changed): diff suppressed because it is too large.
@@ -1497,7 +1497,7 @@
"@storybook/addon-essentials": "^6.5.17-alpha.0",
"@storybook/addon-interactions": "^6.5.17-alpha.0",
"@storybook/addon-links": "^6.5.17-alpha.0",
"@storybook/builder-webpack5": "^6.5.17-alpha.0",
"@storybook/builder-webpack5": "^7.0.4",
"@storybook/manager-webpack5": "^6.5.17-alpha.0",
"@storybook/react": "^6.5.17-alpha.0",
"@storybook/testing-library": "^0.0.13",
@@ -1531,7 +1531,7 @@
"@types/through2": "^2.0.36",
"@types/tmp": "^0.1.0",
"@types/unzipper": "~0.10.1",
"@types/vscode": "^1.59.0",
"@types/vscode": "^1.67.0",
"@types/webpack": "^5.28.0",
"@types/webpack-env": "^1.18.0",
"@types/xml2js": "~0.4.4",

@@ -14,8 +14,8 @@ import {
import { ProgressUpdate } from "../progress";
import { QueryRunner } from "../queryRunner";
import {
showAndLogErrorMessage,
showAndLogExceptionWithTelemetry,
showAndLogWarningMessage,
} from "../helpers";
import { extLogger } from "../common";
import { readFile, writeFile } from "fs-extra";
@@ -166,7 +166,9 @@ export class DataExtensionsEditorView extends AbstractWebview<
const existingModeledMethods = loadDataExtensionYaml(data);

if (!existingModeledMethods) {
void showAndLogWarningMessage("Failed to parse data extension YAML.");
void showAndLogErrorMessage(
`Failed to parse data extension YAML ${this.modelFilename}.`,
);
return;
}

@@ -175,7 +177,11 @@ export class DataExtensionsEditorView extends AbstractWebview<
modeledMethods: existingModeledMethods,
});
} catch (e: unknown) {
void extLogger.log(`Unable to read data extension YAML: ${e}`);
void showAndLogErrorMessage(
`Unable to read data extension YAML ${
this.modelFilename
}: ${getErrorMessage(e)}`,
);
}
}

@@ -207,7 +213,6 @@ export class DataExtensionsEditorView extends AbstractWebview<
const bqrsChunk = await readQueryResults({
cliServer: this.cliServer,
bqrsPath: queryResult.outputDir.bqrsPath,
logger: extLogger,
});
if (!bqrsChunk) {
await this.clearProgress();
@@ -232,7 +237,7 @@ export class DataExtensionsEditorView extends AbstractWebview<
void showAndLogExceptionWithTelemetry(
redactableError(
asError(err),
)`Failed to load external APi usages: ${getErrorMessage(err)}`,
)`Failed to load external API usages: ${getErrorMessage(err)}`,
);
}
}

@@ -0,0 +1,45 @@
{
"type": "object",
"properties": {
"extensions": {
"type": "array",
"items": {
"type": "object",
"required": ["addsTo", "data"],
"properties": {
"addsTo": {
"type": "object",
"required": ["pack", "extensible"],
"properties": {
"pack": {
"type": "string"
},
"extensible": {
"type": "string"
}
}
},
"data": {
"type": "array",
"items": {
"type": "array",
"items": {
"oneOf": [
{
"type": "string"
},
{
"type": "boolean"
},
{
"type": "number"
}
]
}
}
}
}
}
}
}
}
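Note: the schema above is consumed in `yaml.ts` later in this diff (`new Ajv({ allErrors: true })`, `ajv.compile(dataSchemaJson)`). A minimal sketch of that validation flow, assuming only the `ajv` and `js-yaml` packages that already appear in this diff; `parseAndValidate` is an illustrative name, not a function in the codebase:

```typescript
import Ajv from "ajv";
import { load } from "js-yaml";
import * as dataSchemaJson from "./data-schema.json";

// Compile the schema once, as yaml.ts does.
const ajv = new Ajv({ allErrors: true });
const validate = ajv.compile(dataSchemaJson);

// Parse a candidate data extension and validate it against the schema.
// Throwing on validation errors mirrors the new loadDataExtensionYaml behaviour.
export function parseAndValidate(yamlText: string): unknown {
  const data = load(yamlText);
  validate(data);
  if (validate.errors) {
    throw new Error(
      `Invalid data extension YAML: ${validate.errors
        .map((error) => `${error.instancePath} ${error.message}`)
        .join(", ")}`,
    );
  }
  return data;
}
```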
@@ -6,7 +6,7 @@ import {
getOnDiskWorkspaceFolders,
showAndLogExceptionWithTelemetry,
} from "../helpers";
import { Logger, TeeLogger } from "../common";
import { TeeLogger } from "../common";
import { CancellationToken } from "vscode";
import { CodeQLCliServer } from "../cli";
import { DatabaseItem } from "../local-databases";
@@ -108,18 +108,16 @@ export async function runQuery({
export type GetResultsOptions = {
cliServer: Pick<CodeQLCliServer, "bqrsInfo" | "bqrsDecode">;
bqrsPath: string;
logger: Logger;
};

export async function readQueryResults({
cliServer,
bqrsPath,
logger,
}: GetResultsOptions) {
const bqrsInfo = await cliServer.bqrsInfo(bqrsPath);
if (bqrsInfo["result-sets"].length !== 1) {
void logger.log(
`Expected exactly one result set, got ${bqrsInfo["result-sets"].length}`,
void showAndLogExceptionWithTelemetry(
redactableError`Expected exactly one result set, got ${bqrsInfo["result-sets"].length}`,
);
return undefined;
}
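Note: with this change, `readQueryResults` surfaces the unexpected result-set count through `showAndLogExceptionWithTelemetry` instead of the plain logger, and still returns `undefined`. A minimal caller sketch under that assumption; `loadResults` and the decode step are illustrative, not part of this diff:

```typescript
// Illustrative caller, mirroring how the editor view above treats a
// missing chunk: the error has already been shown with telemetry, so
// the caller only needs to bail out.
async function loadResults(options: GetResultsOptions): Promise<void> {
  const bqrsChunk = await readQueryResults(options);
  if (!bqrsChunk) {
    return;
  }
  // ...decode and render bqrsChunk...
}
```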
@@ -6,11 +6,16 @@ import { CodeQLCliServer } from "../cli";
import { TeeLogger } from "../common";
import { extensiblePredicateDefinitions } from "./yaml";
import { ProgressCallback } from "../progress";
import { getOnDiskWorkspaceFolders } from "../helpers";
import {
getOnDiskWorkspaceFolders,
showAndLogExceptionWithTelemetry,
} from "../helpers";
import {
ModeledMethodType,
ModeledMethodWithSignature,
} from "./modeled-method";
import { redactableError } from "../pure/errors";
import { QueryResultType } from "../pure/new-messages";

type FlowModelOptions = {
cliServer: CodeQLCliServer;
@@ -67,13 +72,21 @@ async function getModeledMethodsFromFlow(
token,
new TeeLogger(queryRunner.logger, queryRun.outputDir.logPath),
);
if (queryResult.resultType !== QueryResultType.SUCCESS) {
void showAndLogExceptionWithTelemetry(
redactableError`Failed to run ${queryName} query: ${
queryResult.message ?? "No message"
}`,
);
return [];
}

const bqrsPath = queryResult.outputDir.bqrsPath;

const bqrsInfo = await cliServer.bqrsInfo(bqrsPath);
if (bqrsInfo["result-sets"].length !== 1) {
throw new Error(
`Expected exactly one result set, got ${bqrsInfo["result-sets"].length}`,
void showAndLogExceptionWithTelemetry(
redactableError`Expected exactly one result set, got ${bqrsInfo["result-sets"].length} for ${queryName}`,
);
}


@@ -1,3 +1,5 @@
import Ajv from "ajv";

import { ExternalApiUsage } from "./external-api-usage";
import {
ModeledMethod,
@@ -5,6 +7,11 @@ import {
ModeledMethodWithSignature,
} from "./modeled-method";

import * as dataSchemaJson from "./data-schema.json";

const ajv = new Ajv({ allErrors: true });
const dataSchemaValidate = ajv.compile(dataSchemaJson);

type ExternalApiUsageByType = {
externalApiUsage: ExternalApiUsage;
modeledMethod: ModeledMethod;
@@ -191,8 +198,14 @@ ${extensions.join("\n")}`;
export function loadDataExtensionYaml(
data: any,
): Record<string, ModeledMethod> | undefined {
if (typeof data !== "object") {
return undefined;
dataSchemaValidate(data);

if (dataSchemaValidate.errors) {
throw new Error(
`Invalid data extension YAML: ${dataSchemaValidate.errors
.map((error) => `${error.instancePath} ${error.message}`)
.join(", ")}`,
);
}

const extensions = data.extensions;
@@ -204,19 +217,8 @@ export function loadDataExtensionYaml(

for (const extension of extensions) {
const addsTo = extension.addsTo;
if (typeof addsTo !== "object") {
continue;
}

const extensible = addsTo.extensible;
if (typeof extensible !== "string") {
continue;
}

const data = extension.data;
if (!Array.isArray(data)) {
continue;
}

const definition = Object.values(extensiblePredicateDefinitions).find(
(definition) => definition.extensiblePredicate === extensible,

@@ -34,11 +34,11 @@ import { DatabasePanelCommands } from "../../common/commands";
import { App } from "../../common/app";

export interface RemoteDatabaseQuickPickItem extends QuickPickItem {
kind: string;
remoteDatabaseKind: string;
}

export interface AddListQuickPickItem extends QuickPickItem {
kind: DbListKind;
databaseKind: DbListKind;
}

export class DbPanel extends DisposableObject {
@@ -113,19 +113,19 @@ export class DbPanel extends DisposableObject {
) {
await this.addNewRemoteRepo(highlightedItem.parentListName);
} else {
const quickPickItems = [
const quickPickItems: RemoteDatabaseQuickPickItem[] = [
{
label: "$(repo) From a GitHub repository",
detail: "Add a variant analysis repository from GitHub",
alwaysShow: true,
kind: "repo",
remoteDatabaseKind: "repo",
},
{
label: "$(organization) All repositories of a GitHub org or owner",
detail:
"Add a variant analysis list of repositories from a GitHub organization/owner",
alwaysShow: true,
kind: "owner",
remoteDatabaseKind: "owner",
},
];
const databaseKind =
@@ -142,9 +142,9 @@ export class DbPanel extends DisposableObject {
// We set 'true' to make this a silent exception.
throw new UserCancellationException("No repository selected", true);
}
if (databaseKind.kind === "repo") {
if (databaseKind.remoteDatabaseKind === "repo") {
await this.addNewRemoteRepo();
} else if (databaseKind.kind === "owner") {
} else if (databaseKind.remoteDatabaseKind === "owner") {
await this.addNewRemoteOwner();
}
}

@@ -39,10 +39,7 @@ import {
QueryStatus,
variantAnalysisStatusToQueryStatus,
} from "../query-status";
import {
readQueryHistoryFromFile,
writeQueryHistoryToFile,
} from "./store/query-history-store";
import { readQueryHistoryFromFile, writeQueryHistoryToFile } from "./store";
import { pathExists } from "fs-extra";
import { CliVersionConstraint } from "../cli";
import { HistoryItemLabelProvider } from "./history-item-label-provider";

@@ -1,105 +0,0 @@
import {
LocalQueryInfo,
CompletedQueryInfo,
InitialQueryInfo,
} from "../../query-results";
import { QueryEvaluationInfo } from "../../run-queries-shared";
import { QueryHistoryInfo } from "../query-history-info";
import { VariantAnalysisHistoryItem } from "../variant-analysis-history-item";
import {
CompletedQueryInfoData,
QueryEvaluationInfoData,
InitialQueryInfoData,
LocalQueryDataItem,
} from "./local-query-data-item";
import { QueryHistoryDataItem } from "./query-history-data";

// Maps Query History Data Models to Domain Models

export function mapQueryHistoryToDomainModels(
queries: QueryHistoryDataItem[],
): QueryHistoryInfo[] {
return queries.map((d) => {
if (d.t === "variant-analysis") {
const query: VariantAnalysisHistoryItem = d;
return query;
} else if (d.t === "local") {
return mapLocalQueryDataItemToDomainModel(d);
}

throw Error(
`Unexpected or corrupted query history file. Unknown query history item: ${JSON.stringify(
d,
)}`,
);
});
}

function mapLocalQueryDataItemToDomainModel(
localQuery: LocalQueryDataItem,
): LocalQueryInfo {
return new LocalQueryInfo(
mapInitialQueryInfoDataToDomainModel(localQuery.initialInfo),
undefined,
localQuery.failureReason,
localQuery.completedQuery &&
mapCompletedQueryInfoDataToDomainModel(localQuery.completedQuery),
localQuery.evalLogLocation,
localQuery.evalLogSummaryLocation,
localQuery.jsonEvalLogSummaryLocation,
localQuery.evalLogSummarySymbolsLocation,
);
}

function mapCompletedQueryInfoDataToDomainModel(
completedQuery: CompletedQueryInfoData,
): CompletedQueryInfo {
return new CompletedQueryInfo(
mapQueryEvaluationInfoDataToDomainModel(completedQuery.query),
{
runId: completedQuery.result.runId,
queryId: completedQuery.result.queryId,
resultType: completedQuery.result.resultType,
evaluationTime: completedQuery.result.evaluationTime,
message: completedQuery.result.message,
logFileLocation: completedQuery.result.logFileLocation,
},
completedQuery.logFileLocation,
completedQuery.successful ?? completedQuery.sucessful,
completedQuery.message,
completedQuery.interpretedResultsSortState,
completedQuery.resultCount,
completedQuery.sortedResultsInfo,
);
}

function mapInitialQueryInfoDataToDomainModel(
initialInfo: InitialQueryInfoData,
): InitialQueryInfo {
return {
userSpecifiedLabel: initialInfo.userSpecifiedLabel,
queryText: initialInfo.queryText,
isQuickQuery: initialInfo.isQuickQuery,
isQuickEval: initialInfo.isQuickEval,
quickEvalPosition: initialInfo.quickEvalPosition,
queryPath: initialInfo.queryPath,
databaseInfo: {
databaseUri: initialInfo.databaseInfo.databaseUri,
name: initialInfo.databaseInfo.name,
},
start: new Date(initialInfo.start),
id: initialInfo.id,
};
}

function mapQueryEvaluationInfoDataToDomainModel(
evaluationInfo: QueryEvaluationInfoData,
): QueryEvaluationInfo {
return new QueryEvaluationInfo(
evaluationInfo.querySaveDir,
evaluationInfo.dbItemPath,
evaluationInfo.databaseHasMetadataFile,
evaluationInfo.quickEvalPosition,
evaluationInfo.metadata,
);
}
@@ -1,90 +0,0 @@
import { assertNever } from "../../pure/helpers-pure";
import { LocalQueryInfo, InitialQueryInfo } from "../../query-results";
import { QueryEvaluationInfo } from "../../run-queries-shared";
import { QueryHistoryInfo } from "../query-history-info";
import {
LocalQueryDataItem,
InitialQueryInfoData,
QueryEvaluationInfoData,
} from "./local-query-data-item";
import { QueryHistoryDataItem } from "./query-history-data";
import { VariantAnalysisDataItem } from "./variant-analysis-data-item";

// Maps Query History Domain Models to Data Models

export function mapQueryHistoryToDataModels(
queries: QueryHistoryInfo[],
): QueryHistoryDataItem[] {
return queries.map((q) => {
if (q.t === "variant-analysis") {
const query: VariantAnalysisDataItem = q;
return query;
} else if (q.t === "local") {
return mapLocalQueryInfoToDataModel(q);
} else {
assertNever(q);
}
});
}

function mapLocalQueryInfoToDataModel(
query: LocalQueryInfo,
): LocalQueryDataItem {
return {
initialInfo: mapInitialQueryInfoToDataModel(query.initialInfo),
t: "local",
evalLogLocation: query.evalLogLocation,
evalLogSummaryLocation: query.evalLogSummaryLocation,
jsonEvalLogSummaryLocation: query.jsonEvalLogSummaryLocation,
evalLogSummarySymbolsLocation: query.evalLogSummarySymbolsLocation,
failureReason: query.failureReason,
completedQuery: query.completedQuery && {
query: mapQueryEvaluationInfoToDataModel(query.completedQuery.query),
result: {
runId: query.completedQuery.result.runId,
queryId: query.completedQuery.result.queryId,
resultType: query.completedQuery.result.resultType,
evaluationTime: query.completedQuery.result.evaluationTime,
message: query.completedQuery.result.message,
logFileLocation: query.completedQuery.result.logFileLocation,
},
logFileLocation: query.completedQuery.logFileLocation,
successful: query.completedQuery.successful,
message: query.completedQuery.message,
resultCount: query.completedQuery.resultCount,
sortedResultsInfo: query.completedQuery.sortedResultsInfo,
},
};
}

function mapInitialQueryInfoToDataModel(
localQueryInitialInfo: InitialQueryInfo,
): InitialQueryInfoData {
return {
userSpecifiedLabel: localQueryInitialInfo.userSpecifiedLabel,
queryText: localQueryInitialInfo.queryText,
isQuickQuery: localQueryInitialInfo.isQuickQuery,
isQuickEval: localQueryInitialInfo.isQuickEval,
quickEvalPosition: localQueryInitialInfo.quickEvalPosition,
queryPath: localQueryInitialInfo.queryPath,
databaseInfo: {
databaseUri: localQueryInitialInfo.databaseInfo.databaseUri,
name: localQueryInitialInfo.databaseInfo.name,
},
start: localQueryInitialInfo.start,
id: localQueryInitialInfo.id,
};
}

function mapQueryEvaluationInfoToDataModel(
queryEvaluationInfo: QueryEvaluationInfo,
): QueryEvaluationInfoData {
return {
querySaveDir: queryEvaluationInfo.querySaveDir,
dbItemPath: queryEvaluationInfo.dbItemPath,
databaseHasMetadataFile: queryEvaluationInfo.databaseHasMetadataFile,
quickEvalPosition: queryEvaluationInfo.quickEvalPosition,
metadata: queryEvaluationInfo.metadata,
resultsPaths: queryEvaluationInfo.resultsPaths,
};
}
extensions/ql-vscode/src/query-history/store/index.ts (new file)
@@ -0,0 +1 @@
export * from "./query-history-store";
@@ -1,100 +0,0 @@
export interface LocalQueryDataItem {
initialInfo: InitialQueryInfoData;
t: "local";
evalLogLocation?: string;
evalLogSummaryLocation?: string;
jsonEvalLogSummaryLocation?: string;
evalLogSummarySymbolsLocation?: string;
completedQuery?: CompletedQueryInfoData;
failureReason?: string;
}

export interface InitialQueryInfoData {
userSpecifiedLabel?: string;
queryText: string;
isQuickQuery: boolean;
isQuickEval: boolean;
quickEvalPosition?: PositionData;
queryPath: string;
databaseInfo: DatabaseInfoData;
start: Date;
id: string;
}

interface DatabaseInfoData {
name: string;
databaseUri: string;
}

interface PositionData {
line: number;
column: number;
endLine: number;
endColumn: number;
fileName: string;
}

export interface CompletedQueryInfoData {
query: QueryEvaluationInfoData;
message?: string;
successful?: boolean;

// There once was a typo in the data model, which is why we need to support both
sucessful?: boolean;
result: EvaluationResultData;
logFileLocation?: string;
resultCount: number;
sortedResultsInfo: Record<string, SortedResultSetInfo>;
interpretedResultsSortState?: InterpretedResultsSortState;
}

interface InterpretedResultsSortState {
sortBy: InterpretedResultsSortColumn;
sortDirection: SortDirection;
}

type InterpretedResultsSortColumn = "alert-message";

interface SortedResultSetInfo {
resultsPath: string;
sortState: RawResultsSortState;
}

interface RawResultsSortState {
columnIndex: number;
sortDirection: SortDirection;
}

enum SortDirection {
asc,
desc,
}

interface EvaluationResultData {
runId: number;
queryId: number;
resultType: number;
evaluationTime: number;
message?: string;
logFileLocation?: string;
}

export interface QueryEvaluationInfoData {
querySaveDir: string;
dbItemPath: string;
databaseHasMetadataFile: boolean;
quickEvalPosition?: PositionData;
metadata?: QueryMetadataData;
resultsPaths: {
resultsPath: string;
interpretedResultsPath: string;
};
}

interface QueryMetadataData {
name?: string;
description?: string;
id?: string;
kind?: string;
scored?: string;
}
@@ -1,14 +0,0 @@
// Contains models and consts for the data we want to store in the query history store.
// Changes to these models should be done carefully and account for backwards compatibility of data.

import { LocalQueryDataItem } from "./local-query-data-item";
import { VariantAnalysisDataItem } from "./variant-analysis-data-item";

export const ALLOWED_QUERY_HISTORY_VERSIONS = [1, 2];

export interface QueryHistoryData {
version: number;
queries: QueryHistoryDataItem[];
}

export type QueryHistoryDataItem = LocalQueryDataItem | VariantAnalysisDataItem;
@@ -0,0 +1,140 @@
import { assertNever } from "../../pure/helpers-pure";
import {
LocalQueryInfo,
InitialQueryInfo,
CompletedQueryInfo,
} from "../../query-results";
import { QueryEvaluationInfo } from "../../run-queries-shared";
import { QueryHistoryInfo } from "../query-history-info";
import {
QueryHistoryLocalQueryDto,
InitialQueryInfoDto,
QueryEvaluationInfoDto,
CompletedQueryInfoDto,
SortedResultSetInfoDto,
SortDirectionDto,
} from "./query-history-local-query-dto";
import { QueryHistoryItemDto } from "./query-history-dto";
import { QueryHistoryVariantAnalysisDto } from "./query-history-variant-analysis-dto";
import {
RawResultsSortState,
SortDirection,
SortedResultSetInfo,
} from "../../pure/interface-types";

export function mapQueryHistoryToDto(
queries: QueryHistoryInfo[],
): QueryHistoryItemDto[] {
return queries.map((q) => {
if (q.t === "variant-analysis") {
const query: QueryHistoryVariantAnalysisDto = q;
return query;
} else if (q.t === "local") {
return mapLocalQueryInfoToDto(q);
} else {
assertNever(q);
}
});
}

function mapLocalQueryInfoToDto(
query: LocalQueryInfo,
): QueryHistoryLocalQueryDto {
return {
initialInfo: mapInitialQueryInfoToDto(query.initialInfo),
t: "local",
evalLogLocation: query.evalLogLocation,
evalLogSummaryLocation: query.evalLogSummaryLocation,
jsonEvalLogSummaryLocation: query.jsonEvalLogSummaryLocation,
evalLogSummarySymbolsLocation: query.evalLogSummarySymbolsLocation,
failureReason: query.failureReason,
completedQuery:
query.completedQuery && mapCompletedQueryToDto(query.completedQuery),
};
}

function mapCompletedQueryToDto(
query: CompletedQueryInfo,
): CompletedQueryInfoDto {
const sortedResults = Object.fromEntries(
Object.entries(query.sortedResultsInfo).map(([key, value]) => {
return [key, mapSortedResultSetInfoToDto(value)];
}),
);

return {
query: mapQueryEvaluationInfoToDto(query.query),
result: {
runId: query.result.runId,
queryId: query.result.queryId,
resultType: query.result.resultType,
evaluationTime: query.result.evaluationTime,
message: query.result.message,
logFileLocation: query.result.logFileLocation,
},
logFileLocation: query.logFileLocation,
successful: query.successful,
message: query.message,
resultCount: query.resultCount,
sortedResultsInfo: sortedResults,
};
}

function mapSortDirectionToDto(sortDirection: SortDirection): SortDirectionDto {
switch (sortDirection) {
case SortDirection.asc:
return SortDirectionDto.asc;
case SortDirection.desc:
return SortDirectionDto.desc;
}
}

function mapRawResultsSortStateToDto(
sortState: RawResultsSortState,
): SortedResultSetInfoDto["sortState"] {
return {
columnIndex: sortState.columnIndex,
sortDirection: mapSortDirectionToDto(sortState.sortDirection),
};
}

function mapSortedResultSetInfoToDto(
resultSet: SortedResultSetInfo,
): SortedResultSetInfoDto {
return {
resultsPath: resultSet.resultsPath,
sortState: mapRawResultsSortStateToDto(resultSet.sortState),
};
}

function mapInitialQueryInfoToDto(
localQueryInitialInfo: InitialQueryInfo,
): InitialQueryInfoDto {
return {
userSpecifiedLabel: localQueryInitialInfo.userSpecifiedLabel,
queryText: localQueryInitialInfo.queryText,
isQuickQuery: localQueryInitialInfo.isQuickQuery,
isQuickEval: localQueryInitialInfo.isQuickEval,
quickEvalPosition: localQueryInitialInfo.quickEvalPosition,
queryPath: localQueryInitialInfo.queryPath,
databaseInfo: {
databaseUri: localQueryInitialInfo.databaseInfo.databaseUri,
name: localQueryInitialInfo.databaseInfo.name,
},
start: localQueryInitialInfo.start,
id: localQueryInitialInfo.id,
};
}

function mapQueryEvaluationInfoToDto(
queryEvaluationInfo: QueryEvaluationInfo,
): QueryEvaluationInfoDto {
return {
querySaveDir: queryEvaluationInfo.querySaveDir,
dbItemPath: queryEvaluationInfo.dbItemPath,
databaseHasMetadataFile: queryEvaluationInfo.databaseHasMetadataFile,
quickEvalPosition: queryEvaluationInfo.quickEvalPosition,
metadata: queryEvaluationInfo.metadata,
resultsPaths: queryEvaluationInfo.resultsPaths,
};
}
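Note: `mapCompletedQueryToDto` above (and its domain-model counterpart later in this diff) converts a `Record<string, T>` value-by-value with `Object.fromEntries(Object.entries(...).map(...))`. A standalone sketch of that pattern; the generic `mapValues` helper is hypothetical, not part of the codebase:

```typescript
// Hypothetical generic helper illustrating the pattern used by
// mapCompletedQueryToDto and mapCompletedQueryInfoToDomainModel.
function mapValues<V, W>(
  record: Record<string, V>,
  mapValue: (value: V) => W,
): Record<string, W> {
  return Object.fromEntries(
    Object.entries(record).map(([key, value]) => [key, mapValue(value)]),
  );
}

// Example: mapValues(query.sortedResultsInfo, mapSortedResultSetInfoToDto)
// produces the same shape as the inline code above.
```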
@@ -0,0 +1,163 @@
import {
LocalQueryInfo,
CompletedQueryInfo,
InitialQueryInfo,
} from "../../query-results";
import { QueryEvaluationInfo } from "../../run-queries-shared";
import { QueryHistoryInfo } from "../query-history-info";
import { VariantAnalysisHistoryItem } from "../variant-analysis-history-item";
import {
CompletedQueryInfoDto,
QueryEvaluationInfoDto,
InitialQueryInfoDto,
QueryHistoryLocalQueryDto,
SortDirectionDto,
InterpretedResultsSortStateDto,
SortedResultSetInfoDto,
RawResultsSortStateDto,
} from "./query-history-local-query-dto";
import { QueryHistoryItemDto } from "./query-history-dto";
import {
InterpretedResultsSortState,
RawResultsSortState,
SortDirection,
SortedResultSetInfo,
} from "../../pure/interface-types";

export function mapQueryHistoryToDomainModel(
queries: QueryHistoryItemDto[],
): QueryHistoryInfo[] {
return queries.map((d) => {
if (d.t === "variant-analysis") {
const query: VariantAnalysisHistoryItem = d;
return query;
} else if (d.t === "local") {
return mapLocalQueryItemToDomainModel(d);
}

throw Error(
`Unexpected or corrupted query history file. Unknown query history item: ${JSON.stringify(
d,
)}`,
);
});
}

function mapLocalQueryItemToDomainModel(
localQuery: QueryHistoryLocalQueryDto,
): LocalQueryInfo {
return new LocalQueryInfo(
mapInitialQueryInfoToDomainModel(localQuery.initialInfo),
undefined,
localQuery.failureReason,
localQuery.completedQuery &&
mapCompletedQueryInfoToDomainModel(localQuery.completedQuery),
localQuery.evalLogLocation,
localQuery.evalLogSummaryLocation,
localQuery.jsonEvalLogSummaryLocation,
localQuery.evalLogSummarySymbolsLocation,
);
}

function mapCompletedQueryInfoToDomainModel(
completedQuery: CompletedQueryInfoDto,
): CompletedQueryInfo {
const sortState =
completedQuery.interpretedResultsSortState &&
mapSortStateToDomainModel(completedQuery.interpretedResultsSortState);

const sortedResults = Object.fromEntries(
Object.entries(completedQuery.sortedResultsInfo).map(([key, value]) => {
return [key, mapSortedResultSetInfoToDomainModel(value)];
}),
);

return new CompletedQueryInfo(
mapQueryEvaluationInfoToDomainModel(completedQuery.query),
{
runId: completedQuery.result.runId,
queryId: completedQuery.result.queryId,
resultType: completedQuery.result.resultType,
evaluationTime: completedQuery.result.evaluationTime,
message: completedQuery.result.message,
logFileLocation: completedQuery.result.logFileLocation,
},
completedQuery.logFileLocation,
completedQuery.successful ?? completedQuery.sucessful,
completedQuery.message,
sortState,
completedQuery.resultCount,
sortedResults,
);
}

function mapInitialQueryInfoToDomainModel(
initialInfo: InitialQueryInfoDto,
): InitialQueryInfo {
return {
userSpecifiedLabel: initialInfo.userSpecifiedLabel,
queryText: initialInfo.queryText,
isQuickQuery: initialInfo.isQuickQuery,
isQuickEval: initialInfo.isQuickEval,
quickEvalPosition: initialInfo.quickEvalPosition,
queryPath: initialInfo.queryPath,
databaseInfo: {
databaseUri: initialInfo.databaseInfo.databaseUri,
name: initialInfo.databaseInfo.name,
},
start: new Date(initialInfo.start),
id: initialInfo.id,
};
}

function mapQueryEvaluationInfoToDomainModel(
evaluationInfo: QueryEvaluationInfoDto,
): QueryEvaluationInfo {
return new QueryEvaluationInfo(
evaluationInfo.querySaveDir,
evaluationInfo.dbItemPath,
evaluationInfo.databaseHasMetadataFile,
evaluationInfo.quickEvalPosition,
evaluationInfo.metadata,
);
}

function mapSortDirectionToDomainModel(
sortDirection: SortDirectionDto,
): SortDirection {
switch (sortDirection) {
case SortDirectionDto.asc:
return SortDirection.asc;
case SortDirectionDto.desc:
return SortDirection.desc;
}
}

function mapSortStateToDomainModel(
sortState: InterpretedResultsSortStateDto,
): InterpretedResultsSortState {
return {
sortBy: sortState.sortBy,
sortDirection: mapSortDirectionToDomainModel(sortState.sortDirection),
};
}

function mapSortedResultSetInfoToDomainModel(
sortedResultSetInfo: SortedResultSetInfoDto,
): SortedResultSetInfo {
return {
resultsPath: sortedResultSetInfo.resultsPath,
sortState: mapRawResultsSortStateToDomainModel(
sortedResultSetInfo.sortState,
),
};
}

function mapRawResultsSortStateToDomainModel(
sortState: RawResultsSortStateDto,
): RawResultsSortState {
return {
columnIndex: sortState.columnIndex,
sortDirection: mapSortDirectionToDomainModel(sortState.sortDirection),
};
}
@@ -0,0 +1,14 @@
// Contains models and consts for the data we want to store in the query history store.
// Changes to these models should be done carefully and account for backwards compatibility of data.

import { QueryHistoryLocalQueryDto } from "./query-history-local-query-dto";
import { QueryHistoryVariantAnalysisDto } from "./query-history-variant-analysis-dto";

export interface QueryHistoryDto {
version: number;
queries: QueryHistoryItemDto[];
}

export type QueryHistoryItemDto =
| QueryHistoryLocalQueryDto
| QueryHistoryVariantAnalysisDto;
@@ -0,0 +1,103 @@
// Contains models and consts for the data we want to store in the query history store.
// Changes to these models should be done carefully and account for backwards compatibility of data.

export interface QueryHistoryLocalQueryDto {
initialInfo: InitialQueryInfoDto;
t: "local";
evalLogLocation?: string;
evalLogSummaryLocation?: string;
jsonEvalLogSummaryLocation?: string;
evalLogSummarySymbolsLocation?: string;
completedQuery?: CompletedQueryInfoDto;
failureReason?: string;
}

export interface InitialQueryInfoDto {
userSpecifiedLabel?: string;
queryText: string;
isQuickQuery: boolean;
isQuickEval: boolean;
quickEvalPosition?: PositionDto;
queryPath: string;
databaseInfo: DatabaseInfoDto;
start: Date;
id: string;
}

interface DatabaseInfoDto {
name: string;
databaseUri: string;
}

interface PositionDto {
line: number;
column: number;
endLine: number;
endColumn: number;
fileName: string;
}

export interface CompletedQueryInfoDto {
query: QueryEvaluationInfoDto;
message?: string;
successful?: boolean;

// There once was a typo in the data model, which is why we need to support both
sucessful?: boolean;
result: EvaluationResultDto;
logFileLocation?: string;
resultCount: number;
sortedResultsInfo: Record<string, SortedResultSetInfoDto>;
interpretedResultsSortState?: InterpretedResultsSortStateDto;
}

export interface InterpretedResultsSortStateDto {
sortBy: InterpretedResultsSortColumnDto;
sortDirection: SortDirectionDto;
}

type InterpretedResultsSortColumnDto = "alert-message";

export interface SortedResultSetInfoDto {
resultsPath: string;
sortState: RawResultsSortStateDto;
}

export interface RawResultsSortStateDto {
columnIndex: number;
sortDirection: SortDirectionDto;
}

export enum SortDirectionDto {
asc,
desc,
}

interface EvaluationResultDto {
runId: number;
queryId: number;
resultType: number;
evaluationTime: number;
message?: string;
logFileLocation?: string;
}

export interface QueryEvaluationInfoDto {
querySaveDir: string;
dbItemPath: string;
databaseHasMetadataFile: boolean;
quickEvalPosition?: PositionDto;
metadata?: QueryMetadataDto;
resultsPaths: {
resultsPath: string;
interpretedResultsPath: string;
};
}

interface QueryMetadataDto {
name?: string;
description?: string;
id?: string;
kind?: string;
scored?: string;
}
@@ -10,13 +10,11 @@ import {
} from "../../pure/helpers-pure";
import { QueryHistoryInfo } from "../query-history-info";
import { redactableError } from "../../pure/errors";
import {
ALLOWED_QUERY_HISTORY_VERSIONS,
QueryHistoryData,
QueryHistoryDataItem,
} from "./query-history-data";
import { mapQueryHistoryToDomainModels } from "./data-mapper";
import { mapQueryHistoryToDataModels } from "./domain-mapper";
import { QueryHistoryDto, QueryHistoryItemDto } from "./query-history-dto";
import { mapQueryHistoryToDomainModel } from "./query-history-dto-mapper";
import { mapQueryHistoryToDto } from "./query-history-domain-mapper";

const ALLOWED_QUERY_HISTORY_VERSIONS = [1, 2];

export async function readQueryHistoryFromFile(
fsPath: string,
@@ -26,7 +24,7 @@ export async function readQueryHistoryFromFile(
return [];
}

const obj: QueryHistoryData = await readJson(fsPath, {
const obj: QueryHistoryDto = await readJson(fsPath, {
encoding: "utf8",
});

@@ -40,21 +38,21 @@ export async function readQueryHistoryFromFile(
const queries = obj.queries;
// Remove remote queries, which are not supported anymore.
const parsedQueries = queries.filter(
(q: QueryHistoryDataItem | { t: "remote" }) => q.t !== "remote",
(q: QueryHistoryItemDto | { t: "remote" }) => q.t !== "remote",
);

// Map the data models to the domain models.
const domainModels: QueryHistoryInfo[] =
mapQueryHistoryToDomainModels(parsedQueries);
mapQueryHistoryToDomainModel(parsedQueries);

// filter out queries that have been deleted on disk
// Filter out queries that have been deleted on disk
// most likely another workspace has deleted them because the
// queries aged out.
const filteredDomainModels: Promise<QueryHistoryInfo[]> = asyncFilter(
domainModels,
async (q) => {
if (q.t === "variant-analysis") {
// the query history store doesn't know where variant analysises are
// The query history store doesn't know where variant analysises are
// stored so we need to assume here that they exist. We check later
// to see if they exist on disk.
return true;
@@ -72,7 +70,7 @@ export async function readQueryHistoryFromFile(
fullMessage: `Error loading query history.\n${getErrorStack(e)}`,
},
);
// since the query history is invalid, it should be deleted so this error does not happen on next startup.
// Since the query history is invalid, it should be deleted so this error does not happen on next startup.
await remove(fsPath);
return [];
}
@@ -95,13 +93,13 @@ export async function writeQueryHistoryToFile(
if (!(await pathExists(fsPath))) {
await mkdir(dirname(fsPath), { recursive: true });
}
// remove incomplete local queries since they cannot be recreated on restart
// Remove incomplete local queries since they cannot be recreated on restart
const filteredQueries = queries.filter((q) =>
q.t === "local" ? q.completedQuery !== undefined : true,
);

// map domain model queries to data model
const queryHistoryData = mapQueryHistoryToDataModels(filteredQueries);
// Map domain model queries to data model
const queryHistoryData = mapQueryHistoryToDto(filteredQueries);

const data = JSON.stringify(
{

@@ -1,3 +1,6 @@
// Contains models and consts for the data we want to store in the query history store.
// Changes to these models should be done carefully and account for backwards compatibility of data.

import { QueryLanguage } from "../../common/query-language";
import { QueryStatus } from "../../query-status";
import {
@@ -6,20 +9,19 @@ import {
VariantAnalysisStatus,
} from "../../variant-analysis/shared/variant-analysis";

// Data Model for Variant Analysis Query History Items
// All data points are modelled, except enums.

export interface VariantAnalysisDataItem {
export interface QueryHistoryVariantAnalysisDto {
readonly t: "variant-analysis";
failureReason?: string;
resultCount?: number;
status: QueryStatus;
completed: boolean;
variantAnalysis: VariantAnalysisQueryHistoryData;
variantAnalysis: VariantAnalysisQueryHistoryDto;
userSpecifiedLabel?: string;
}

export interface VariantAnalysisQueryHistoryData {
export interface VariantAnalysisQueryHistoryDto {
id: number;
controllerRepo: {
id: number;
@@ -44,11 +46,11 @@ export interface VariantAnalysisQueryHistoryData {
completedAt?: string;
actionsWorkflowRunId?: number;
failureReason?: VariantAnalysisFailureReason;
scannedRepos?: VariantAnalysisScannedRepositoryData[];
skippedRepos?: VariantAnalysisSkippedRepositoriesData;
scannedRepos?: VariantAnalysisScannedRepositoryDto[];
skippedRepos?: VariantAnalysisSkippedRepositoriesDto;
}

export interface VariantAnalysisScannedRepositoryData {
export interface VariantAnalysisScannedRepositoryDto {
repository: {
id: number;
fullName: string;
@@ -62,19 +64,19 @@ export interface VariantAnalysisScannedRepositoryData {
failureMessage?: string;
}

export interface VariantAnalysisSkippedRepositoriesData {
accessMismatchRepos?: VariantAnalysisSkippedRepositoryGroupData;
notFoundRepos?: VariantAnalysisSkippedRepositoryGroupData;
noCodeqlDbRepos?: VariantAnalysisSkippedRepositoryGroupData;
overLimitRepos?: VariantAnalysisSkippedRepositoryGroupData;
export interface VariantAnalysisSkippedRepositoriesDto {
accessMismatchRepos?: VariantAnalysisSkippedRepositoryGroupDto;
notFoundRepos?: VariantAnalysisSkippedRepositoryGroupDto;
noCodeqlDbRepos?: VariantAnalysisSkippedRepositoryGroupDto;
overLimitRepos?: VariantAnalysisSkippedRepositoryGroupDto;
}

export interface VariantAnalysisSkippedRepositoryGroupData {
export interface VariantAnalysisSkippedRepositoryGroupDto {
repositoryCount: number;
repositories: VariantAnalysisSkippedRepositoryData[];
repositories: VariantAnalysisSkippedRepositoryDto[];
}

export interface VariantAnalysisSkippedRepositoryData {
export interface VariantAnalysisSkippedRepositoryDto {
id?: number;
fullName: string;
private?: boolean;
@@ -277,8 +277,12 @@ export class SkeletonQueryWizard {
): Promise<DatabaseItem | undefined> {
const dbItems = databaseItems || [];
const dbs = dbItems.filter(
(db) => db.language === language && db.name === databaseNwo,
(db) =>
db.language === language &&
db.name === databaseNwo &&
db.error === undefined,
);

if (dbs.length === 0) {
return undefined;
}
@@ -290,7 +294,9 @@ export class SkeletonQueryWizard {
databaseItems: readonly DatabaseItem[],
): Promise<DatabaseItem | undefined> {
const dbItems = databaseItems || [];
const dbs = dbItems.filter((db) => db.language === language);
const dbs = dbItems.filter(
(db) => db.language === language && db.error === undefined,
);
if (dbs.length === 0) {
return undefined;
}

@@ -0,0 +1 @@
export * from "./repo-tasks-store";
@@ -4,12 +4,12 @@ import {
VariantAnalysisRepoStatus,
} from "../shared/variant-analysis";
import {
VariantAnalysisRepositoryTaskData,
VariantAnalysisRepoStatusData,
} from "./repo-task-data-types";
VariantAnalysisRepositoryTaskDto,
VariantAnalysisRepoStatusDto,
} from "./repo-tasks-dto";

export function mapRepoTaskToDomain(
repoTask: VariantAnalysisRepositoryTaskData,
export function mapRepoTaskToDomainModel(
repoTask: VariantAnalysisRepositoryTaskDto,
): VariantAnalysisRepositoryTask {
return {
repository: {
@@ -17,7 +17,9 @@ export function mapRepoTaskToDomain(
fullName: repoTask.repository.fullName,
private: repoTask.repository.private,
},
analysisStatus: mapRepoTaskAnalysisStatusToDomain(repoTask.analysisStatus),
analysisStatus: mapRepoTaskAnalysisStatusToDomainModel(
repoTask.analysisStatus,
),
resultCount: repoTask.resultCount,
artifactSizeInBytes: repoTask.artifactSizeInBytes,
failureMessage: repoTask.failureMessage,
@@ -27,21 +29,21 @@ export function mapRepoTaskToDomain(
};
}

function mapRepoTaskAnalysisStatusToDomain(
analysisStatus: VariantAnalysisRepoStatusData,
function mapRepoTaskAnalysisStatusToDomainModel(
analysisStatus: VariantAnalysisRepoStatusDto,
): VariantAnalysisRepoStatus {
switch (analysisStatus) {
case VariantAnalysisRepoStatusData.Pending:
case VariantAnalysisRepoStatusDto.Pending:
return VariantAnalysisRepoStatus.Pending;
case VariantAnalysisRepoStatusData.InProgress:
case VariantAnalysisRepoStatusDto.InProgress:
return VariantAnalysisRepoStatus.InProgress;
case VariantAnalysisRepoStatusData.Succeeded:
case VariantAnalysisRepoStatusDto.Succeeded:
return VariantAnalysisRepoStatus.Succeeded;
case VariantAnalysisRepoStatusData.Failed:
case VariantAnalysisRepoStatusDto.Failed:
return VariantAnalysisRepoStatus.Failed;
case VariantAnalysisRepoStatusData.Canceled:
case VariantAnalysisRepoStatusDto.Canceled:
return VariantAnalysisRepoStatus.Canceled;
case VariantAnalysisRepoStatusData.TimedOut:
case VariantAnalysisRepoStatusDto.TimedOut:
return VariantAnalysisRepoStatus.TimedOut;
default:
assertNever(analysisStatus);
@@ -4,20 +4,20 @@ import {
VariantAnalysisRepoStatus,
} from "../shared/variant-analysis";
import {
VariantAnalysisRepositoryTaskData,
VariantAnalysisRepoStatusData,
} from "./repo-task-data-types";
VariantAnalysisRepositoryTaskDto,
VariantAnalysisRepoStatusDto,
} from "./repo-tasks-dto";

export function mapRepoTaskToData(
export function mapRepoTaskToDto(
repoTask: VariantAnalysisRepositoryTask,
): VariantAnalysisRepositoryTaskData {
): VariantAnalysisRepositoryTaskDto {
return {
repository: {
id: repoTask.repository.id,
fullName: repoTask.repository.fullName,
private: repoTask.repository.private,
},
analysisStatus: mapRepoTaskAnalysisStatusToData(repoTask.analysisStatus),
analysisStatus: mapRepoTaskAnalysisStatusToDto(repoTask.analysisStatus),
resultCount: repoTask.resultCount,
artifactSizeInBytes: repoTask.artifactSizeInBytes,
failureMessage: repoTask.failureMessage,
@@ -27,22 +27,22 @@ export function mapRepoTaskToData(
};
}

function mapRepoTaskAnalysisStatusToData(
function mapRepoTaskAnalysisStatusToDto(
analysisStatus: VariantAnalysisRepoStatus,
): VariantAnalysisRepoStatusData {
): VariantAnalysisRepoStatusDto {
switch (analysisStatus) {
case VariantAnalysisRepoStatus.Pending:
return VariantAnalysisRepoStatusData.Pending;
return VariantAnalysisRepoStatusDto.Pending;
case VariantAnalysisRepoStatus.InProgress:
return VariantAnalysisRepoStatusData.InProgress;
return VariantAnalysisRepoStatusDto.InProgress;
case VariantAnalysisRepoStatus.Succeeded:
return VariantAnalysisRepoStatusData.Succeeded;
return VariantAnalysisRepoStatusDto.Succeeded;
case VariantAnalysisRepoStatus.Failed:
return VariantAnalysisRepoStatusData.Failed;
return VariantAnalysisRepoStatusDto.Failed;
case VariantAnalysisRepoStatus.Canceled:
return VariantAnalysisRepoStatusData.Canceled;
return VariantAnalysisRepoStatusDto.Canceled;
case VariantAnalysisRepoStatus.TimedOut:
return VariantAnalysisRepoStatusData.TimedOut;
return VariantAnalysisRepoStatusDto.TimedOut;
default:
assertNever(analysisStatus);
}
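Note: both status mappers rely on `assertNever` from `pure/helpers-pure` to keep their `switch` statements exhaustive, so a newly added repo status fails to compile until both mapping directions handle it. A minimal sketch of the pattern; the local `assertNever`, `Status`, and `label` here are illustrative and may differ from the project's own helper:

```typescript
// Common exhaustiveness helper; the project's own version lives in
// pure/helpers-pure and may differ in its message.
function assertNever(value: never): never {
  throw new Error(`Unexpected value: ${JSON.stringify(value)}`);
}

type Status = "pending" | "succeeded";

function label(status: Status): string {
  switch (status) {
    case "pending":
      return "Pending";
    case "succeeded":
      return "Succeeded";
    default:
      // If Status gains a member, this branch no longer type-checks.
      return assertNever(status);
  }
}
```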
@@ -1,6 +1,6 @@
export interface VariantAnalysisRepositoryTaskData {
repository: RepositoryData;
analysisStatus: VariantAnalysisRepoStatusData;
export interface VariantAnalysisRepositoryTaskDto {
repository: RepositoryDto;
analysisStatus: VariantAnalysisRepoStatusDto;
resultCount?: number;
artifactSizeInBytes?: number;
failureMessage?: string;
@@ -9,13 +9,13 @@ export interface VariantAnalysisRepositoryTaskData {
artifactUrl?: string;
}

interface RepositoryData {
interface RepositoryDto {
id: number;
fullName: string;
private: boolean;
}

export enum VariantAnalysisRepoStatusData {
export enum VariantAnalysisRepoStatusDto {
Pending = "pending",
InProgress = "inProgress",
Succeeded = "succeeded",
@@ -1,8 +1,8 @@
import { outputJson, readJson } from "fs-extra";
import { join } from "path";
import { VariantAnalysisRepositoryTask } from "../shared/variant-analysis";
import { mapRepoTaskToData } from "./repo-task-to-data-mapper";
import { mapRepoTaskToDomain } from "./repo-task-to-domain-mapper";
import { mapRepoTaskToDto } from "./repo-tasks-dto-mapper";
import { mapRepoTaskToDomainModel } from "./repo-tasks-domain-mapper";

export const REPO_TASK_FILENAME = "repo_task.json";

@@ -10,7 +10,7 @@ export async function writeRepoTask(
storageDirectory: string,
repoTask: VariantAnalysisRepositoryTask,
): Promise<void> {
const repoTaskData = mapRepoTaskToData(repoTask);
const repoTaskData = mapRepoTaskToDto(repoTask);
await outputJson(join(storageDirectory, REPO_TASK_FILENAME), repoTaskData);
}

@@ -20,5 +20,5 @@ export async function readRepoTask(
const repoTaskData = await readJson(
join(storageDirectory, REPO_TASK_FILENAME),
);
return mapRepoTaskToDomain(repoTaskData);
return mapRepoTaskToDomainModel(repoTaskData);
}
@@ -17,7 +17,7 @@ import {
import { DisposableObject, DisposeHandler } from "../pure/disposable-object";
import { EventEmitter } from "vscode";
import { unzipFile } from "../pure/zip";
import { readRepoTask, writeRepoTask } from "./store/repo-task-store";
import { readRepoTask, writeRepoTask } from "./repo-tasks-store";

type CacheKey = `${number}/${string}`;


@@ -149,14 +149,14 @@ describe("loadDataExtensionYaml", () => {
});

it("returns undefined if given a string", () => {
const data = loadDataExtensionYaml(`extensions:
expect(() =>
loadDataExtensionYaml(`extensions:
- addsTo:
pack: codeql/java-all
extensible: sinkModel
data:
- ["org.sql2o","Connection",true,"createQuery","(String)","","Argument[0]","sql","manual"]
`);

expect(data).toBeUndefined();
`),
).toThrow("Invalid data extension YAML: must be object");
});
});

@@ -53,7 +53,7 @@ describe("Db panel UI commands", () => {
it.skip("should add new local db list", async () => {
// Add db list
jest.spyOn(window, "showQuickPick").mockResolvedValue({
kind: DbListKind.Local,
databaseKind: DbListKind.Local,
} as AddListQuickPickItem);
jest.spyOn(window, "showInputBox").mockResolvedValue("my-list-1");
await commandManager.execute(
@@ -73,7 +73,7 @@ describe("Db panel UI commands", () => {
it("should add new remote repository", async () => {
// Add db
jest.spyOn(window, "showQuickPick").mockResolvedValue({
kind: "repo",
remoteDatabaseKind: "repo",
} as RemoteDatabaseQuickPickItem);

jest.spyOn(window, "showInputBox").mockResolvedValue("owner1/repo1");
@@ -96,7 +96,7 @@ describe("Db panel UI commands", () => {
it("should add new remote owner", async () => {
// Add owner
jest.spyOn(window, "showQuickPick").mockResolvedValue({
kind: "owner",
remoteDatabaseKind: "owner",
} as RemoteDatabaseQuickPickItem);

jest.spyOn(window, "showInputBox").mockResolvedValue("owner1");

@@ -20,8 +20,9 @@ import {
} from "../../../src/local-databases";
import * as databaseFetcher from "../../../src/databaseFetcher";
import { createMockDB } from "../../factories/databases/databases";
import { asError } from "../../../src/pure/helpers-pure";

jest.setTimeout(40_000);
jest.setTimeout(80_000);

describe("SkeletonQueryWizard", () => {
let mockCli: CodeQLCliServer;
@@ -366,8 +367,15 @@ describe("SkeletonQueryWizard", () => {
describe("findDatabaseItemByNwo", () => {
describe("when the item exists", () => {
it("should return the database item", async () => {
const mockDbItem = createMockDB(dir);
const mockDbItem2 = createMockDB(dir);
const mockDbItem = createMockDB(dir, {
language: "ruby",
dateAdded: 123,
} as FullDatabaseOptions);
const mockDbItem2 = createMockDB(dir, {
language: "javascript",
} as FullDatabaseOptions);

jest.spyOn(mockDbItem, "name", "get").mockReturnValue("mock-name");

const databaseItem = await wizard.findDatabaseItemByNwo(
mockDbItem.language,
@@ -375,8 +383,40 @@ describe("SkeletonQueryWizard", () => {
[mockDbItem, mockDbItem2],
);

expect(databaseItem!.language).toEqual(mockDbItem.language);
expect(databaseItem!.name).toEqual(mockDbItem.name);
expect(JSON.stringify(databaseItem)).toEqual(
JSON.stringify(mockDbItem),
);
});

it("should ignore databases with errors", async () => {
const mockDbItem = createMockDB(dir, {
language: "ruby",
dateAdded: 123,
} as FullDatabaseOptions);
const mockDbItem2 = createMockDB(dir, {
language: "javascript",
} as FullDatabaseOptions);
const mockDbItem3 = createMockDB(dir, {
language: "ruby",
dateAdded: 345,
} as FullDatabaseOptions);

jest.spyOn(mockDbItem, "name", "get").mockReturnValue("mock-name");
jest.spyOn(mockDbItem3, "name", "get").mockReturnValue(mockDbItem.name);

jest
.spyOn(mockDbItem, "error", "get")
.mockReturnValue(asError("database go boom!"));

const databaseItem = await wizard.findDatabaseItemByNwo(
mockDbItem.language,
mockDbItem.name,
[mockDbItem, mockDbItem2, mockDbItem3],
);

expect(JSON.stringify(databaseItem)).toEqual(
JSON.stringify(mockDbItem3),
);
});
});

@@ -413,6 +453,32 @@ describe("SkeletonQueryWizard", () => {

expect(databaseItem).toEqual(mockDbItem);
});

it("should ignore databases with errors", async () => {
const mockDbItem = createMockDB(dir, {
language: "ruby",
} as FullDatabaseOptions);
const mockDbItem2 = createMockDB(dir, {
language: "javascript",
} as FullDatabaseOptions);
const mockDbItem3 = createMockDB(dir, {
language: "ruby",
} as FullDatabaseOptions);

jest
.spyOn(mockDbItem, "error", "get")
.mockReturnValue(asError("database go boom!"));

const databaseItem = await wizard.findDatabaseItemByLanguage("ruby", [
mockDbItem,
mockDbItem2,
mockDbItem3,
]);

expect(JSON.stringify(databaseItem)).toEqual(
JSON.stringify(mockDbItem3),
);
});
});

describe("when the item doesn't exist", () => {

@@ -12,6 +12,8 @@ import { readdir, readFile } from "fs-extra";
import { load } from "js-yaml";
import { dirname, join } from "path";
import { fetchExternalApisQuery } from "../../../../src/data-extensions-editor/queries/java";
import * as helpers from "../../../../src/helpers";
import { RedactableError } from "../../../../src/pure/errors";

function createMockUri(path = "/a/b/c/foo"): Uri {
return {
@@ -132,17 +134,27 @@ describe("readQueryResults", () => {
bqrsDecode: jest.fn(),
},
bqrsPath: "/tmp/results.bqrs",
logger: createMockLogger(),
};

let showAndLogExceptionWithTelemetrySpy: jest.SpiedFunction<
typeof helpers.showAndLogExceptionWithTelemetry
>;

beforeEach(() => {
showAndLogExceptionWithTelemetrySpy = jest.spyOn(
helpers,
"showAndLogExceptionWithTelemetry",
);
});

it("returns undefined when there are no results", async () => {
options.cliServer.bqrsInfo.mockResolvedValue({
"result-sets": [],
});

expect(await readQueryResults(options)).toBeUndefined();
expect(options.logger.log).toHaveBeenCalledWith(
expect.stringMatching(/Expected exactly one result set/),
expect(showAndLogExceptionWithTelemetrySpy).toHaveBeenCalledWith(
expect.any(RedactableError),
);
});

@@ -171,8 +183,8 @@ describe("readQueryResults", () => {
});

expect(await readQueryResults(options)).toBeUndefined();
expect(options.logger.log).toHaveBeenCalledWith(
expect.stringMatching(/Expected exactly one result set/),
expect(showAndLogExceptionWithTelemetrySpy).toHaveBeenCalledWith(
expect.any(RedactableError),
);
});


@@ -272,6 +272,13 @@ describe("helpers", () => {
class MockEnvironmentVariableCollection
implements EnvironmentVariableCollection
{
[Symbol.iterator](): Iterator<
[variable: string, mutator: EnvironmentVariableMutator],
any,
undefined
> {
throw new Error("Method not implemented.");
}
persistent = false;
replace(_variable: string, _value: string): void {
throw new Error("Method not implemented.");