Merge remote-tracking branch 'origin/main' into koesie10/data-extension-editor-cli-tests
@@ -7,12 +7,14 @@ import { DatabaseManager } from "../local-databases";
 import { extLogger } from "../common";
 import { ensureDir } from "fs-extra";
 import { join } from "path";
+import { App } from "../common/app";
 
 export class DataExtensionsEditorModule {
   private readonly queryStorageDir: string;
 
   private constructor(
     private readonly ctx: ExtensionContext,
+    private readonly app: App,
     private readonly databaseManager: DatabaseManager,
     private readonly cliServer: CodeQLCliServer,
     private readonly queryRunner: QueryRunner,
@@ -26,6 +28,7 @@ export class DataExtensionsEditorModule {
 
   public static async initialize(
     ctx: ExtensionContext,
+    app: App,
     databaseManager: DatabaseManager,
     cliServer: CodeQLCliServer,
     queryRunner: QueryRunner,
@@ -33,6 +36,7 @@ export class DataExtensionsEditorModule {
   ): Promise<DataExtensionsEditorModule> {
     const dataExtensionsEditorModule = new DataExtensionsEditorModule(
       ctx,
+      app,
      databaseManager,
      cliServer,
      queryRunner,
@@ -54,6 +58,8 @@ export class DataExtensionsEditorModule {
 
     const view = new DataExtensionsEditorView(
       this.ctx,
+      this.app,
+      this.databaseManager,
       this.cliServer,
       this.queryRunner,
       this.queryStorageDir,
@@ -5,6 +5,7 @@ import {
   ViewColumn,
   window,
   workspace,
+  WorkspaceFolder,
 } from "vscode";
 import { AbstractWebview, WebviewPanelConfig } from "../abstract-webview";
 import {
@@ -20,9 +21,12 @@ import {
 import { extLogger } from "../common";
 import { readFile, writeFile } from "fs-extra";
 import { load as loadYaml } from "js-yaml";
-import { DatabaseItem } from "../local-databases";
+import { DatabaseItem, DatabaseManager } from "../local-databases";
 import { CodeQLCliServer } from "../cli";
 import { asError, assertNever, getErrorMessage } from "../pure/helpers-pure";
+import { generateFlowModel } from "./generate-flow-model";
+import { promptImportGithubDatabase } from "../databaseFetcher";
+import { App } from "../common/app";
 import { ResolvableLocationValue } from "../pure/bqrs-cli-types";
 import { showResolvableLocation } from "../interface-utils";
 import { decodeBqrsToExternalApiUsages } from "./bqrs";
@@ -32,12 +36,27 @@ import { createDataExtensionYaml, loadDataExtensionYaml } from "./yaml";
 import { ExternalApiUsage } from "./external-api-usage";
 import { ModeledMethod } from "./modeled-method";
 
+function getQlSubmoduleFolder(): WorkspaceFolder | undefined {
+  const workspaceFolder = workspace.workspaceFolders?.find(
+    (folder) => folder.name === "ql",
+  );
+  if (!workspaceFolder) {
+    void extLogger.log("No workspace folder 'ql' found");
+
+    return;
+  }
+
+  return workspaceFolder;
+}
+
 export class DataExtensionsEditorView extends AbstractWebview<
   ToDataExtensionsEditorMessage,
   FromDataExtensionsEditorMessage
 > {
   public constructor(
     ctx: ExtensionContext,
+    private readonly app: App,
+    private readonly databaseManager: DatabaseManager,
     private readonly cliServer: CodeQLCliServer,
     private readonly queryRunner: QueryRunner,
     private readonly queryStorageDir: string,
@@ -86,6 +105,10 @@ export class DataExtensionsEditorView extends AbstractWebview<
         );
         await this.loadExternalApiUsages();
 
+        break;
+      case "generateExternalApi":
+        await this.generateModeledMethods();
+
         break;
       default:
         assertNever(msg);
@@ -158,8 +181,8 @@ export class DataExtensionsEditorView extends AbstractWebview<
       }
 
       await this.postMessage({
-        t: "setExistingModeledMethods",
-        existingModeledMethods,
+        t: "addModeledMethods",
+        modeledMethods: existingModeledMethods,
       });
     } catch (e: unknown) {
       void extLogger.log(`Unable to read data extension YAML: ${e}`);
@@ -225,6 +248,92 @@ export class DataExtensionsEditorView extends AbstractWebview<
     }
   }
 
+  protected async generateModeledMethods(): Promise<void> {
+    const tokenSource = new CancellationTokenSource();
+
+    const selectedDatabase = this.databaseManager.currentDatabaseItem;
+
+    // The external API methods are in the library source code, so we need to ask
+    // the user to import the library database. We need to have the database
+    // imported to the query server, so we need to register it to our workspace.
+    const database = await promptImportGithubDatabase(
+      this.app.commands,
+      this.databaseManager,
+      this.app.workspaceStoragePath ?? this.app.globalStoragePath,
+      this.app.credentials,
+      (update) => this.showProgress(update),
+      tokenSource.token,
+      this.cliServer,
+    );
+    if (!database) {
+      await this.clearProgress();
+      void extLogger.log("No database chosen");
+
+      return;
+    }
+
+    // The library database was set as the current database by importing it,
+    // but we need to set it back to the originally selected database.
+    await this.databaseManager.setCurrentDatabaseItem(selectedDatabase);
+
+    const workspaceFolder = getQlSubmoduleFolder();
+    if (!workspaceFolder) {
+      return;
+    }
+
+    await this.showProgress({
+      step: 0,
+      maxStep: 4000,
+      message: "Generating modeled methods for library",
+    });
+
+    try {
+      await generateFlowModel({
+        cliServer: this.cliServer,
+        queryRunner: this.queryRunner,
+        queryStorageDir: this.queryStorageDir,
+        qlDir: workspaceFolder.uri.fsPath,
+        databaseItem: database,
+        onResults: async (results) => {
+          const modeledMethodsByName: Record<string, ModeledMethod> = {};
+
+          for (const result of results) {
+            modeledMethodsByName[result.signature] = result.modeledMethod;
+          }
+
+          await this.postMessage({
+            t: "addModeledMethods",
+            modeledMethods: modeledMethodsByName,
+            overrideNone: true,
+          });
+        },
+        progress: (update) => this.showProgress(update),
+        token: tokenSource.token,
+      });
+    } catch (e: unknown) {
+      void showAndLogExceptionWithTelemetry(
+        redactableError(
+          asError(e),
+        )`Failed to generate flow model: ${getErrorMessage(e)}`,
+      );
+    }
+
+    // After the flow model has been generated, we can remove the temporary database
+    // which we used for generating the flow model.
+    await this.databaseManager.removeDatabaseItem(
+      () =>
+        this.showProgress({
+          step: 3900,
+          maxStep: 4000,
+          message: "Removing temporary database",
+        }),
+      tokenSource.token,
+      database,
+    );
+
+    await this.clearProgress();
+  }
+
   /*
    * Progress in this class is a bit weird. Most of the progress is based on running the query.
    * Query progress is always between 0 and 1000. However, we still have some steps that need
@@ -232,6 +341,13 @@ export class DataExtensionsEditorView extends AbstractWebview<
    * that there's 1000 steps of the query progress since that takes the most time, and then
    * an additional 500 steps for the rest of the work. The progress doesn't need to be 100%
    * accurate, so this is just a rough estimate.
+   *
+   * For generating the modeled methods for an external library, the max step is 4000. This is
+   * based on the following steps:
+   * - 1000 for the summary model
+   * - 1000 for the sink model
+   * - 1000 for the source model
+   * - 1000 for the neutral model
    */
  private async showProgress(update: ProgressUpdate, maxStep?: number) {
    await this.postMessage({
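
The comment above pins the editor's overall progress scale at 4000 steps: four model-generation queries, each reporting query progress in the 0–1000 range. The following is a minimal sketch (not part of this commit) of how a single query's update could be folded into that overall scale, mirroring the `queryStep * 1000 + step` arithmetic in `generate-flow-model.ts` further down; the helper name and the simplified `ProgressUpdate` shape are illustrative assumptions.

```ts
// Sketch only: fold one query's 0-1000 progress into the overall 0-4000 scale.
// queryIndex is 0 for the summary query, 1 for sink, 2 for source, 3 for neutral.
type ProgressUpdate = { step: number; maxStep: number; message: string };

function toOverallProgress(
  queryIndex: number,
  update: ProgressUpdate,
): ProgressUpdate {
  return {
    message: update.message,
    // e.g. the sink query (index 1) at step 250 reports 1250 out of 4000
    step: queryIndex * 1000 + update.step,
    maxStep: 4000,
  };
}
```
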
@@ -251,12 +367,8 @@ export class DataExtensionsEditorView extends AbstractWebview<
   }
 
   private calculateModelFilename(): string | undefined {
-    const workspaceFolder = workspace.workspaceFolders?.find(
-      (folder) => folder.name === "ql",
-    );
+    const workspaceFolder = getQlSubmoduleFolder();
     if (!workspaceFolder) {
-      void extLogger.log("No workspace folder 'ql' found");
-
       return;
     }
   }
@@ -0,0 +1,141 @@
+import { CancellationToken } from "vscode";
+import { DatabaseItem } from "../local-databases";
+import { join } from "path";
+import { QueryRunner } from "../queryRunner";
+import { CodeQLCliServer } from "../cli";
+import { TeeLogger } from "../common";
+import { extensiblePredicateDefinitions } from "./yaml";
+import { ProgressCallback } from "../progress";
+import { getOnDiskWorkspaceFolders } from "../helpers";
+import {
+  ModeledMethodType,
+  ModeledMethodWithSignature,
+} from "./modeled-method";
+
+type FlowModelOptions = {
+  cliServer: CodeQLCliServer;
+  queryRunner: QueryRunner;
+  queryStorageDir: string;
+  qlDir: string;
+  databaseItem: DatabaseItem;
+  progress: ProgressCallback;
+  token: CancellationToken;
+  onResults: (results: ModeledMethodWithSignature[]) => void | Promise<void>;
+};
+
+async function getModeledMethodsFromFlow(
+  type: Exclude<ModeledMethodType, "none">,
+  queryName: string,
+  queryStep: number,
+  {
+    cliServer,
+    queryRunner,
+    queryStorageDir,
+    qlDir,
+    databaseItem,
+    progress,
+    token,
+  }: Omit<FlowModelOptions, "onResults">,
+): Promise<ModeledMethodWithSignature[]> {
+  const definition = extensiblePredicateDefinitions[type];
+
+  const query = join(
+    qlDir,
+    databaseItem.language,
+    "ql/src/utils/modelgenerator",
+    queryName,
+  );
+
+  const queryRun = queryRunner.createQueryRun(
+    databaseItem.databaseUri.fsPath,
+    { queryPath: query, quickEvalPosition: undefined },
+    false,
+    getOnDiskWorkspaceFolders(),
+    undefined,
+    queryStorageDir,
+    undefined,
+    undefined,
+  );
+
+  const queryResult = await queryRun.evaluate(
+    ({ step, message }) =>
+      progress({
+        message: `Generating ${type} model: ${message}`,
+        step: queryStep * 1000 + step,
+        maxStep: 4000,
+      }),
+    token,
+    new TeeLogger(queryRunner.logger, queryRun.outputDir.logPath),
+  );
+
+  const bqrsPath = queryResult.outputDir.bqrsPath;
+
+  const bqrsInfo = await cliServer.bqrsInfo(bqrsPath);
+  if (bqrsInfo["result-sets"].length !== 1) {
+    throw new Error(
+      `Expected exactly one result set, got ${bqrsInfo["result-sets"].length}`,
+    );
+  }
+
+  const resultSet = bqrsInfo["result-sets"][0];
+
+  const decodedResults = await cliServer.bqrsDecode(bqrsPath, resultSet.name);
+
+  const results = decodedResults.tuples;
+
+  return (
+    results
+      // This is just a sanity check. The query should only return strings.
+      .filter((result) => typeof result[0] === "string")
+      .map((result) => {
+        const row = result[0] as string;
+
+        return definition.readModeledMethod(row.split(";"));
+      })
+  );
+}
+
+export async function generateFlowModel({
+  onResults,
+  ...options
+}: FlowModelOptions) {
+  const summaryResults = await getModeledMethodsFromFlow(
+    "summary",
+    "CaptureSummaryModels.ql",
+    0,
+    options,
+  );
+  if (summaryResults) {
+    await onResults(summaryResults);
+  }
+
+  const sinkResults = await getModeledMethodsFromFlow(
+    "sink",
+    "CaptureSinkModels.ql",
+    1,
+    options,
+  );
+  if (sinkResults) {
+    await onResults(sinkResults);
+  }
+
+  const sourceResults = await getModeledMethodsFromFlow(
+    "source",
+    "CaptureSourceModels.ql",
+    2,
+    options,
+  );
+  if (sourceResults) {
+    await onResults(sourceResults);
+  }
+
+  const neutralResults = await getModeledMethodsFromFlow(
+    "neutral",
+    "CaptureNeutralModels.ql",
+    3,
+    options,
+  );
+  if (neutralResults) {
+    await onResults(neutralResults);
+  }
+}
@@ -11,3 +11,8 @@ export type ModeledMethod = {
   output: string;
   kind: string;
 };
+
+export type ModeledMethodWithSignature = {
+  signature: string;
+  modeledMethod: ModeledMethod;
+};
@@ -1,27 +1,31 @@
 import { ExternalApiUsage } from "./external-api-usage";
-import { ModeledMethod, ModeledMethodType } from "./modeled-method";
+import {
+  ModeledMethod,
+  ModeledMethodType,
+  ModeledMethodWithSignature,
+} from "./modeled-method";
 
 type ExternalApiUsageByType = {
   externalApiUsage: ExternalApiUsage;
   modeledMethod: ModeledMethod;
 };
 
-type DataExtensionDefinition = {
-  extensible: string;
+type ExtensiblePredicateDefinition = {
+  extensiblePredicate: string;
   generateMethodDefinition: (method: ExternalApiUsageByType) => any[];
-  readModeledMethod: (row: any[]) => [string, ModeledMethod] | undefined;
+  readModeledMethod: (row: any[]) => ModeledMethodWithSignature;
 };
 
 function readRowToMethod(row: any[]): string {
   return `${row[0]}.${row[1]}#${row[3]}${row[4]}`;
 }
 
-const definitions: Record<
+export const extensiblePredicateDefinitions: Record<
   Exclude<ModeledMethodType, "none">,
-  DataExtensionDefinition
+  ExtensiblePredicateDefinition
 > = {
   source: {
-    extensible: "sourceModel",
+    extensiblePredicate: "sourceModel",
     // extensible predicate sourceModel(
     // string package, string type, boolean subtypes, string name, string signature, string ext,
     // string output, string kind, string provenance
@@ -37,18 +41,18 @@ const definitions: Record<
       method.modeledMethod.kind,
       "manual",
     ],
-    readModeledMethod: (row) => [
-      readRowToMethod(row),
-      {
+    readModeledMethod: (row) => ({
+      signature: readRowToMethod(row),
+      modeledMethod: {
        type: "source",
        input: "",
        output: row[6],
        kind: row[7],
      },
-    ],
+    }),
   },
   sink: {
-    extensible: "sinkModel",
+    extensiblePredicate: "sinkModel",
     // extensible predicate sinkModel(
     // string package, string type, boolean subtypes, string name, string signature, string ext,
     // string input, string kind, string provenance
@@ -64,18 +68,18 @@ const definitions: Record<
       method.modeledMethod.kind,
      "manual",
    ],
-    readModeledMethod: (row) => [
-      readRowToMethod(row),
-      {
+    readModeledMethod: (row) => ({
+      signature: readRowToMethod(row),
+      modeledMethod: {
        type: "sink",
        input: row[6],
        output: "",
        kind: row[7],
      },
-    ],
+    }),
   },
   summary: {
-    extensible: "summaryModel",
+    extensiblePredicate: "summaryModel",
     // extensible predicate summaryModel(
     // string package, string type, boolean subtypes, string name, string signature, string ext,
     // string input, string output, string kind, string provenance
@@ -92,18 +96,18 @@ const definitions: Record<
       method.modeledMethod.kind,
      "manual",
    ],
-    readModeledMethod: (row) => [
-      readRowToMethod(row),
-      {
+    readModeledMethod: (row) => ({
+      signature: readRowToMethod(row),
+      modeledMethod: {
        type: "summary",
        input: row[6],
        output: row[7],
        kind: row[8],
      },
-    ],
+    }),
   },
   neutral: {
-    extensible: "neutralModel",
+    extensiblePredicate: "neutralModel",
     // extensible predicate neutralModel(
     // string package, string type, string name, string signature, string provenance
     // );
@@ -114,21 +118,21 @@ const definitions: Record<
       method.externalApiUsage.methodParameters,
      "manual",
    ],
-    readModeledMethod: (row) => [
-      `${row[0]}.${row[1]}#${row[2]}${row[3]}`,
-      {
+    readModeledMethod: (row) => ({
+      signature: `${row[0]}.${row[1]}#${row[2]}${row[3]}`,
+      modeledMethod: {
        type: "neutral",
        input: "",
        output: "",
        kind: "",
      },
-    ],
+    }),
   },
 };
 
 function createDataProperty(
   methods: ExternalApiUsageByType[],
-  definition: DataExtensionDefinition,
+  definition: ExtensiblePredicateDefinition,
 ) {
   if (methods.length === 0) {
     return " []";
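
The readModeledMethod callbacks above consume one result row at a time: the model-generator query returns the predicate's columns joined by semicolons, and readRowToMethod rebuilds the method signature from columns 0, 1, 3 and 4. Below is a hedged sketch (not part of this commit) showing that mapping for a sourceModel-style row — package, type, subtypes, name, signature, ext, output, kind, provenance — with invented example values.

```ts
// Sketch only: mapping a semicolon-separated sourceModel row (invented values).
const row =
  "org.example;HttpClient;true;get;(String);;ReturnValue;remote;manual".split(
    ";",
  );

// Mirrors readRowToMethod: `${row[0]}.${row[1]}#${row[3]}${row[4]}`
const signature = `${row[0]}.${row[1]}#${row[3]}${row[4]}`;
// => "org.example.HttpClient#get(String)"

// A source definition then reads columns 6 and 7 as output and kind:
const modeledMethod = {
  type: "source",
  input: "",
  output: row[6], // "ReturnValue"
  kind: row[7], // "remote"
};

console.log(signature, modeledMethod);
```
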
@@ -169,10 +173,10 @@ export function createDataExtensionYaml(
     }
   }
 
-  const extensions = Object.entries(definitions).map(
+  const extensions = Object.entries(extensiblePredicateDefinitions).map(
     ([type, definition]) => ` - addsTo:
       pack: codeql/java-all
-      extensible: ${definition.extensible}
+      extensible: ${definition.extensiblePredicate}
     data:${createDataProperty(
       methodsByType[type as Exclude<ModeledMethodType, "none">],
       definition,
@@ -214,8 +218,8 @@ export function loadDataExtensionYaml(
       continue;
     }
 
-    const definition = Object.values(definitions).find(
-      (definition) => definition.extensible === extensible,
+    const definition = Object.values(extensiblePredicateDefinitions).find(
+      (definition) => definition.extensiblePredicate === extensible,
     );
     if (!definition) {
       continue;
@@ -227,9 +231,9 @@ export function loadDataExtensionYaml(
       continue;
     }
 
-    const [apiInfo, modeledMethod] = result;
+    const { signature, modeledMethod } = result;
 
-    modeledMethods[apiInfo] = modeledMethod;
+    modeledMethods[signature] = modeledMethod;
   }
 }
 
@@ -868,6 +868,7 @@ async function activateWithInstalledDistribution(
   const dataExtensionsEditorModule =
     await DataExtensionsEditorModule.initialize(
       ctx,
+      app,
      dbm,
      cliServer,
      qs,
@@ -493,6 +493,19 @@ export interface ShowProgressMessage {
   message: string;
 }
 
+export interface AddModeledMethodsMessage {
+  t: "addModeledMethods";
+  modeledMethods: Record<string, ModeledMethod>;
+
+  /**
+   * If true, then any existing modeled methods set to "none" will be
+   * overwritten by the new modeled methods. Otherwise, the "none" modeled
+   * methods will not be overwritten, even if the new modeled methods
+   * contain a better model.
+   */
+  overrideNone?: boolean;
+}
+
 export interface JumpToUsageMessage {
   t: "jumpToUsage";
   location: ResolvableLocationValue;
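
The overrideNone flag documented above controls how incoming models are merged with what the user already has in the editor. Here is a hedged sketch (not part of this commit) of that merge, mirroring the reducer change in DataExtensionsEditor.tsx further down: existing non-"none" models always win, and "none" placeholders are replaced only when overrideNone is true. ModeledMethod is reduced to its type field for brevity.

```ts
// Sketch only: merge semantics for an AddModeledMethodsMessage.
type ModeledMethod = { type: string };

function mergeModeledMethods(
  existing: Record<string, ModeledMethod>,
  incoming: Record<string, ModeledMethod>,
  overrideNone = false,
): Record<string, ModeledMethod> {
  // When overrideNone is set, drop existing "none" entries so the incoming
  // (generated) models can take their place; otherwise keep everything.
  const kept = overrideNone
    ? Object.fromEntries(
        Object.entries(existing).filter(([, value]) => value.type !== "none"),
      )
    : existing;

  // Existing (kept) models still take precedence over incoming ones.
  return { ...incoming, ...kept };
}
```

Generated models arrive from the extension with overrideNone set to true, so they fill in unmodeled methods without clobbering anything the user has already modeled.
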
@@ -509,12 +522,17 @@ export interface SaveModeledMethods {
   modeledMethods: Record<string, ModeledMethod>;
 }
 
+export interface GenerateExternalApiMessage {
+  t: "generateExternalApi";
+}
+
 export type ToDataExtensionsEditorMessage =
   | SetExternalApiUsagesMessage
   | ShowProgressMessage
-  | SetExistingModeledMethods;
+  | AddModeledMethodsMessage;
 
 export type FromDataExtensionsEditorMessage =
   | ViewLoadedMsg
   | JumpToUsageMessage
-  | SaveModeledMethods;
+  | SaveModeledMethods
+  | GenerateExternalApiMessage;
@@ -0,0 +1,12 @@
+export interface VariantAnalysisScannedRepositoryStateData {
+  repositoryId: number;
+  downloadStatus: VariantAnalysisScannedRepositoryDownloadData;
+  downloadPercentage?: number;
+}
+
+export enum VariantAnalysisScannedRepositoryDownloadData {
+  Pending = "pending",
+  InProgress = "inProgress",
+  Succeeded = "succeeded",
+  Failed = "failed",
+}
@@ -1,17 +1,44 @@
 import { outputJson, readJson } from "fs-extra";
 import { VariantAnalysisScannedRepositoryState } from "../shared/variant-analysis";
+import { VariantAnalysisScannedRepositoryStateData } from "./repo-states-data-types";
+import { mapRepoStateToData } from "./repo-states-to-data-mapper";
+import { mapRepoStateToDomain } from "./repo-states-to-domain-mapper";
 
 export const REPO_STATES_FILENAME = "repo_states.json";
 
 export async function writeRepoStates(
   storagePath: string,
-  repoStates: Record<number, VariantAnalysisScannedRepositoryState> | undefined,
+  repoStates: Record<number, VariantAnalysisScannedRepositoryState>,
 ): Promise<void> {
-  return await outputJson(storagePath, repoStates);
+  // Map from repoStates Domain type to the repoStates Data type
+  const repoStatesData = Object.fromEntries(
+    Object.entries(repoStates).map(([key, value]) => {
+      return [key, mapRepoStateToData(value)];
+    }),
+  );
+
+  await outputJson(storagePath, repoStatesData);
 }
 
 export async function readRepoStates(
   storagePath: string,
-): Promise<Record<number, VariantAnalysisScannedRepositoryState>> {
-  return await readJson(storagePath);
+): Promise<Record<number, VariantAnalysisScannedRepositoryState> | undefined> {
+  try {
+    const repoStatesData: Record<
+      number,
+      VariantAnalysisScannedRepositoryStateData
+    > = await readJson(storagePath);
+
+    // Map from repoStates Data type to the repoStates Domain type
+    const repoStates = Object.fromEntries(
+      Object.entries(repoStatesData).map(([key, value]) => {
+        return [key, mapRepoStateToDomain(value)];
+      }),
+    );
+
+    return repoStates;
+  } catch (e) {
+    // Ignore this error, we simply might not have downloaded anything yet
+    return undefined;
+  }
 }
@@ -0,0 +1,36 @@
+import { assertNever } from "../../pure/helpers-pure";
+import {
+  VariantAnalysisScannedRepositoryDownloadStatus,
+  VariantAnalysisScannedRepositoryState,
+} from "../shared/variant-analysis";
+import {
+  VariantAnalysisScannedRepositoryDownloadData,
+  VariantAnalysisScannedRepositoryStateData,
+} from "./repo-states-data-types";
+
+export function mapRepoStateToData(
+  repoState: VariantAnalysisScannedRepositoryState,
+): VariantAnalysisScannedRepositoryStateData {
+  return {
+    repositoryId: repoState.repositoryId,
+    downloadStatus: processDownloadStatus(repoState.downloadStatus),
+    downloadPercentage: repoState.downloadPercentage,
+  };
+}
+
+function processDownloadStatus(
+  downloadedStatus: VariantAnalysisScannedRepositoryDownloadStatus,
+) {
+  switch (downloadedStatus) {
+    case VariantAnalysisScannedRepositoryDownloadStatus.Pending:
+      return VariantAnalysisScannedRepositoryDownloadData.Pending;
+    case VariantAnalysisScannedRepositoryDownloadStatus.InProgress:
+      return VariantAnalysisScannedRepositoryDownloadData.InProgress;
+    case VariantAnalysisScannedRepositoryDownloadStatus.Succeeded:
+      return VariantAnalysisScannedRepositoryDownloadData.Succeeded;
+    case VariantAnalysisScannedRepositoryDownloadStatus.Failed:
+      return VariantAnalysisScannedRepositoryDownloadData.Failed;
+    default:
+      assertNever(downloadedStatus);
+  }
+}
@@ -0,0 +1,36 @@
+import { assertNever } from "../../pure/helpers-pure";
+import {
+  VariantAnalysisScannedRepositoryState,
+  VariantAnalysisScannedRepositoryDownloadStatus,
+} from "../shared/variant-analysis";
+import {
+  VariantAnalysisScannedRepositoryStateData,
+  VariantAnalysisScannedRepositoryDownloadData,
+} from "./repo-states-data-types";
+
+export function mapRepoStateToDomain(
+  repoState: VariantAnalysisScannedRepositoryStateData,
+): VariantAnalysisScannedRepositoryState {
+  return {
+    repositoryId: repoState.repositoryId,
+    downloadStatus: processDownloadStatus(repoState.downloadStatus),
+    downloadPercentage: repoState.downloadPercentage,
+  };
+}
+
+function processDownloadStatus(
+  downloadedStatus: VariantAnalysisScannedRepositoryDownloadData,
+) {
+  switch (downloadedStatus) {
+    case VariantAnalysisScannedRepositoryDownloadData.Pending:
+      return VariantAnalysisScannedRepositoryDownloadStatus.Pending;
+    case VariantAnalysisScannedRepositoryDownloadData.InProgress:
+      return VariantAnalysisScannedRepositoryDownloadStatus.InProgress;
+    case VariantAnalysisScannedRepositoryDownloadData.Succeeded:
+      return VariantAnalysisScannedRepositoryDownloadStatus.Succeeded;
+    case VariantAnalysisScannedRepositoryDownloadData.Failed:
+      return VariantAnalysisScannedRepositoryDownloadStatus.Failed;
+    default:
+      assertNever(downloadedStatus);
+  }
+}
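
The new store modules persist a dedicated "data" shape instead of the in-memory domain types, so JSON already written to disk keeps loading even if the domain enums are later renamed; only the two mappers need updating. A cut-down, hedged sketch (not part of this commit) of that round trip follows — the enum and function names are simplified stand-ins, not the real types.

```ts
// Sketch only: round-tripping a domain enum through a persisted "data" enum.
enum DownloadStatusDomain {
  Pending = "pending",
  Succeeded = "succeeded",
}

enum DownloadStatusData {
  Pending = "pending",
  Succeeded = "succeeded",
}

function toData(status: DownloadStatusDomain): DownloadStatusData {
  switch (status) {
    case DownloadStatusDomain.Pending:
      return DownloadStatusData.Pending;
    case DownloadStatusDomain.Succeeded:
      return DownloadStatusData.Succeeded;
    default:
      throw new Error(`Unexpected status: ${status}`);
  }
}

function toDomain(status: DownloadStatusData): DownloadStatusDomain {
  switch (status) {
    case DownloadStatusData.Pending:
      return DownloadStatusDomain.Pending;
    case DownloadStatusData.Succeeded:
      return DownloadStatusDomain.Succeeded;
    default:
      throw new Error(`Unexpected status: ${status}`);
  }
}

// Renaming a DownloadStatusDomain member only touches the two mappers;
// JSON written with DownloadStatusData values keeps reading back correctly.
const persisted = toData(DownloadStatusDomain.Succeeded); // "succeeded"
const restored = toDomain(persisted); // DownloadStatusDomain.Succeeded
console.log(persisted, restored);
```
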
@@ -0,0 +1,25 @@
+export interface VariantAnalysisRepositoryTaskData {
+  repository: RepositoryData;
+  analysisStatus: VariantAnalysisRepoStatusData;
+  resultCount?: number;
+  artifactSizeInBytes?: number;
+  failureMessage?: string;
+  databaseCommitSha?: string;
+  sourceLocationPrefix?: string;
+  artifactUrl?: string;
+}
+
+interface RepositoryData {
+  id: number;
+  fullName: string;
+  private: boolean;
+}
+
+export enum VariantAnalysisRepoStatusData {
+  Pending = "pending",
+  InProgress = "inProgress",
+  Succeeded = "succeeded",
+  Failed = "failed",
+  Canceled = "canceled",
+  TimedOut = "timedOut",
+}
@@ -0,0 +1,24 @@
+import { outputJson, readJson } from "fs-extra";
+import { join } from "path";
+import { VariantAnalysisRepositoryTask } from "../shared/variant-analysis";
+import { mapRepoTaskToData } from "./repo-task-to-data-mapper";
+import { mapRepoTaskToDomain } from "./repo-task-to-domain-mapper";
+
+export const REPO_TASK_FILENAME = "repo_task.json";
+
+export async function writeRepoTask(
+  storageDirectory: string,
+  repoTask: VariantAnalysisRepositoryTask,
+): Promise<void> {
+  const repoTaskData = mapRepoTaskToData(repoTask);
+  await outputJson(join(storageDirectory, REPO_TASK_FILENAME), repoTaskData);
+}
+
+export async function readRepoTask(
+  storageDirectory: string,
+): Promise<VariantAnalysisRepositoryTask> {
+  const repoTaskData = await readJson(
+    join(storageDirectory, REPO_TASK_FILENAME),
+  );
+  return mapRepoTaskToDomain(repoTaskData);
+}
@@ -0,0 +1,49 @@
+import { assertNever } from "../../pure/helpers-pure";
+import {
+  VariantAnalysisRepositoryTask,
+  VariantAnalysisRepoStatus,
+} from "../shared/variant-analysis";
+import {
+  VariantAnalysisRepositoryTaskData,
+  VariantAnalysisRepoStatusData,
+} from "./repo-task-data-types";
+
+export function mapRepoTaskToData(
+  repoTask: VariantAnalysisRepositoryTask,
+): VariantAnalysisRepositoryTaskData {
+  return {
+    repository: {
+      id: repoTask.repository.id,
+      fullName: repoTask.repository.fullName,
+      private: repoTask.repository.private,
+    },
+    analysisStatus: mapRepoTaskAnalysisStatusToData(repoTask.analysisStatus),
+    resultCount: repoTask.resultCount,
+    artifactSizeInBytes: repoTask.artifactSizeInBytes,
+    failureMessage: repoTask.failureMessage,
+    databaseCommitSha: repoTask.databaseCommitSha,
+    sourceLocationPrefix: repoTask.sourceLocationPrefix,
+    artifactUrl: repoTask.artifactUrl,
+  };
+}
+
+function mapRepoTaskAnalysisStatusToData(
+  analysisStatus: VariantAnalysisRepoStatus,
+): VariantAnalysisRepoStatusData {
+  switch (analysisStatus) {
+    case VariantAnalysisRepoStatus.Pending:
+      return VariantAnalysisRepoStatusData.Pending;
+    case VariantAnalysisRepoStatus.InProgress:
+      return VariantAnalysisRepoStatusData.InProgress;
+    case VariantAnalysisRepoStatus.Succeeded:
+      return VariantAnalysisRepoStatusData.Succeeded;
+    case VariantAnalysisRepoStatus.Failed:
+      return VariantAnalysisRepoStatusData.Failed;
+    case VariantAnalysisRepoStatus.Canceled:
+      return VariantAnalysisRepoStatusData.Canceled;
+    case VariantAnalysisRepoStatus.TimedOut:
+      return VariantAnalysisRepoStatusData.TimedOut;
+    default:
+      assertNever(analysisStatus);
+  }
+}
@@ -0,0 +1,49 @@
+import { assertNever } from "../../pure/helpers-pure";
+import {
+  VariantAnalysisRepositoryTask,
+  VariantAnalysisRepoStatus,
+} from "../shared/variant-analysis";
+import {
+  VariantAnalysisRepositoryTaskData,
+  VariantAnalysisRepoStatusData,
+} from "./repo-task-data-types";
+
+export function mapRepoTaskToDomain(
+  repoTask: VariantAnalysisRepositoryTaskData,
+): VariantAnalysisRepositoryTask {
+  return {
+    repository: {
+      id: repoTask.repository.id,
+      fullName: repoTask.repository.fullName,
+      private: repoTask.repository.private,
+    },
+    analysisStatus: mapRepoTaskAnalysisStatusToDomain(repoTask.analysisStatus),
+    resultCount: repoTask.resultCount,
+    artifactSizeInBytes: repoTask.artifactSizeInBytes,
+    failureMessage: repoTask.failureMessage,
+    databaseCommitSha: repoTask.databaseCommitSha,
+    sourceLocationPrefix: repoTask.sourceLocationPrefix,
+    artifactUrl: repoTask.artifactUrl,
+  };
+}
+
+function mapRepoTaskAnalysisStatusToDomain(
+  analysisStatus: VariantAnalysisRepoStatusData,
+): VariantAnalysisRepoStatus {
+  switch (analysisStatus) {
+    case VariantAnalysisRepoStatusData.Pending:
+      return VariantAnalysisRepoStatus.Pending;
+    case VariantAnalysisRepoStatusData.InProgress:
+      return VariantAnalysisRepoStatus.InProgress;
+    case VariantAnalysisRepoStatusData.Succeeded:
+      return VariantAnalysisRepoStatus.Succeeded;
+    case VariantAnalysisRepoStatusData.Failed:
+      return VariantAnalysisRepoStatus.Failed;
+    case VariantAnalysisRepoStatusData.Canceled:
+      return VariantAnalysisRepoStatus.Canceled;
+    case VariantAnalysisRepoStatusData.TimedOut:
+      return VariantAnalysisRepoStatus.TimedOut;
+    default:
+      assertNever(analysisStatus);
+  }
+}
@@ -264,15 +264,11 @@ export class VariantAnalysisManager
       } else {
         await this.setVariantAnalysis(variantAnalysis);
 
-        try {
-          const repoStates = await readRepoStates(
-            this.getRepoStatesStoragePath(variantAnalysis.id),
-          );
-          this.repoStates.set(variantAnalysis.id, repoStates);
-        } catch (e) {
-          // Ignore this error, we simply might not have downloaded anything yet
-          this.repoStates.set(variantAnalysis.id, {});
-        }
+        const repoStatesFromDisk = await readRepoStates(
+          this.getRepoStatesStoragePath(variantAnalysis.id),
+        );
+
+        this.repoStates.set(variantAnalysis.id, repoStatesFromDisk || {});
 
         if (
           !(await isVariantAnalysisComplete(
@@ -598,10 +594,13 @@
         VariantAnalysisScannedRepositoryDownloadStatus.Succeeded;
       await this.onRepoStateUpdated(variantAnalysis.id, repoState);
 
-      await writeRepoStates(
-        this.getRepoStatesStoragePath(variantAnalysis.id),
-        this.repoStates.get(variantAnalysis.id),
-      );
+      const repoStates = this.repoStates.get(variantAnalysis.id);
+      if (repoStates) {
+        await writeRepoStates(
+          this.getRepoStatesStoragePath(variantAnalysis.id),
+          repoStates,
+        );
+      }
   }
 
   public async enqueueDownload(
@@ -1,4 +1,4 @@
-import { appendFile, pathExists, mkdir, outputJson, readJson } from "fs-extra";
+import { appendFile, pathExists } from "fs-extra";
 import fetch from "node-fetch";
 import { EOL } from "os";
 import { join } from "path";
@@ -17,6 +17,7 @@ import {
 import { DisposableObject, DisposeHandler } from "../pure/disposable-object";
 import { EventEmitter } from "vscode";
 import { unzipFile } from "../pure/zip";
+import { readRepoTask, writeRepoTask } from "./store/repo-task-store";
 
 type CacheKey = `${number}/${string}`;
 
@@ -37,7 +38,6 @@ export type LoadResultsOptions = {
 };
 
 export class VariantAnalysisResultsManager extends DisposableObject {
-  private static readonly REPO_TASK_FILENAME = "repo_task.json";
   private static readonly RESULTS_DIRECTORY = "results";
 
   private readonly cachedResults: Map<
@@ -78,14 +78,7 @@ export class VariantAnalysisResultsManager extends DisposableObject {
       repoTask.repository.fullName,
     );
 
-    if (!(await pathExists(resultDirectory))) {
-      await mkdir(resultDirectory, { recursive: true });
-    }
-
-    await outputJson(
-      join(resultDirectory, VariantAnalysisResultsManager.REPO_TASK_FILENAME),
-      repoTask,
-    );
+    await writeRepoTask(resultDirectory, repoTask);
 
     const zipFilePath = join(resultDirectory, "results.zip");
 
@@ -184,8 +177,8 @@ export class VariantAnalysisResultsManager extends DisposableObject {
       repositoryFullName,
     );
 
-    const repoTask: VariantAnalysisRepositoryTask = await readJson(
-      join(storageDirectory, VariantAnalysisResultsManager.REPO_TASK_FILENAME),
+    const repoTask: VariantAnalysisRepositoryTask = await readRepoTask(
+      storageDirectory,
     );
 
     if (!repoTask.databaseCommitSha || !repoTask.sourceLocationPrefix) {
@@ -37,6 +37,14 @@ export class VariantAnalysisViewSerializer implements WebviewPanelSerializer {
       return;
     }
 
+    // Between the time the webview is deserialized and the time the extension
+    // is fully activated, the user may close the webview. In this case, we
+    // should not attempt to restore the view.
+    let disposed = false;
+    const unregisterOnDidDispose = webviewPanel.onDidDispose(() => {
+      disposed = true;
+    });
+
     const variantAnalysisState: VariantAnalysisState =
       state as VariantAnalysisState;
 
@@ -46,11 +54,16 @@ export class VariantAnalysisViewSerializer implements WebviewPanelSerializer {
       variantAnalysisState.variantAnalysisId,
     );
     if (existingView) {
+      unregisterOnDidDispose.dispose();
       await existingView.openView();
       webviewPanel.dispose();
       return;
     }
 
+    if (disposed) {
+      return;
+    }
+
     const view = new VariantAnalysisView(
       this.ctx,
       this.app,
@@ -58,6 +71,8 @@ export class VariantAnalysisViewSerializer implements WebviewPanelSerializer {
       manager,
     );
     await view.restoreView(webviewPanel);
+
+    unregisterOnDidDispose.dispose();
   }
 
   private waitForExtensionFullyLoaded(): Promise<
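
The serializer now tracks whether the panel was closed while the extension was still loading, and only restores the view if it survived the wait; the dispose listener is cleaned up on every path. Below is a framework-free, hedged sketch (not part of this commit) of the same pattern — the Panel and Disposable shapes are stand-ins, not the VS Code API.

```ts
// Sketch only: guard an async restore against the panel being closed mid-wait.
type Disposable = { dispose(): void };

interface Panel {
  onDidDispose(listener: () => void): Disposable;
}

async function restoreWhenReady(
  panel: Panel,
  waitUntilReady: () => Promise<void>,
  restoreView: (panel: Panel) => Promise<void>,
): Promise<void> {
  let disposed = false;
  const subscription = panel.onDidDispose(() => {
    disposed = true;
  });

  // The user can close the panel while we wait for the extension to activate.
  await waitUntilReady();

  if (disposed) {
    subscription.dispose();
    return;
  }

  await restoreView(panel);
  subscription.dispose();
}
```
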
@@ -57,14 +57,21 @@ export function DataExtensionsEditor(): JSX.Element {
       case "showProgress":
         setProgress(msg);
         break;
-      case "setExistingModeledMethods":
+      case "addModeledMethods":
         setModeledMethods((oldModeledMethods) => {
+          const filteredOldModeledMethods = msg.overrideNone
+            ? Object.fromEntries(
+                Object.entries(oldModeledMethods).filter(
+                  ([, value]) => value.type !== "none",
+                ),
+              )
+            : oldModeledMethods;
+
           return {
-            ...msg.existingModeledMethods,
-            ...oldModeledMethods,
+            ...msg.modeledMethods,
+            ...filteredOldModeledMethods,
           };
         });
 
         break;
       default:
         assertNever(msg);
@@ -107,6 +114,12 @@ export function DataExtensionsEditor(): JSX.Element {
     });
   }, [externalApiUsages, modeledMethods]);
 
+  const onGenerateClick = useCallback(() => {
+    vscode.postMessage({
+      t: "generateExternalApi",
+    });
+  }, []);
+
   return (
     <DataExtensionsEditorContainer>
       {progress.maxStep > 0 && (
@@ -128,6 +141,12 @@ export function DataExtensionsEditor(): JSX.Element {
       <div>
         <h3>External API modelling</h3>
         <VSCodeButton onClick={onApplyClick}>Apply</VSCodeButton>
+
+        <VSCodeButton onClick={onGenerateClick}>
+          Download and generate
+        </VSCodeButton>
+        <br />
+        <br />
         <VSCodeDataGrid>
           <VSCodeDataGridRow rowType="header">
             <VSCodeDataGridCell cellType="columnheader" gridColumn={1}>
@@ -45,6 +45,10 @@ import { App } from "../../../../src/common/app";
 import { ExtensionApp } from "../../../../src/common/vscode/vscode-app";
 import { DbConfigStore } from "../../../../src/databases/config/db-config-store";
 import { mockedObject } from "../../utils/mocking.helpers";
+import {
+  REPO_STATES_FILENAME,
+  writeRepoStates,
+} from "../../../../src/variant-analysis/store/repo-states-store";
 
 // up to 3 minutes per test
 jest.setTimeout(3 * 60 * 1000);
@@ -119,8 +123,12 @@ describe("Variant Analysis Manager", () => {
     });
 
     it("should read in the repo states if it exists", async () => {
-      await fs.writeJson(
-        join(storagePath, variantAnalysis.id.toString(), "repo_states.json"),
+      await writeRepoStates(
+        join(
+          storagePath,
+          variantAnalysis.id.toString(),
+          REPO_STATES_FILENAME,
+        ),
         {
           [scannedRepos[0].repository.id]: {
             repositoryId: scannedRepos[0].repository.id,
@@ -177,7 +185,7 @@ describe("Variant Analysis Manager", () => {
       repoStatesPath = join(
         storagePath,
         variantAnalysis.id.toString(),
-        "repo_states.json",
+        REPO_STATES_FILENAME,
      );
    });
 