Resolve ML models and pass them to the query server

Henry Mercer
2021-11-18 12:27:41 +00:00
committed by Henry Mercer
parent e13349ceb0
commit 84ecbfc7a1
3 changed files with 54 additions and 4 deletions


@@ -87,6 +87,15 @@ export type QlpacksInfo = { [name: string]: string[] };
*/
export type LanguagesInfo = { [name: string]: string[] };
/** Information about an ML model, as resolved by `codeql resolve ml-models`. */
export type MlModelInfo = {
checksum: string;
path: string;
};
/** The expected output of `codeql resolve ml-models`. */
export type MlModelsInfo = { models: MlModelInfo[] };
/**
* The expected output of `codeql resolve qlref`.
*/
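For reference, the parsed output of `codeql resolve ml-models` — the `MlModelsInfo` type added above — might look like the following sketch. The checksum and path are illustrative placeholders, not real CLI output.

const exampleMlModelsInfo: MlModelsInfo = {
  models: [
    {
      checksum: '0123456789abcdef', // illustrative checksum value
      path: '/home/user/ql-packs/my-ml-model' // illustrative on-disk model path
    }
  ]
};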
@@ -584,6 +593,12 @@ export class CodeQLCliServer implements Disposable {
return await this.runJsonCodeQlCliCommand<QueryMetadata>(['resolve', 'metadata'], [queryPath], 'Resolving query metadata');
}
/** Resolves the ML models that should be available when evaluating a query. */
async resolveMlModels(additionalPacks: string[]): Promise<MlModelsInfo> {
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(['resolve', 'ml-models'], ['--additional-packs',
additionalPacks.join(path.delimiter)], 'Resolving ML models', false);
}
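A minimal usage sketch of the new method, assuming `cliServer` is a `CodeQLCliServer` instance and `diskWorkspaceFolders` holds on-disk pack directories (both identifiers are illustrative here; the actual wiring appears in the query-runner change further down).

const { models } = await cliServer.resolveMlModels(diskWorkspaceFolders);
models.forEach(model =>
  console.log(`Resolved ML model at '${model.path}' (checksum ${model.checksum})`));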
/**
* Gets the RAM setting for the query server.
* @param queryMemoryMb The maximum amount of RAM to use, in MB.
@@ -621,16 +636,16 @@ export class CodeQLCliServer implements Disposable {
return await this.runCodeQlCliCommand(['database', 'unbundle'], subcommandArgs, `Extracting ${archivePath} to directory ${target}`);
}
/**
* Uses a .qhelp file to generate Query Help documentation in a specified format.
* @param pathToQhelp The path to the .qhelp file
* @param format The format in which the query help should be generated {@link https://codeql.github.com/docs/codeql-cli/manual/generate-query-help/#cmdoption-codeql-generate-query-help-format}
* @param outputDirectory The output directory for the generated file
*/
async generateQueryHelp(pathToQhelp:string, outputDirectory?: string): Promise<string> {
async generateQueryHelp(pathToQhelp: string, outputDirectory?: string): Promise<string> {
const subcommandArgs = ['--format=markdown'];
if(outputDirectory) subcommandArgs.push('--output', outputDirectory);
if (outputDirectory) subcommandArgs.push('--output', outputDirectory);
subcommandArgs.push(pathToQhelp);
return await this.runCodeQlCliCommand(['generate', 'query-help'], subcommandArgs, `Generating qhelp in markdown format at ${outputDirectory}`);
@@ -1166,6 +1181,11 @@ export class CliVersionConstraint {
*/
public static CLI_VERSION_REMOTE_QUERIES = new SemVer('2.6.3');
/**
* CLI version where the `resolve ml-models` subcommand was introduced.
*/
public static CLI_VERSION_WITH_RESOLVE_ML_MODELS = new SemVer('2.7.3');
constructor(private readonly cli: CodeQLCliServer) {
/**/
}
@@ -1210,4 +1230,8 @@ export class CliVersionConstraint {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_REMOTE_QUERIES);
}
async supportsResolveMlModels() {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_ML_MODELS);
}
}


@@ -711,6 +711,11 @@ export interface EvaluateQueriesParams {
export type TemplateDefinitions = { [key: string]: TemplateSource }
export interface MlModel {
/** A URI pointing to the root directory of the model. */
uri: string;
}
/**
* A single query that should be run
*/
@@ -744,6 +749,11 @@ export interface QueryToRun {
* map should be set to the empty set or give an error.
*/
allowUnknownTemplates: boolean;
/**
* The list of ML models that should be made available
* when evaluating the query.
*/
availableMlModels?: MlModel[];
}
/**


@@ -86,6 +86,7 @@ export class QueryInfo {
async run(
qs: qsClient.QueryServerClient,
upgradeQlo: string | undefined,
availableMlModels: cli.MlModelInfo[],
progress: ProgressCallback,
token: CancellationToken,
): Promise<messages.EvaluationResult> {
@@ -93,12 +94,15 @@ export class QueryInfo {
const callbackId = qs.registerCallback(res => { result = res; });
const availableMlModelUris: messages.MlModel[] = availableMlModels.map(model => ({ uri: Uri.file(model.path).toString() }));
const queryToRun: messages.QueryToRun = {
resultsPath: this.resultsPaths.resultsPath,
qlo: Uri.file(this.compiledQueryPath).toString(),
compiledUpgrade: upgradeQlo && Uri.file(upgradeQlo).toString(),
allowUnknownTemplates: true,
templateValues: this.templates,
availableMlModels: availableMlModelUris,
id: callbackId,
timeoutSecs: qs.config.timeoutSecs,
};
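As a side note on the mapping above: `Uri.file(model.path).toString()` converts each resolved on-disk model path into a `file://` URI before it is sent to the query server. For an illustrative POSIX path:

// Uri.file('/home/user/ml-models/my-model').toString()
//   === 'file:///home/user/ml-models/my-model'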
@@ -612,6 +616,18 @@ export async function compileAndRunQueryAgainstDatabase(
void logger.log(`Couldn't resolve metadata for ${qlProgram.queryPath}: ${e}`);
}
let availableMlModels: cli.MlModelInfo[] = [];
if (await cliServer.cliConstraints.supportsResolveMlModels()) {
try {
availableMlModels = (await cliServer.resolveMlModels(diskWorkspaceFolders)).models;
void logger.log(`Found available ML models at the following paths: ${availableMlModels.map(x => `'${x.path}'`).join(', ')}.`);
} catch (e) {
const message = `Couldn't resolve available ML models for ${qlProgram.queryPath}: ${e}`;
void logger.log(message);
void showAndLogErrorMessage(message);
}
}
const query = new QueryInfo(qlProgram, db, packConfig.dbscheme, quickEvalPosition, metadata, templates);
const upgradeDir = await tmp.dir({ dir: upgradesTmpDir.name, unsafeCleanup: true });
@@ -634,7 +650,7 @@ export async function compileAndRunQueryAgainstDatabase(
}
if (errors.length === 0) {
const result = await query.run(qs, upgradeQlo, progress, token);
const result = await query.run(qs, upgradeQlo, availableMlModels, progress, token);
if (result.resultType !== messages.QueryResultType.SUCCESS) {
const message = result.message || 'Failed to run query';
void logger.log(message);