Remove codeQL.monitorRemoteQuery command
@@ -4,28 +4,28 @@ import {
  CancellationTokenSource,
  commands,
  Disposable,
  env,
  ExtensionContext,
  extensions,
  languages,
  ProgressLocation,
  ProgressOptions,
  Uri,
  window as Window,
  env,
  window,
  ProviderResult,
  QuickPickItem,
  Range,
  workspace,
  ProviderResult,
  Uri,
  version as vscodeVersion,
  window as Window,
  window,
  workspace,
} from "vscode";
import { LanguageClient } from "vscode-languageclient/node";
import { platform, arch } from "os";
import { arch, platform } from "os";
import { ensureDir } from "fs-extra";
import { join, basename } from "path";
import { basename, join } from "path";
import { dirSync } from "tmp-promise";
import { testExplorerExtensionId, TestHub } from "vscode-test-adapter-api";
import { parse, lt } from "semver";
import { lt, parse } from "semver";

import { AstViewer } from "./astViewer";
import {
@@ -47,10 +47,10 @@ import { install } from "./languageSupport";
|
||||
import { DatabaseItem, DatabaseManager } from "./databases";
|
||||
import { DatabaseUI } from "./databases-ui";
|
||||
import {
|
||||
TemplateQueryDefinitionProvider,
|
||||
TemplateQueryReferenceProvider,
|
||||
TemplatePrintAstProvider,
|
||||
TemplatePrintCfgProvider,
|
||||
TemplateQueryDefinitionProvider,
|
||||
TemplateQueryReferenceProvider,
|
||||
} from "./contextual/templateProvider";
|
||||
import {
|
||||
DEFAULT_DISTRIBUTION_VERSION_RANGE,
|
||||
@@ -64,22 +64,22 @@ import {
} from "./distribution";
import {
  findLanguage,
  tmpDirDisposal,
  showBinaryChoiceDialog,
  showAndLogErrorMessage,
  showAndLogWarningMessage,
  showAndLogExceptionWithTelemetry,
  showAndLogInformationMessage,
  showAndLogWarningMessage,
  showBinaryChoiceDialog,
  showInformationMessageWithAction,
  tmpDir,
  showAndLogExceptionWithTelemetry,
  tmpDirDisposal,
} from "./helpers";
import { asError, assertNever, getErrorMessage } from "./pure/helpers-pure";
import { spawnIdeServer } from "./ide-server";
import { ResultsView } from "./interface";
import { WebviewReveal } from "./interface-utils";
import {
  ideServerLogger,
  extLogger,
  ideServerLogger,
  ProgressReporter,
  queryServerLogger,
} from "./common";
@@ -97,8 +97,8 @@ import {
  commandRunner,
  commandRunnerWithProgress,
  ProgressCallback,
  withProgress,
  ProgressUpdate,
  withProgress,
} from "./commandRunner";
import { CodeQlStatusBarHandler } from "./status-bar";
@@ -115,7 +115,6 @@ import {
  exportSelectedRemoteQueryResults,
  exportVariantAnalysisResults,
} from "./remote-queries/export-results";
import { RemoteQuery } from "./remote-queries/remote-query";
import { EvalLogViewer } from "./eval-log-viewer";
import { SummaryLanguageSupport } from "./log-insights/summary-language-support";
import { JoinOrderScannerProvider } from "./log-insights/join-order";
@@ -1129,15 +1128,6 @@ async function activateWithInstalledDistribution(
      ),
    );

  ctx.subscriptions.push(
    commandRunner(
      "codeQL.monitorRemoteQuery",
      async (queryId: string, query: RemoteQuery, token: CancellationToken) => {
        await rqm.monitorRemoteQuery(queryId, query, token);
      },
    ),
  );

  ctx.subscriptions.push(
    commandRunner("codeQL.copyRepoList", async (queryId: string) => {
      await rqm.copyRemoteQueryRepoListToClipboard(queryId);
@@ -539,11 +539,7 @@ export class QueryHistoryManager extends DisposableObject {
    await Promise.all(
      this.treeDataProvider.allHistory.map(async (item) => {
        if (item.t === "remote") {
          await this.remoteQueriesManager.rehydrateRemoteQuery(
            item.queryId,
            item.remoteQuery,
            item.status,
          );
          await this.remoteQueriesManager.rehydrateRemoteQuery(item.queryId);
        }
        if (item.t === "variant-analysis") {
          await this.variantAnalysisManager.rehydrateVariantAnalysis(
@@ -1,113 +1,11 @@
import { join } from "path";
import { pathExists, readFile, writeFile } from "fs-extra";
import {
  showAndLogExceptionWithTelemetry,
  showAndLogWarningMessage,
  tmpDir,
} from "../../helpers";
import { pathExists, writeFile } from "fs-extra";
import { Credentials } from "../../common/authentication";
import { extLogger } from "../../common";
import { RemoteQueryWorkflowResult } from "../remote-query-workflow-result";
import { DownloadLink, createDownloadPath } from "../download-link";
import { createDownloadPath, DownloadLink } from "../download-link";
import { RemoteQuery } from "../remote-query";
import {
  RemoteQueryFailureIndexItem,
  RemoteQueryResultIndex,
  RemoteQuerySuccessIndexItem,
} from "../remote-query-result-index";
import { asError, getErrorMessage } from "../../pure/helpers-pure";
import { unzipFile } from "../../pure/zip";
import { VariantAnalysis } from "../shared/variant-analysis";
import { redactableError } from "../../pure/errors";

export const RESULT_INDEX_ARTIFACT_NAME = "result-index";

interface ApiSuccessIndexItem {
  nwo: string;
  id: string;
  sha?: string;
  results_count: number;
  bqrs_file_size: number;
  sarif_file_size?: number;
  source_location_prefix: string;
}

interface ApiFailureIndexItem {
  nwo: string;
  id: string;
  error: string;
}

interface ApiResultIndex {
  successes: ApiSuccessIndexItem[];
  failures: ApiFailureIndexItem[];
}

export async function getRemoteQueryIndex(
  credentials: Credentials,
  remoteQuery: RemoteQuery,
): Promise<RemoteQueryResultIndex | undefined> {
  const controllerRepo = remoteQuery.controllerRepository;
  const owner = controllerRepo.owner;
  const repoName = controllerRepo.name;
  const workflowRunId = remoteQuery.actionsWorkflowRunId;

  const workflowUri = `https://github.com/${owner}/${repoName}/actions/runs/${workflowRunId}`;
  const artifactsUrlPath = `/repos/${owner}/${repoName}/actions/artifacts`;

  const artifactList = await listWorkflowRunArtifacts(
    credentials,
    owner,
    repoName,
    workflowRunId,
  );
  const resultIndexArtifactId = tryGetArtifactIDfromName(
    RESULT_INDEX_ARTIFACT_NAME,
    artifactList,
  );
  if (!resultIndexArtifactId) {
    return undefined;
  }
  const resultIndex = await getResultIndex(
    credentials,
    owner,
    repoName,
    resultIndexArtifactId,
  );

  const successes = resultIndex?.successes.map((item) => {
    const artifactId = getArtifactIDfromName(
      item.id,
      workflowUri,
      artifactList,
    );

    return {
      id: item.id.toString(),
      artifactId,
      nwo: item.nwo,
      sha: item.sha,
      resultCount: item.results_count,
      bqrsFileSize: item.bqrs_file_size,
      sarifFileSize: item.sarif_file_size,
      sourceLocationPrefix: item.source_location_prefix,
    } as RemoteQuerySuccessIndexItem;
  });

  const failures = resultIndex?.failures.map((item) => {
    return {
      id: item.id.toString(),
      nwo: item.nwo,
      error: item.error,
    } as RemoteQueryFailureIndexItem;
  });

  return {
    artifactsUrlPath,
    successes: successes || [],
    failures: failures || [],
  };
}

export async function cancelRemoteQuery(
  credentials: Credentials,
@@ -175,208 +73,6 @@ export async function downloadArtifactFromLink(
  return join(extractedPath, downloadLink.innerFilePath || "");
}

/**
 * Checks whether a specific artifact is present in the list of artifacts of a workflow run.
 * @param credentials Credentials for authenticating to the GitHub API.
 * @param owner
 * @param repo
 * @param workflowRunId The ID of the workflow run to get the artifact for.
 * @param artifactName The artifact name, as a string.
 * @returns A boolean indicating if the artifact is available.
 */
export async function isArtifactAvailable(
  credentials: Credentials,
  owner: string,
  repo: string,
  workflowRunId: number,
  artifactName: string,
): Promise<boolean> {
  const artifactList = await listWorkflowRunArtifacts(
    credentials,
    owner,
    repo,
    workflowRunId,
  );

  return tryGetArtifactIDfromName(artifactName, artifactList) !== undefined;
}

/**
 * Downloads the result index artifact and extracts the result index items.
 * @param credentials Credentials for authenticating to the GitHub API.
 * @param owner
 * @param repo
 * @param workflowRunId The ID of the workflow run to get the result index for.
 * @returns An object containing the result index.
 */
async function getResultIndex(
  credentials: Credentials,
  owner: string,
  repo: string,
  artifactId: number,
): Promise<ApiResultIndex | undefined> {
  const artifactPath = await downloadArtifact(
    credentials,
    owner,
    repo,
    artifactId,
  );
  const indexFilePath = join(artifactPath, "index.json");
  if (!(await pathExists(indexFilePath))) {
    void showAndLogWarningMessage(
      "Could not find an `index.json` file in the result artifact.",
    );
    return undefined;
  }
  const resultIndex = await readFile(join(artifactPath, "index.json"), "utf8");

  try {
    return JSON.parse(resultIndex);
  } catch (error) {
    throw new Error(`Invalid result index file: ${error}`);
  }
}

/**
 * Gets the status of a workflow run.
 * @param credentials Credentials for authenticating to the GitHub API.
 * @param owner
 * @param repo
 * @param workflowRunId The ID of the workflow run to get the result index for.
 * @returns The workflow run status.
 */
export async function getWorkflowStatus(
  credentials: Credentials,
  owner: string,
  repo: string,
  workflowRunId: number,
): Promise<RemoteQueryWorkflowResult> {
  const octokit = await credentials.getOctokit();

  const workflowRun = await octokit.rest.actions.getWorkflowRun({
    owner,
    repo,
    run_id: workflowRunId,
  });

  if (workflowRun.data.status === "completed") {
    if (workflowRun.data.conclusion === "success") {
      return { status: "CompletedSuccessfully" };
    } else {
      const error = getWorkflowError(workflowRun.data.conclusion);
      return { status: "CompletedUnsuccessfully", error };
    }
  }

  return { status: "InProgress" };
}

/**
 * Lists the workflow run artifacts for the given workflow run ID.
 * @param credentials Credentials for authenticating to the GitHub API.
 * @param owner
 * @param repo
 * @param workflowRunId The ID of the workflow run to list artifacts for.
 * @returns An array of artifact details (including artifact name and ID).
 */
async function listWorkflowRunArtifacts(
  credentials: Credentials,
  owner: string,
  repo: string,
  workflowRunId: number,
) {
  const octokit = await credentials.getOctokit();

  // There are limits on the number of artifacts that are returned by the API
  // so we use paging to make sure we retrieve all of them.
  let morePages = true;
  let pageNum = 1;
  const allArtifacts = [];

  while (morePages) {
    const response = await octokit.rest.actions.listWorkflowRunArtifacts({
      owner,
      repo,
      run_id: workflowRunId,
      per_page: 100,
      page: pageNum,
    });

    allArtifacts.push(...response.data.artifacts);
    pageNum++;
    if (response.data.artifacts.length < 100) {
      morePages = false;
    }
  }

  return allArtifacts;
}

/**
 * @param artifactName The artifact name, as a string.
 * @param artifacts An array of artifact details (from the "list workflow run artifacts" API response).
 * @returns The artifact ID corresponding to the given artifact name.
 */
function getArtifactIDfromName(
  artifactName: string,
  workflowUri: string,
  artifacts: Array<{ id: number; name: string }>,
): number {
  const artifactId = tryGetArtifactIDfromName(artifactName, artifacts);

  if (!artifactId) {
    const errorMessage = `Could not find artifact with name ${artifactName} in workflow ${workflowUri}.
Please check whether the workflow run has successfully completed.`;
    throw Error(errorMessage);
  }

  return artifactId;
}

/**
 * @param artifactName The artifact name, as a string.
 * @param artifacts An array of artifact details (from the "list workflow run artifacts" API response).
 * @returns The artifact ID corresponding to the given artifact name, if it exists.
 */
function tryGetArtifactIDfromName(
  artifactName: string,
  artifacts: Array<{ id: number; name: string }>,
): number | undefined {
  const artifact = artifacts.find((a) => a.name === artifactName);

  return artifact?.id;
}

/**
 * Downloads an artifact from a workflow run.
 * @param credentials Credentials for authenticating to the GitHub API.
 * @param owner
 * @param repo
 * @param artifactId The ID of the artifact to download.
 * @returns The path to the enclosing directory of the unzipped artifact.
 */
async function downloadArtifact(
  credentials: Credentials,
  owner: string,
  repo: string,
  artifactId: number,
): Promise<string> {
  const octokit = await credentials.getOctokit();
  const response = await octokit.rest.actions.downloadArtifact({
    owner,
    repo,
    artifact_id: artifactId,
    archive_format: "zip",
  });
  const artifactPath = join(tmpDir.name, `${artifactId}`);
  await unzipBuffer(
    response.data as ArrayBuffer,
    `${artifactPath}.zip`,
    artifactPath,
  );
  return artifactPath;
}

async function unzipBuffer(
  data: ArrayBuffer,
  filePath: string,
@@ -388,119 +84,3 @@ async function unzipBuffer(
  void extLogger.log(`Unzipping file to ${destinationPath}`);
  await unzipFile(filePath, destinationPath);
}

function getWorkflowError(conclusion: string | null): string {
  if (!conclusion) {
    return "Workflow finished without a conclusion";
  }

  if (conclusion === "cancelled") {
    return "Variant analysis execution was cancelled.";
  }

  if (conclusion === "timed_out") {
    return "Variant analysis execution timed out.";
  }

  if (conclusion === "failure") {
    // TODO: Get the actual error from the workflow or potentially
    // from an artifact from the action itself.
    return "Variant analysis execution has failed.";
  }

  return `Unexpected variant analysis execution conclusion: ${conclusion}`;
}

const repositoriesMetadataQuery = `query Stars($repos: String!, $pageSize: Int!, $cursor: String) {
  search(
    query: $repos
    type: REPOSITORY
    first: $pageSize
    after: $cursor
  ) {
    edges {
      node {
        ... on Repository {
          name
          owner {
            login
          }
          stargazerCount
          updatedAt
        }
      }
      cursor
    }
  }
}`;

type RepositoriesMetadataQueryResponse = {
  search: {
    edges: Array<{
      cursor: string;
      node: {
        name: string;
        owner: {
          login: string;
        };
        stargazerCount: number;
        updatedAt: string; // Actually a ISO Date string
      };
    }>;
  };
};

export type RepositoriesMetadata = Record<
  string,
  { starCount: number; lastUpdated: number }
>;

export async function getRepositoriesMetadata(
  credentials: Credentials,
  nwos: string[],
  pageSize = 100,
): Promise<RepositoriesMetadata> {
  const octokit = await credentials.getOctokit();
  const repos = `repo:${nwos.join(" repo:")} fork:true`;
  let cursor = null;
  const metadata: RepositoriesMetadata = {};
  try {
    do {
      const response: RepositoriesMetadataQueryResponse = await octokit.graphql(
        {
          query: repositoriesMetadataQuery,
          repos,
          pageSize,
          cursor,
        },
      );
      cursor =
        response.search.edges.length === pageSize
          ? response.search.edges[pageSize - 1].cursor
          : null;

      for (const edge of response.search.edges) {
        const node = edge.node;
        const owner = node.owner.login;
        const name = node.name;
        const starCount = node.stargazerCount;
        // lastUpdated is always negative since it happened in the past.
        const lastUpdated = new Date(node.updatedAt).getTime() - Date.now();
        metadata[`${owner}/${name}`] = {
          starCount,
          lastUpdated,
        };
      }
    } while (cursor);
  } catch (e) {
    void showAndLogExceptionWithTelemetry(
      redactableError(
        asError(e),
      )`Error retrieving repository metadata for variant analysis: ${getErrorMessage(
        e,
      )}`,
    );
  }

  return metadata;
}
@@ -1,38 +1,16 @@
import {
  CancellationToken,
  commands,
  env,
  EventEmitter,
  ExtensionContext,
} from "vscode";
import { CancellationToken, env, EventEmitter, ExtensionContext } from "vscode";
import { join } from "path";
import { pathExists, readFile, remove, writeFile } from "fs-extra";
import { pathExists, readFile, remove } from "fs-extra";
import { EOL } from "os";

import { CodeQLCliServer } from "../cli";
import {
  showAndLogErrorMessage,
  showAndLogExceptionWithTelemetry,
  showAndLogInformationMessage,
  showInformationMessageWithAction,
} from "../helpers";
import { showAndLogExceptionWithTelemetry } from "../helpers";
import { Logger } from "../common";
import { RemoteQueriesView } from "./remote-queries-view";
import { RemoteQuery } from "./remote-query";
import { RemoteQueriesMonitor } from "./remote-queries-monitor";
import {
  getRemoteQueryIndex,
  getRepositoriesMetadata,
  RepositoriesMetadata,
} from "./gh-api/gh-actions-api-client";
import { RemoteQueryResultIndex } from "./remote-query-result-index";
import {
  RemoteQueryResult,
  sumAnalysisSummariesResults,
} from "./remote-query-result";
import { DownloadLink } from "./download-link";
import { RemoteQueryResult } from "./remote-query-result";
import { AnalysesResultsManager } from "./analyses-results-manager";
import { asError, assertNever, getErrorMessage } from "../pure/helpers-pure";
import { asError, getErrorMessage } from "../pure/helpers-pure";
import { QueryStatus } from "../query-status";
import { DisposableObject } from "../pure/disposable-object";
import { AnalysisResults } from "./shared/analysis-result";
@@ -72,13 +50,12 @@ export class RemoteQueriesManager extends DisposableObject {
  private readonly remoteQueryRemovedEventEmitter;
  private readonly remoteQueryStatusUpdateEventEmitter;

  private readonly remoteQueriesMonitor: RemoteQueriesMonitor;
  private readonly analysesResultsManager: AnalysesResultsManager;
  private readonly view: RemoteQueriesView;

  constructor(
    ctx: ExtensionContext,
    private readonly app: App,
    app: App,
    cliServer: CodeQLCliServer,
    private readonly storagePath: string,
    logger: Logger,
@@ -91,7 +68,6 @@ export class RemoteQueriesManager extends DisposableObject {
      logger,
    );
    this.view = new RemoteQueriesView(ctx, logger, this.analysesResultsManager);
    this.remoteQueriesMonitor = new RemoteQueriesMonitor(logger);

    this.remoteQueryAddedEventEmitter = this.push(
      new EventEmitter<NewQueryEvent>(),
@@ -110,19 +86,11 @@ export class RemoteQueriesManager extends DisposableObject {
    this.push(this.view);
  }

  public async rehydrateRemoteQuery(
    queryId: string,
    query: RemoteQuery,
    status: QueryStatus,
  ) {
  public async rehydrateRemoteQuery(queryId: string) {
    if (!(await this.queryRecordExists(queryId))) {
      // In this case, the query was deleted from disk, most likely because it was purged
      // by another workspace.
      this.remoteQueryRemovedEventEmitter.fire({ queryId });
    } else if (status === QueryStatus.InProgress) {
      // In this case, last time we checked, the query was still in progress.
      // We need to setup the monitor to check for completion.
      void commands.executeCommand("codeQL.monitorRemoteQuery", queryId, query);
    }
  }
@@ -161,72 +129,6 @@ export class RemoteQueriesManager extends DisposableObject {
    }
  }

  public async monitorRemoteQuery(
    queryId: string,
    remoteQuery: RemoteQuery,
    cancellationToken: CancellationToken,
  ): Promise<void> {
    const queryWorkflowResult = await this.remoteQueriesMonitor.monitorQuery(
      remoteQuery,
      this.app.credentials,
      cancellationToken,
    );

    const executionEndTime = Date.now();

    if (queryWorkflowResult.status === "CompletedSuccessfully") {
      await this.downloadAvailableResults(
        queryId,
        remoteQuery,
        executionEndTime,
      );
    } else if (queryWorkflowResult.status === "CompletedUnsuccessfully") {
      if (queryWorkflowResult.error?.includes("cancelled")) {
        // Workflow was cancelled on the server
        this.remoteQueryStatusUpdateEventEmitter.fire({
          queryId,
          status: QueryStatus.Failed,
          failureReason: "Cancelled",
        });
        await this.downloadAvailableResults(
          queryId,
          remoteQuery,
          executionEndTime,
        );
        void showAndLogInformationMessage("Variant analysis was cancelled");
      } else {
        this.remoteQueryStatusUpdateEventEmitter.fire({
          queryId,
          status: QueryStatus.Failed,
          failureReason: queryWorkflowResult.error,
        });
        void showAndLogErrorMessage(
          `Variant analysis execution failed. Error: ${queryWorkflowResult.error}`,
        );
      }
    } else if (queryWorkflowResult.status === "Cancelled") {
      this.remoteQueryStatusUpdateEventEmitter.fire({
        queryId,
        status: QueryStatus.Failed,
        failureReason: "Cancelled",
      });
      await this.downloadAvailableResults(
        queryId,
        remoteQuery,
        executionEndTime,
      );
      void showAndLogInformationMessage("Variant analysis was cancelled");
    } else if (queryWorkflowResult.status === "InProgress") {
      // Should not get here. Only including this to ensure `assertNever` uses proper type checking.
      void showAndLogExceptionWithTelemetry(
        redactableError`Unexpected status: ${queryWorkflowResult.status}`,
      );
    } else {
      // Ensure all cases are covered
      assertNever(queryWorkflowResult.status);
    }
  }

  public async autoDownloadRemoteQueryResults(
    queryResult: RemoteQueryResult,
    token: CancellationToken,
@@ -268,65 +170,10 @@ export class RemoteQueriesManager extends DisposableObject {
    }
  }

  private mapQueryResult(
    executionEndTime: number,
    resultIndex: RemoteQueryResultIndex,
    queryId: string,
    metadata: RepositoriesMetadata,
  ): RemoteQueryResult {
    const analysisSummaries = resultIndex.successes.map((item) => ({
      nwo: item.nwo,
      databaseSha: item.sha || "HEAD",
      resultCount: item.resultCount,
      sourceLocationPrefix: item.sourceLocationPrefix,
      fileSizeInBytes: item.sarifFileSize
        ? item.sarifFileSize
        : item.bqrsFileSize,
      starCount: metadata[item.nwo]?.starCount,
      lastUpdated: metadata[item.nwo]?.lastUpdated,
      downloadLink: {
        id: item.artifactId.toString(),
        urlPath: `${resultIndex.artifactsUrlPath}/${item.artifactId}`,
        innerFilePath: item.sarifFileSize ? "results.sarif" : "results.bqrs",
        queryId,
      } as DownloadLink,
    }));
    const analysisFailures = resultIndex.failures.map((item) => ({
      nwo: item.nwo,
      error: item.error,
    }));

    return {
      executionEndTime,
      analysisSummaries,
      analysisFailures,
      queryId,
    };
  }

  public async openResults(query: RemoteQuery, queryResult: RemoteQueryResult) {
    await this.view.showResults(query, queryResult);
  }

  private async askToOpenResults(
    query: RemoteQuery,
    queryResult: RemoteQueryResult,
  ): Promise<void> {
    const totalResultCount = sumAnalysisSummariesResults(
      queryResult.analysisSummaries,
    );
    const totalRepoCount = queryResult.analysisSummaries.length;
    const message = `Query "${query.queryName}" run on ${totalRepoCount} repositories and returned ${totalResultCount} results`;

    const shouldOpenView = await showInformationMessageWithAction(
      message,
      "View",
    );
    if (shouldOpenView) {
      await this.openResults(query, queryResult);
    }
  }

  private async getRemoteQueryResult(
    queryId: string,
  ): Promise<RemoteQueryResult> {
@@ -336,15 +183,6 @@ export class RemoteQueriesManager extends DisposableObject {
    );
  }

  private async storeJsonFile<T>(
    queryId: string,
    fileName: string,
    obj: T,
  ): Promise<void> {
    const filePath = join(this.storagePath, queryId, fileName);
    await writeFile(filePath, JSON.stringify(obj, null, 2), "utf8");
  }

  private async retrieveJsonFile<T>(
    queryId: string,
    fileName: string,
@@ -363,73 +201,6 @@ export class RemoteQueriesManager extends DisposableObject {
    return await pathExists(filePath);
  }

  /**
   * Checks whether there's a result index artifact available for the given query.
   * If so, set the query status to `Completed` and auto-download the results.
   */
  private async downloadAvailableResults(
    queryId: string,
    remoteQuery: RemoteQuery,
    executionEndTime: number,
  ): Promise<void> {
    const resultIndex = await getRemoteQueryIndex(
      this.app.credentials,
      remoteQuery,
    );
    if (resultIndex) {
      const metadata = await this.getRepositoriesMetadata(resultIndex);
      const queryResult = this.mapQueryResult(
        executionEndTime,
        resultIndex,
        queryId,
        metadata,
      );
      const resultCount = sumAnalysisSummariesResults(
        queryResult.analysisSummaries,
      );
      this.remoteQueryStatusUpdateEventEmitter.fire({
        queryId,
        status: QueryStatus.Completed,
        repositoryCount: queryResult.analysisSummaries.length,
        resultCount,
      });

      await this.storeJsonFile(queryId, "query-result.json", queryResult);

      // Kick off auto-download of results in the background.
      void commands.executeCommand(
        "codeQL.autoDownloadRemoteQueryResults",
        queryResult,
      );

      // Ask if the user wants to open the results in the background.
      void this.askToOpenResults(remoteQuery, queryResult).then(
        noop,
        (e: unknown) =>
          void showAndLogExceptionWithTelemetry(
            redactableError(
              asError(e),
            )`Could not open query results. ${getErrorMessage(e)}`,
          ),
      );
    } else {
      const controllerRepo = `${remoteQuery.controllerRepository.owner}/${remoteQuery.controllerRepository.name}`;
      const workflowRunUrl = `https://github.com/${controllerRepo}/actions/runs/${remoteQuery.actionsWorkflowRunId}`;
      void showAndLogExceptionWithTelemetry(
        redactableError`There was an issue retrieving the result for the query [${remoteQuery.queryName}](${workflowRunUrl}).`,
      );
      this.remoteQueryStatusUpdateEventEmitter.fire({
        queryId,
        status: QueryStatus.Failed,
      });
    }
  }

  private async getRepositoriesMetadata(resultIndex: RemoteQueryResultIndex) {
    const nwos = resultIndex.successes.map((s) => s.nwo);
    return await getRepositoriesMetadata(this.app.credentials, nwos);
  }

  // Pulled from the analysis results manager, so that we can get access to
  // analyses results from the "export results" command.
  public getAnalysesResults(queryId: string): AnalysisResults[] {
@@ -1,70 +0,0 @@
import * as vscode from "vscode";
import { Logger } from "../common";
import { Credentials } from "../common/authentication";
import { sleep } from "../pure/time";
import {
  getWorkflowStatus,
  isArtifactAvailable,
  RESULT_INDEX_ARTIFACT_NAME,
} from "./gh-api/gh-actions-api-client";
import { RemoteQuery } from "./remote-query";
import { RemoteQueryWorkflowResult } from "./remote-query-workflow-result";

export class RemoteQueriesMonitor {
  // With a sleep of 5 seconds, the maximum number of attempts takes
  // us to just over 2 days worth of monitoring.
  private static readonly maxAttemptCount = 17280;
  private static readonly sleepTime = 5000;

  constructor(private readonly logger: Logger) {}

  public async monitorQuery(
    remoteQuery: RemoteQuery,
    credentials: Credentials,
    cancellationToken: vscode.CancellationToken,
  ): Promise<RemoteQueryWorkflowResult> {
    let attemptCount = 0;

    while (attemptCount <= RemoteQueriesMonitor.maxAttemptCount) {
      await sleep(RemoteQueriesMonitor.sleepTime);

      if (cancellationToken && cancellationToken.isCancellationRequested) {
        return { status: "Cancelled" };
      }

      const workflowStatus = await getWorkflowStatus(
        credentials,
        remoteQuery.controllerRepository.owner,
        remoteQuery.controllerRepository.name,
        remoteQuery.actionsWorkflowRunId,
      );

      // Even if the workflow indicates it has completed, artifacts
      // might still take a while to become available. So we need to
      // check for the artifact before we can declare the workflow
      // as having completed.
      if (workflowStatus.status === "CompletedSuccessfully") {
        const resultIndexAvailable = await isArtifactAvailable(
          credentials,
          remoteQuery.controllerRepository.owner,
          remoteQuery.controllerRepository.name,
          remoteQuery.actionsWorkflowRunId,
          RESULT_INDEX_ARTIFACT_NAME,
        );

        if (resultIndexAvailable) {
          return workflowStatus;
        }

        // We don't have a result-index yet, so we'll keep monitoring.
      } else if (workflowStatus.status !== "InProgress") {
        return workflowStatus;
      }

      attemptCount++;
    }

    void this.logger.log("Variant analysis monitoring timed out after 2 days");
    return { status: "Cancelled" };
  }
}