Autofix import/no-namespace
I'm leaving the rule turned off, since the codebase still has 100+ offenses that aren't autofixable.
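For context: import/no-namespace flags namespace imports, and its autofix rewrites them into named imports, which is the shape of every change below. A minimal sketch of the transformation (illustrative identifiers, not lines from this commit):

    // Before: flagged by import/no-namespace
    import * as path from "path";
    const outDir = path.join(__dirname, "out");

    // After the autofix
    import { join } from "path";
    const outDir = join(__dirname, "out");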
@@ -1,4 +1,4 @@
-import * as gulp from "gulp";
+import { src, dest } from "gulp";
 // eslint-disable-next-line @typescript-eslint/no-var-requires
 const replace = require("gulp-replace");
 
@@ -13,8 +13,7 @@ export function injectAppInsightsKey() {
   }
 
   // replace the key
-  return gulp
-    .src(["out/telemetry.js"])
+  return src(["out/telemetry.js"])
     .pipe(replace(/REPLACE-APP-INSIGHTS-KEY/, process.env.APP_INSIGHTS_KEY))
-    .pipe(gulp.dest("out/"));
+    .pipe(dest("out/"));
 }

@@ -1,5 +1,13 @@
-import * as fs from "fs-extra";
-import * as path from "path";
+import {
+  copy,
+  readFile,
+  mkdirs,
+  readdir,
+  unlinkSync,
+  remove,
+  writeFile,
+} from "fs-extra";
+import { resolve, join } from "path";
 
 export interface DeployedPackage {
   distPath: string;
@@ -25,12 +33,9 @@ async function copyPackage(
 ): Promise<void> {
   for (const file of packageFiles) {
     console.log(
-      `copying ${path.resolve(sourcePath, file)} to ${path.resolve(
-        destPath,
-        file,
-      )}`,
+      `copying ${resolve(sourcePath, file)} to ${resolve(destPath, file)}`,
     );
-    await fs.copy(path.resolve(sourcePath, file), path.resolve(destPath, file));
+    await copy(resolve(sourcePath, file), resolve(destPath, file));
   }
 }
 
@@ -39,13 +44,13 @@ export async function deployPackage(
 ): Promise<DeployedPackage> {
   try {
     const packageJson: any = JSON.parse(
-      await fs.readFile(packageJsonPath, "utf8"),
+      await readFile(packageJsonPath, "utf8"),
     );
 
     // Default to development build; use flag --release to indicate release build.
     const isDevBuild = !process.argv.includes("--release");
-    const distDir = path.join(__dirname, "../../../dist");
-    await fs.mkdirs(distDir);
+    const distDir = join(__dirname, "../../../dist");
+    await mkdirs(distDir);
 
     if (isDevBuild) {
       // NOTE: rootPackage.name had better not have any regex metacharacters
@@ -54,11 +59,11 @@ export async function deployPackage(
       );
       // Dev package filenames are of the form
       // vscode-codeql-0.0.1-dev.2019.9.27.19.55.20.vsix
-      (await fs.readdir(distDir))
+      (await readdir(distDir))
         .filter((name) => name.match(oldDevBuildPattern))
         .map((build) => {
           console.log(`Deleting old dev build ${build}...`);
-          fs.unlinkSync(path.join(distDir, build));
+          unlinkSync(join(distDir, build));
         });
       const now = new Date();
       packageJson.version =
@@ -69,16 +74,16 @@ export async function deployPackage(
         `.${now.getUTCHours()}.${now.getUTCMinutes()}.${now.getUTCSeconds()}`;
     }
 
-    const distPath = path.join(distDir, packageJson.name);
-    await fs.remove(distPath);
-    await fs.mkdirs(distPath);
+    const distPath = join(distDir, packageJson.name);
+    await remove(distPath);
+    await mkdirs(distPath);
 
-    await fs.writeFile(
-      path.join(distPath, "package.json"),
+    await writeFile(
+      join(distPath, "package.json"),
       JSON.stringify(packageJson, null, 2),
     );
 
-    const sourcePath = path.join(__dirname, "..");
+    const sourcePath = join(__dirname, "..");
     console.log(
       `Copying package '${packageJson.name}' and its dependencies to '${distPath}'...`,
     );

@@ -1,4 +1,4 @@
-import * as gulp from "gulp";
+import { series, parallel } from "gulp";
 import { compileTypeScript, watchTypeScript, cleanOutput } from "./typescript";
 import { compileTextMateGrammar } from "./textmate";
 import { copyTestData, watchTestData } from "./tests";
@@ -6,9 +6,9 @@ import { compileView, watchView } from "./webpack";
 import { packageExtension } from "./package";
 import { injectAppInsightsKey } from "./appInsights";
 
-export const buildWithoutPackage = gulp.series(
+export const buildWithoutPackage = series(
   cleanOutput,
-  gulp.parallel(
+  parallel(
     compileTypeScript,
     compileTextMateGrammar,
     compileView,
@@ -27,7 +27,7 @@ export {
   injectAppInsightsKey,
   compileView,
 };
-export default gulp.series(
+export default series(
   buildWithoutPackage,
   injectAppInsightsKey,
   packageExtension,

@@ -1,22 +1,22 @@
-import * as path from "path";
+import { resolve } from "path";
 import { deployPackage } from "./deploy";
-import * as childProcess from "child-process-promise";
+import { spawn } from "child-process-promise";
 
 export async function packageExtension(): Promise<void> {
-  const deployedPackage = await deployPackage(path.resolve("package.json"));
+  const deployedPackage = await deployPackage(resolve("package.json"));
   console.log(
     `Packaging extension '${deployedPackage.name}@${deployedPackage.version}'...`,
   );
   const args = [
     "package",
     "--out",
-    path.resolve(
+    resolve(
       deployedPackage.distPath,
       "..",
       `${deployedPackage.name}-${deployedPackage.version}.vsix`,
     ),
   ];
-  const proc = childProcess.spawn("./node_modules/.bin/vsce", args, {
+  const proc = spawn("./node_modules/.bin/vsce", args, {
     cwd: deployedPackage.distPath,
   });
   proc.childProcess.stdout!.on("data", (data) => {

@@ -1,21 +1,21 @@
-import * as gulp from "gulp";
+import { watch, src, dest } from "gulp";
 
 export function copyTestData() {
   return Promise.all([copyNoWorkspaceData(), copyCliIntegrationData()]);
 }
 
 export function watchTestData() {
-  return gulp.watch(["src/vscode-tests/*/data/**/*"], copyTestData);
+  return watch(["src/vscode-tests/*/data/**/*"], copyTestData);
 }
 
 function copyNoWorkspaceData() {
-  return gulp
-    .src("src/vscode-tests/no-workspace/data/**/*")
-    .pipe(gulp.dest("out/vscode-tests/no-workspace/data"));
+  return src("src/vscode-tests/no-workspace/data/**/*").pipe(
+    dest("out/vscode-tests/no-workspace/data"),
+  );
 }
 
 function copyCliIntegrationData() {
-  return gulp
-    .src("src/vscode-tests/cli-integration/data/**/*")
-    .pipe(gulp.dest("out/vscode-tests/cli-integration/data"));
+  return src("src/vscode-tests/cli-integration/data/**/*").pipe(
+    dest("out/vscode-tests/cli-integration/data"),
+  );
 }

@@ -1,6 +1,6 @@
-import * as gulp from "gulp";
-import * as jsYaml from "js-yaml";
-import * as through from "through2";
+import { src, dest } from "gulp";
+import { load } from "js-yaml";
+import { obj } from "through2";
 import * as PluginError from "plugin-error";
 import * as Vinyl from "vinyl";
 
@@ -219,7 +219,7 @@ function transformFile(yaml: any) {
 }
 
 export function transpileTextMateGrammar() {
-  return through.obj(
+  return obj(
     (
       file: Vinyl,
       _encoding: string,
@@ -230,7 +230,7 @@ export function transpileTextMateGrammar() {
       } else if (file.isBuffer()) {
        const buf: Buffer = file.contents;
        const yamlText: string = buf.toString("utf8");
-        const jsonData: any = jsYaml.load(yamlText);
+        const jsonData: any = load(yamlText);
        transformFile(jsonData);

        file.contents = Buffer.from(JSON.stringify(jsonData, null, 2), "utf8");
@@ -247,8 +247,7 @@ export function transpileTextMateGrammar() {
 }
 
 export function compileTextMateGrammar() {
-  return gulp
-    .src("syntaxes/*.tmLanguage.yml")
+  return src("syntaxes/*.tmLanguage.yml")
     .pipe(transpileTextMateGrammar())
-    .pipe(gulp.dest("out/syntaxes"));
+    .pipe(dest("out/syntaxes"));
 }

@@ -1,6 +1,6 @@
-import * as colors from "ansi-colors";
-import * as gulp from "gulp";
-import * as sourcemaps from "gulp-sourcemaps";
+import { gray, red } from "ansi-colors";
+import { dest, watch } from "gulp";
+import { init, write } from "gulp-sourcemaps";
 import * as ts from "gulp-typescript";
 import * as del from "del";
 
@@ -10,9 +10,9 @@ function goodReporter(): ts.reporter.Reporter {
       if (error.tsFile) {
         console.log(
           "[" +
-            colors.gray("gulp-typescript") +
+            gray("gulp-typescript") +
             "] " +
-            colors.red(
+            red(
               error.fullFilename +
                 "(" +
                 (error.startPosition!.line + 1) +
@@ -46,17 +46,17 @@ export function cleanOutput() {
 export function compileTypeScript() {
   return tsProject
     .src()
-    .pipe(sourcemaps.init())
+    .pipe(init())
     .pipe(tsProject(goodReporter()))
     .pipe(
-      sourcemaps.write(".", {
+      write(".", {
        includeContent: false,
        sourceRoot: ".",
      }),
    )
-    .pipe(gulp.dest("out"));
+    .pipe(dest("out"));
 }
 
 export function watchTypeScript() {
-  gulp.watch("src/**/*.ts", compileTypeScript);
+  watch("src/**/*.ts", compileTypeScript);
 }

@@ -1,4 +1,4 @@
-import * as path from "path";
+import { resolve } from "path";
 import * as webpack from "webpack";
 import * as MiniCssExtractPlugin from "mini-css-extract-plugin";
 
@@ -8,7 +8,7 @@ export const config: webpack.Configuration = {
     webview: "./src/view/webview.tsx",
   },
   output: {
-    path: path.resolve(__dirname, "..", "out"),
+    path: resolve(__dirname, "..", "out"),
     filename: "[name].js",
   },
   devtool: "inline-source-map",

@@ -11,8 +11,8 @@
  * Usage: npx ts-node scripts/add-fields-to-scenarios.ts
  */
 
-import * as fs from "fs-extra";
-import * as path from "path";
+import { pathExists, readJson, writeJson } from "fs-extra";
+import { resolve, relative } from "path";
 
 import { Octokit, type RestEndpointMethodTypes } from "@octokit/rest";
 import { throttling } from "@octokit/plugin-throttling";
@@ -23,11 +23,8 @@ import { isGetVariantAnalysisRequest } from "../src/mocks/gh-api-request";
 import { VariantAnalysis } from "../src/remote-queries/gh-api/variant-analysis";
 import { RepositoryWithMetadata } from "../src/remote-queries/gh-api/repository";
 
-const extensionDirectory = path.resolve(__dirname, "..");
-const scenariosDirectory = path.resolve(
-  extensionDirectory,
-  "src/mocks/scenarios",
-);
+const extensionDirectory = resolve(__dirname, "..");
+const scenariosDirectory = resolve(extensionDirectory, "src/mocks/scenarios");
 
 // Make sure we don't run into rate limits by automatically waiting until we can
 // make another request.
@@ -84,7 +81,7 @@ async function addFieldsToRepository(repository: RepositoryWithMetadata) {
 }
 
 async function addFieldsToScenarios() {
-  if (!(await fs.pathExists(scenariosDirectory))) {
+  if (!(await pathExists(scenariosDirectory))) {
     console.error("Scenarios directory does not exist: " + scenariosDirectory);
     return;
   }
@@ -94,7 +91,7 @@ async function addFieldsToScenarios() {
       continue;
     }
 
-    const data: GitHubApiRequest = await fs.readJson(file);
+    const data: GitHubApiRequest = await readJson(file);
 
     if (!isGetVariantAnalysisRequest(data)) {
       continue;
@@ -104,9 +101,7 @@ async function addFieldsToScenarios() {
       continue;
     }
 
-    console.log(
-      `Adding fields to '${path.relative(scenariosDirectory, file)}'`,
-    );
+    console.log(`Adding fields to '${relative(scenariosDirectory, file)}'`);
 
     const variantAnalysis = data.response.body as VariantAnalysis;
 
@@ -137,7 +132,7 @@ async function addFieldsToScenarios() {
       }
     }
 
-    await fs.writeJson(file, data, { spaces: 2 });
+    await writeJson(file, data, { spaces: 2 });
   }
 }

@@ -10,8 +10,8 @@
  * Usage: npx ts-node scripts/fix-scenario-file-numbering.ts <scenario-name>
  */
 
-import * as fs from "fs-extra";
-import * as path from "path";
+import { pathExists, readdir, rename, readJson, writeJSON } from "fs-extra";
+import { resolve, extname, basename, join } from "path";
 
 if (process.argv.length !== 3) {
   console.error("Expected 1 argument - the scenario name");
@@ -19,21 +19,18 @@ if (process.argv.length !== 3) {
 
 const scenarioName = process.argv[2];
 
-const extensionDirectory = path.resolve(__dirname, "..");
-const scenariosDirectory = path.resolve(
-  extensionDirectory,
-  "src/mocks/scenarios",
-);
-const scenarioDirectory = path.resolve(scenariosDirectory, scenarioName);
+const extensionDirectory = resolve(__dirname, "..");
+const scenariosDirectory = resolve(extensionDirectory, "src/mocks/scenarios");
+const scenarioDirectory = resolve(scenariosDirectory, scenarioName);
 
 async function fixScenarioFiles() {
   console.log(scenarioDirectory);
-  if (!(await fs.pathExists(scenarioDirectory))) {
+  if (!(await pathExists(scenarioDirectory))) {
     console.error("Scenario directory does not exist: " + scenarioDirectory);
     return;
   }
 
-  const files = await fs.readdir(scenarioDirectory);
+  const files = await readdir(scenarioDirectory);
 
   const orderedFiles = files.sort((a, b) => {
     const aNum = parseInt(a.split("-")[0]);
@@ -43,30 +40,30 @@ async function fixScenarioFiles() {
 
   let index = 0;
   for (const file of orderedFiles) {
-    const ext = path.extname(file);
+    const ext = extname(file);
     if (ext === ".json") {
-      const fileName = path.basename(file, ext);
+      const fileName = basename(file, ext);
       const fileCurrentIndex = parseInt(fileName.split("-")[0]);
       const fileNameWithoutIndex = fileName.split("-")[1];
       if (fileCurrentIndex !== index) {
         const newFileName = `${index}-${fileNameWithoutIndex}${ext}`;
-        const oldFilePath = path.join(scenarioDirectory, file);
-        const newFilePath = path.join(scenarioDirectory, newFileName);
+        const oldFilePath = join(scenarioDirectory, file);
+        const newFilePath = join(scenarioDirectory, newFileName);
         console.log(`Rename: ${oldFilePath} -> ${newFilePath}`);
-        await fs.rename(oldFilePath, newFilePath);
+        await rename(oldFilePath, newFilePath);
 
         if (fileNameWithoutIndex === "getVariantAnalysisRepoResult") {
           const oldZipFileName = `${fileCurrentIndex}-getVariantAnalysisRepoResult.body.zip`;
           const newZipFileName = `${index}-getVariantAnalysisRepoResult.body.zip`;
-          const oldZipFilePath = path.join(scenarioDirectory, oldZipFileName);
-          const newZipFilePath = path.join(scenarioDirectory, newZipFileName);
+          const oldZipFilePath = join(scenarioDirectory, oldZipFileName);
+          const newZipFilePath = join(scenarioDirectory, newZipFileName);
           console.log(`Rename: ${oldZipFilePath} -> ${newZipFilePath}`);
-          await fs.rename(oldZipFilePath, newZipFilePath);
+          await rename(oldZipFilePath, newZipFilePath);
 
-          const json = await fs.readJson(newFilePath);
+          const json = await readJson(newFilePath);
           json.response.body = `file:${newZipFileName}`;
           console.log(`Response.body change to ${json.response.body}`);
-          await fs.writeJSON(newFilePath, json);
+          await writeJSON(newFilePath, json);
         }
       }

@@ -1,31 +1,26 @@
-import * as fs from "fs-extra";
-import * as path from "path";
+import { pathExists, readFile } from "fs-extra";
+import { resolve, relative } from "path";
 
 import Ajv from "ajv";
-import * as tsj from "ts-json-schema-generator";
+import { createGenerator } from "ts-json-schema-generator";
 
 import { getFiles } from "./util/files";
 
-const extensionDirectory = path.resolve(__dirname, "..");
-const rootDirectory = path.resolve(extensionDirectory, "../..");
-const scenariosDirectory = path.resolve(
-  extensionDirectory,
-  "src/mocks/scenarios",
-);
+const extensionDirectory = resolve(__dirname, "..");
+const rootDirectory = resolve(extensionDirectory, "../..");
+const scenariosDirectory = resolve(extensionDirectory, "src/mocks/scenarios");
 
 const debug = process.env.RUNNER_DEBUG || process.argv.includes("--debug");
 
 async function lintScenarios() {
-  const schema = tsj
-    .createGenerator({
-      path: path.resolve(extensionDirectory, "src/mocks/gh-api-request.ts"),
-      tsconfig: path.resolve(extensionDirectory, "tsconfig.json"),
-      type: "GitHubApiRequest",
-      skipTypeCheck: true,
-      topRef: true,
-      additionalProperties: true,
-    })
-    .createSchema("GitHubApiRequest");
+  const schema = createGenerator({
+    path: resolve(extensionDirectory, "src/mocks/gh-api-request.ts"),
+    tsconfig: resolve(extensionDirectory, "tsconfig.json"),
+    type: "GitHubApiRequest",
+    skipTypeCheck: true,
+    topRef: true,
+    additionalProperties: true,
+  }).createSchema("GitHubApiRequest");
 
   const ajv = new Ajv();
 
@@ -37,7 +32,7 @@ async function lintScenarios() {
 
   let invalidFiles = 0;
 
-  if (!(await fs.pathExists(scenariosDirectory))) {
+  if (!(await pathExists(scenariosDirectory))) {
     console.error("Scenarios directory does not exist: " + scenariosDirectory);
     // Do not exit with a non-zero status code, as this is not a fatal error.
     return;
@@ -48,21 +43,21 @@ async function lintScenarios() {
       continue;
     }
 
-    const contents = await fs.readFile(file, "utf8");
+    const contents = await readFile(file, "utf8");
     const data = JSON.parse(contents);
 
     if (!validate(data)) {
       validate.errors?.forEach((error) => {
         // https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message
         console.log(
-          `::error file=${path.relative(rootDirectory, file)}::${
+          `::error file=${relative(rootDirectory, file)}::${
             error.instancePath
           }: ${error.message}`,
         );
       });
       invalidFiles++;
     } else if (debug) {
-      console.log(`File '${path.relative(rootDirectory, file)}' is valid`);
+      console.log(`File '${relative(rootDirectory, file)}' is valid`);
     }
   }

@@ -1,11 +1,11 @@
-import * as fs from "fs-extra";
-import * as path from "path";
+import { readdir } from "fs-extra";
+import { resolve } from "path";
 
 // https://stackoverflow.com/a/45130990
 export async function* getFiles(dir: string): AsyncGenerator<string> {
-  const dirents = await fs.readdir(dir, { withFileTypes: true });
+  const dirents = await readdir(dir, { withFileTypes: true });
   for (const dirent of dirents) {
-    const res = path.resolve(dir, dirent.name);
+    const res = resolve(dir, dirent.name);
     if (dirent.isDirectory()) {
       yield* getFiles(res);
     } else {

@@ -7,7 +7,7 @@ import {
   WebviewPanelOptions,
   WebviewOptions,
 } from "vscode";
-import * as path from "path";
+import { join } from "path";
 
 import { DisposableObject, DisposeHandler } from "./pure/disposable-object";
 import { tmpDir } from "./helpers";
@@ -83,7 +83,7 @@ export abstract class AbstractWebview<
         localResourceRoots: [
           ...(config.additionalOptions?.localResourceRoots ?? []),
           Uri.file(tmpDir.name),
-          Uri.file(path.join(ctx.extensionPath, "out")),
+          Uri.file(join(ctx.extensionPath, "out")),
         ],
       },
     );

@@ -1,12 +1,12 @@
-import * as fs from "fs-extra";
+import { pathExists } from "fs-extra";
 import * as unzipper from "unzipper";
 import * as vscode from "vscode";
 import { extLogger } from "./common";
 
 // All path operations in this file must be on paths *within* the zip
 // archive.
-import * as _path from "path";
-const path = _path.posix;
+import { posix } from "path";
+const path = posix;
 
 export class File implements vscode.FileStat {
   type: vscode.FileType;
@@ -176,7 +176,7 @@ type Archive = {
 };
 
 async function parse_zip(zipPath: string): Promise<Archive> {
-  if (!(await fs.pathExists(zipPath)))
+  if (!(await pathExists(zipPath)))
     throw vscode.FileSystemError.FileNotFound(zipPath);
   const archive: Archive = {
     unzipped: await unzipper.Open.file(zipPath),

@@ -13,7 +13,7 @@ import {
   Range,
   Uri,
 } from "vscode";
-import * as path from "path";
+import { basename } from "path";
 
 import { DatabaseItem } from "./databases";
 import { UrlValue, BqrsId } from "./pure/bqrs-cli-types";
@@ -138,7 +138,7 @@ export class AstViewer extends DisposableObject {
     this.treeDataProvider.roots = roots;
     this.treeDataProvider.db = db;
     this.treeDataProvider.refresh();
-    this.treeView.message = `AST for ${path.basename(fileUri.fsPath)}`;
+    this.treeView.message = `AST for ${basename(fileUri.fsPath)}`;
     this.currentFileUri = fileUri;
     // Handle error on reveal. This could happen if
     // the tree view is disposed during the reveal.

@@ -1,7 +1,7 @@
-import * as cpp from "child-process-promise";
+import { spawn } from "child-process-promise";
 import * as child_process from "child_process";
-import * as fs from "fs-extra";
-import * as path from "path";
+import { readFile } from "fs-extra";
+import { dirname, join, delimiter } from "path";
 import * as sarif from "sarif";
 import { SemVer } from "semver";
 import { Readable } from "stream";
@@ -414,7 +414,7 @@ export class CodeQLCliServer implements Disposable {
 
     // Spawn the CodeQL process
     const codeqlPath = await this.getCodeQlPath();
-    const childPromise = cpp.spawn(codeqlPath, args);
+    const childPromise = spawn(codeqlPath, args);
     const child = childPromise.childProcess;
 
     let cancellationRegistration: Disposable | undefined = undefined;
@@ -690,10 +690,7 @@ export class CodeQLCliServer implements Disposable {
   ): Promise<MlModelsInfo> {
     const args = (await this.cliConstraints.supportsPreciseResolveMlModels())
       ? // use the dirname of the path so that we can handle query libraries
-        [
-          ...this.getAdditionalPacksArg(additionalPacks),
-          path.dirname(queryPath),
-        ]
+        [...this.getAdditionalPacksArg(additionalPacks), dirname(queryPath)]
       : this.getAdditionalPacksArg(additionalPacks);
     return await this.runJsonCodeQlCliCommand<MlModelsInfo>(
       ["resolve", "ml-models"],
@@ -918,7 +915,7 @@ export class CodeQLCliServer implements Disposable {
     const dotFiles: Array<Promise<string>> = [];
     for await (const file of walkDirectory(dir)) {
       if (file.endsWith(".dot")) {
-        dotFiles.push(fs.readFile(file, "utf8"));
+        dotFiles.push(readFile(file, "utf8"));
       }
     }
     return Promise.all(dotFiles);
@@ -1066,7 +1063,7 @@ export class CodeQLCliServer implements Disposable {
   ): Promise<QlpacksInfo> {
     const args = this.getAdditionalPacksArg(additionalPacks);
     if (searchPath?.length) {
-      args.push("--search-path", path.join(...searchPath));
+      args.push("--search-path", join(...searchPath));
     }
 
     return this.runJsonCodeQlCliCommand<QlpacksInfo>(
@@ -1122,7 +1119,7 @@ export class CodeQLCliServer implements Disposable {
   ): Promise<string[]> {
     const args = this.getAdditionalPacksArg(additionalPacks);
     if (searchPath !== undefined) {
-      args.push("--search-path", path.join(...searchPath));
+      args.push("--search-path", join(...searchPath));
     }
     if (await this.cliConstraints.supportsAllowLibraryPacksInResolveQueries()) {
       // All of our usage of `codeql resolve queries` needs to handle library packs.
@@ -1253,9 +1250,7 @@ export class CodeQLCliServer implements Disposable {
   }
 
   private getAdditionalPacksArg(paths: string[]): string[] {
-    return paths.length
-      ? ["--additional-packs", paths.join(path.delimiter)]
-      : [];
+    return paths.length ? ["--additional-packs", paths.join(delimiter)] : [];
   }
 }

@@ -1,6 +1,6 @@
 import { window as Window, OutputChannel, Progress } from "vscode";
-import * as fs from "fs-extra";
-import * as path from "path";
+import { ensureFile, appendFile } from "fs-extra";
+import { isAbsolute } from "path";
 import { Logger, LogOptions } from "../logger";
 import { DisposableObject } from "../../../pure/disposable-object";
 
@@ -34,7 +34,7 @@ export class OutputChannelLogger extends DisposableObject implements Logger {
     }
 
     if (options.additionalLogLocation) {
-      if (!path.isAbsolute(options.additionalLogLocation)) {
+      if (!isAbsolute(options.additionalLogLocation)) {
         throw new Error(
           `Additional Log Location must be an absolute path: ${options.additionalLogLocation}`,
         );
@@ -84,9 +84,9 @@ class AdditionalLogLocation {
     if (options.trailingNewline === undefined) {
       options.trailingNewline = true;
     }
-    await fs.ensureFile(this.location);
+    await ensureFile(this.location);
 
-    await fs.appendFile(
+    await appendFile(
       this.location,
       message + (options.trailingNewline ? "\n" : ""),
       {

@@ -1,6 +1,6 @@
-import * as vscode from "vscode";
+import { EventEmitter } from "vscode";
 import { AppEventEmitter } from "../events";
 
 export class VSCodeAppEventEmitter<T>
-  extends vscode.EventEmitter<T>
+  extends EventEmitter<T>
   implements AppEventEmitter<T> {}

@@ -1,13 +1,18 @@
-import * as fs from "fs-extra";
-import * as yaml from "js-yaml";
-import * as tmp from "tmp-promise";
-import * as path from "path";
+import { writeFile, promises } from "fs-extra";
+import { dump } from "js-yaml";
+import { file } from "tmp-promise";
+import { basename, dirname, resolve } from "path";
 
-import * as helpers from "../helpers";
+import {
+  getPrimaryDbscheme,
+  getQlPackForDbscheme,
+  getOnDiskWorkspaceFolders,
+  showAndLogErrorMessage,
+  QlPacksForLanguage,
+} from "../helpers";
 import { KeyType, kindOfKeyType, nameOfKeyType, tagOfKeyType } from "./keyType";
 import { CodeQLCliServer } from "../cli";
 import { DatabaseItem } from "../databases";
-import { QlPacksForLanguage } from "../helpers";
 import { extLogger } from "../common";
 import { createInitialQueryInfo } from "../run-queries-shared";
 import { CancellationToken, Uri } from "vscode";
@@ -22,8 +27,8 @@ export async function qlpackOfDatabase(
     throw new Error("Database is invalid and cannot infer QLPack.");
   }
   const datasetPath = db.contents.datasetUri.fsPath;
-  const dbscheme = await helpers.getPrimaryDbscheme(datasetPath);
-  return await helpers.getQlPackForDbscheme(cli, dbscheme);
+  const dbscheme = await getPrimaryDbscheme(datasetPath);
+  return await getQlPackForDbscheme(cli, dbscheme);
 }
 
 /**
@@ -40,7 +45,7 @@ async function resolveQueriesFromPacks(
   keyType: KeyType,
 ): Promise<string[]> {
   const suiteFile = (
-    await tmp.file({
+    await file({
       postfix: ".qls",
     })
   ).path;
@@ -55,11 +60,11 @@ async function resolveQueriesFromPacks(
       },
     });
   }
-  await fs.writeFile(suiteFile, yaml.dump(suiteYaml), "utf8");
+  await writeFile(suiteFile, dump(suiteYaml), "utf8");
 
   const queries = await cli.resolveQueriesInSuite(
     suiteFile,
-    helpers.getOnDiskWorkspaceFolders(),
+    getOnDiskWorkspaceFolders(),
   );
   return queries;
 }
@@ -124,7 +129,7 @@ export async function resolveQueries(
     )} queries are not yet available \
 for this language.`;
 
-  void helpers.showAndLogErrorMessage(errorMessage);
+  void showAndLogErrorMessage(errorMessage);
   throw new Error(
     `Couldn't find any queries tagged ${tagOfKeyType(
       keyType,
@@ -144,7 +149,7 @@ async function resolveContextualQuery(
   // Work out the enclosing pack.
   const packContents = await cli.packPacklist(query, false);
   const packFilePath = packContents.find((p) =>
-    ["codeql-pack.yml", "qlpack.yml"].includes(path.basename(p)),
+    ["codeql-pack.yml", "qlpack.yml"].includes(basename(p)),
   );
   if (packFilePath === undefined) {
     // Should not happen; we already resolved this query.
@@ -152,9 +157,9 @@ async function resolveContextualQuery(
       `Could not find a CodeQL pack file for the pack enclosing the contextual query ${query}`,
     );
   }
-  const packPath = path.dirname(packFilePath);
+  const packPath = dirname(packFilePath);
   const lockFilePath = packContents.find((p) =>
-    ["codeql-pack.lock.yml", "qlpack.lock.yml"].includes(path.basename(p)),
+    ["codeql-pack.lock.yml", "qlpack.lock.yml"].includes(basename(p)),
   );
   let createdTempLockFile = false;
   if (!lockFilePath) {
@@ -180,12 +185,12 @@ async function resolveContextualQuery(
 }
 
 async function removeTemporaryLockFile(packPath: string) {
-  const tempLockFilePath = path.resolve(packPath, "codeql-pack.lock.yml");
+  const tempLockFilePath = resolve(packPath, "codeql-pack.lock.yml");
   void extLogger.log(
     `Deleting temporary package lock file at ${tempLockFilePath}`,
   );
   // It's fine if the file doesn't exist.
-  await fs.promises.rm(path.resolve(packPath, "codeql-pack.lock.yml"), {
+  await promises.rm(resolve(packPath, "codeql-pack.lock.yml"), {
     force: true,
   });
 }

@@ -1,10 +1,18 @@
 import fetch, { Response } from "node-fetch";
 import { zip } from "zip-a-folder";
-import * as unzipper from "unzipper";
+import { Open } from "unzipper";
 import { Uri, CancellationToken, commands, window } from "vscode";
 import { CodeQLCliServer } from "./cli";
-import * as fs from "fs-extra";
-import * as path from "path";
+import {
+  ensureDir,
+  realpath as fs_realpath,
+  pathExists,
+  createWriteStream,
+  remove,
+  stat,
+  readdir,
+} from "fs-extra";
+import { basename, join } from "path";
 import * as Octokit from "@octokit/rest";
 import { retry } from "@octokit/plugin-retry";
 
@@ -284,7 +292,7 @@ async function databaseArchiveFetcher(
   if (!storagePath) {
     throw new Error("No storage path specified.");
   }
-  await fs.ensureDir(storagePath);
+  await ensureDir(storagePath);
   const unzipPath = await getStorageFolder(storagePath, databaseUrl);
 
   if (isFile(databaseUrl)) {
@@ -333,19 +341,19 @@ async function getStorageFolder(storagePath: string, urlStr: string) {
   const url = Uri.parse(urlStr);
   // MacOS has a max filename length of 255
   // and remove a few extra chars in case we need to add a counter at the end.
-  let lastName = path.basename(url.path).substring(0, 250);
+  let lastName = basename(url.path).substring(0, 250);
   if (lastName.endsWith(".zip")) {
     lastName = lastName.substring(0, lastName.length - 4);
   }
 
-  const realpath = await fs.realpath(storagePath);
-  let folderName = path.join(realpath, lastName);
+  const realpath = await fs_realpath(storagePath);
+  let folderName = join(realpath, lastName);
 
   // avoid overwriting existing folders
   let counter = 0;
-  while (await fs.pathExists(folderName)) {
+  while (await pathExists(folderName)) {
     counter++;
-    folderName = path.join(realpath, `${lastName}-${counter}`);
+    folderName = join(realpath, `${lastName}-${counter}`);
     if (counter > 100) {
       throw new Error("Could not find a unique name for downloaded database.");
     }
@@ -378,7 +386,7 @@ async function readAndUnzip(
   progress?.({
     maxStep: 10,
     step: 9,
-    message: `Unzipping into ${path.basename(unzipPath)}`,
+    message: `Unzipping into ${basename(unzipPath)}`,
   });
   if (cli && (await cli.cliConstraints.supportsDatabaseUnbundle())) {
     // Use the `database unbundle` command if the installed cli version supports it
@@ -387,7 +395,7 @@ async function readAndUnzip(
     // Must get the zip central directory since streaming the
     // zip contents may not have correct local file headers.
     // Instead, we can only rely on the central directory.
-    const directory = await unzipper.Open.file(zipFile);
+    const directory = await Open.file(zipFile);
     await directory.extract({ path: unzipPath });
   }
 }
@@ -405,7 +413,7 @@ async function fetchAndUnzip(
   // file headers may be incorrect. Additionally, saving to file first will reduce memory
   // pressure compared with unzipping while downloading the archive.
 
-  const archivePath = path.join(tmpDir.name, `archive-${Date.now()}.zip`);
+  const archivePath = join(tmpDir.name, `archive-${Date.now()}.zip`);
 
   progress?.({
     maxStep: 3,
@@ -417,7 +425,7 @@ async function fetchAndUnzip(
     await fetch(databaseUrl, { headers: requestHeaders }),
     "Error downloading database",
   );
-  const archiveFileStream = fs.createWriteStream(archivePath);
+  const archiveFileStream = createWriteStream(archivePath);
 
   const contentLength = response.headers.get("content-length");
   const totalNumBytes = contentLength ? parseInt(contentLength, 10) : undefined;
@@ -443,7 +451,7 @@ async function fetchAndUnzip(
   );
 
   // remove archivePath eagerly since these archives can be large.
-  await fs.remove(archivePath);
+  await remove(archivePath);
 }
 
 async function checkForFailingResponse(
@@ -484,15 +492,15 @@ export async function findDirWithFile(
   dir: string,
   ...toFind: string[]
 ): Promise<string | undefined> {
-  if (!(await fs.stat(dir)).isDirectory()) {
+  if (!(await stat(dir)).isDirectory()) {
     return;
   }
-  const files = await fs.readdir(dir);
+  const files = await readdir(dir);
   if (toFind.some((file) => files.includes(file))) {
     return dir;
   }
   for (const file of files) {
-    const newPath = path.join(dir, file);
+    const newPath = join(dir, file);
     const result = await findDirWithFile(newPath, ...toFind);
     if (result) {
       return result;
@@ -744,14 +752,11 @@ async function promptForLanguage(
  * @param databasePath The full path to the unzipped database
  */
 async function ensureZippedSourceLocation(databasePath: string): Promise<void> {
-  const srcFolderPath = path.join(databasePath, "src");
+  const srcFolderPath = join(databasePath, "src");
   const srcZipPath = srcFolderPath + ".zip";
 
-  if (
-    (await fs.pathExists(srcFolderPath)) &&
-    !(await fs.pathExists(srcZipPath))
-  ) {
+  if ((await pathExists(srcFolderPath)) && !(await pathExists(srcZipPath))) {
     await zip(srcFolderPath, srcZipPath);
-    await fs.remove(srcFolderPath);
+    await remove(srcFolderPath);
   }
 }

@@ -1,4 +1,4 @@
-import * as path from "path";
+import { join, basename, dirname as path_dirname } from "path";
 import { DisposableObject } from "./pure/disposable-object";
 import {
   Event,
@@ -11,7 +11,7 @@ import {
   env,
   CancellationToken,
 } from "vscode";
-import * as fs from "fs-extra";
+import { pathExists, stat, readdir, remove } from "fs-extra";
 
 import {
   DatabaseChangedEvent,
@@ -61,10 +61,10 @@ function joinThemableIconPath(
 ): ThemableIconPath {
   if (typeof iconPath == "object")
     return {
-      light: path.join(base, iconPath.light),
-      dark: path.join(base, iconPath.dark),
+      light: join(base, iconPath.light),
+      dark: join(base, iconPath.dark),
     };
-  else return path.join(base, iconPath);
+  else return join(base, iconPath);
 }
 
 enum SortOrder {
@@ -397,8 +397,8 @@ export class DatabaseUI extends DisposableObject {
     let dbDirs = undefined;
 
     if (
-      !(await fs.pathExists(this.storagePath)) ||
-      !(await fs.stat(this.storagePath)).isDirectory()
+      !(await pathExists(this.storagePath)) ||
+      !(await stat(this.storagePath)).isDirectory()
     ) {
       void extLogger.log(
         "Missing or invalid storage directory. Not trying to remove orphaned databases.",
@@ -408,11 +408,11 @@ export class DatabaseUI extends DisposableObject {
 
     dbDirs =
       // read directory
-      (await fs.readdir(this.storagePath, { withFileTypes: true }))
+      (await readdir(this.storagePath, { withFileTypes: true }))
        // remove non-directories
        .filter((dirent) => dirent.isDirectory())
        // get the full path
-        .map((dirent) => path.join(this.storagePath, dirent.name))
+        .map((dirent) => join(this.storagePath, dirent.name))
        // remove databases still in workspace
        .filter((dbDir) => {
          const dbUri = Uri.file(dbDir);
@@ -435,15 +435,15 @@ export class DatabaseUI extends DisposableObject {
       dbDirs.map(async (dbDir) => {
         try {
           void extLogger.log(`Deleting orphaned database '${dbDir}'.`);
-          await fs.remove(dbDir);
+          await remove(dbDir);
         } catch (e) {
-          failures.push(`${path.basename(dbDir)}`);
+          failures.push(`${basename(dbDir)}`);
         }
       }),
     );
 
     if (failures.length) {
-      const dirname = path.dirname(failures[0]);
+      const dirname = path_dirname(failures[0]);
       void showAndLogErrorMessage(
         `Failed to delete unused databases (${failures.join(
           ", ",
@@ -620,7 +620,7 @@ export class DatabaseUI extends DisposableObject {
     } catch (e) {
       // rethrow and let this be handled by default error handling.
       throw new Error(
-        `Could not set database to ${path.basename(
+        `Could not set database to ${basename(
           uri.fsPath,
         )}. Reason: ${getErrorMessage(e)}`,
      );
@@ -774,12 +774,12 @@ export class DatabaseUI extends DisposableObject {
    */
   private async fixDbUri(uri: Uri): Promise<Uri> {
     let dbPath = uri.fsPath;
-    if ((await fs.stat(dbPath)).isFile()) {
-      dbPath = path.dirname(dbPath);
+    if ((await stat(dbPath)).isFile()) {
+      dbPath = path_dirname(dbPath);
     }
 
     if (await isLikelyDbLanguageFolder(dbPath)) {
-      dbPath = path.dirname(dbPath);
+      dbPath = path_dirname(dbPath);
     }
     return Uri.file(dbPath);
   }

@@ -1,6 +1,6 @@
-import * as fs from "fs-extra";
+import { pathExists, stat, remove } from "fs-extra";
 import * as glob from "glob-promise";
-import * as path from "path";
+import { join, basename, resolve, relative, dirname, extname } from "path";
 import * as vscode from "vscode";
 import * as cli from "./cli";
 import { ExtensionContext } from "vscode";
@@ -115,7 +115,7 @@ async function findDataset(parentDirectory: string): Promise<vscode.Uri> {
     );
   }
 
-  const dbAbsolutePath = path.join(parentDirectory, dbRelativePaths[0]);
+  const dbAbsolutePath = join(parentDirectory, dbRelativePaths[0]);
   if (dbRelativePaths.length > 1) {
     void showAndLogWarningMessage(
       `Found multiple dataset directories in database, using '${dbAbsolutePath}'.`,
@@ -132,13 +132,13 @@ export async function findSourceArchive(
   const relativePaths = ["src", "output/src_archive"];
 
   for (const relativePath of relativePaths) {
-    const basePath = path.join(databasePath, relativePath);
+    const basePath = join(databasePath, relativePath);
     const zipPath = basePath + ".zip";
 
     // Prefer using a zip archive over a directory.
-    if (await fs.pathExists(zipPath)) {
+    if (await pathExists(zipPath)) {
       return encodeArchiveBasePath(zipPath);
-    } else if (await fs.pathExists(basePath)) {
+    } else if (await pathExists(basePath)) {
       return vscode.Uri.file(basePath);
     }
   }
@@ -152,7 +152,7 @@
 async function resolveDatabase(
   databasePath: string,
 ): Promise<DatabaseContents> {
-  const name = path.basename(databasePath);
+  const name = basename(databasePath);
 
   // Look for dataset and source archive.
   const datasetUri = await findDataset(databasePath);
@@ -180,7 +180,7 @@ async function resolveDatabaseContents(
     );
   }
   const databasePath = uri.fsPath;
-  if (!(await fs.pathExists(databasePath))) {
+  if (!(await pathExists(databasePath))) {
     throw new InvalidDatabaseError(
       `Database '${databasePath}' does not exist.`,
     );
@@ -207,9 +207,7 @@ async function resolveDatabaseContents(
       `Database '${databasePath}' contains multiple CodeQL dbschemes under '${dbPath}'.`,
     );
   } else {
-    contents.dbSchemeUri = vscode.Uri.file(
-      path.resolve(dbPath, dbSchemeFiles[0]),
-    );
+    contents.dbSchemeUri = vscode.Uri.file(resolve(dbPath, dbSchemeFiles[0]));
   }
   return contents;
 }
@@ -336,7 +334,7 @@ export class DatabaseItemImpl implements DatabaseItem {
     } else if (this._contents) {
       return this._contents.name;
     } else {
-      return path.basename(this.databaseUri.fsPath);
+      return basename(this.databaseUri.fsPath);
     }
   }
 
@@ -518,14 +516,14 @@ export class DatabaseItemImpl implements DatabaseItem {
       return false;
     }
     try {
-      const stats = await fs.stat(testPath);
+      const stats = await stat(testPath);
       if (stats.isDirectory()) {
-        return !path.relative(testPath, databasePath).startsWith("..");
+        return !relative(testPath, databasePath).startsWith("..");
       } else {
        // database for /one/two/three/test.ql is at /one/two/three/three.testproj
-        const testdir = path.dirname(testPath);
-        const testdirbase = path.basename(testdir);
-        return databasePath == path.join(testdir, testdirbase + ".testproj");
+        const testdir = dirname(testPath);
+        const testdirbase = basename(testdir);
+        return databasePath == join(testdir, testdirbase + ".testproj");
       }
     } catch {
       // No information available for test path - assume database is unaffected.
@@ -597,7 +595,7 @@ export class DatabaseManager extends DisposableObject {
   ): Promise<DatabaseItem> {
     const contents = await resolveDatabaseContents(uri);
     // Ignore the source archive for QLTest databases by default.
-    const isQLTestDatabase = path.extname(uri.fsPath) === ".testproj";
+    const isQLTestDatabase = extname(uri.fsPath) === ".testproj";
     const fullOptions: FullDatabaseOptions = {
       ignoreSourceArchive: isQLTestDatabase,
       // If a displayName is not passed in, the basename of folder containing the database is used.
@@ -926,7 +924,7 @@ export class DatabaseManager extends DisposableObject {
     // Delete folder from file system only if it is controlled by the extension
     if (this.isExtensionControlledLocation(item.databaseUri)) {
       void extLogger.log("Deleting database from filesystem.");
-      fs.remove(item.databaseUri.fsPath).then(
+      remove(item.databaseUri.fsPath).then(
         () => void extLogger.log(`Deleted '${item.databaseUri.fsPath}'`),
         (e) =>
           void extLogger.log(
@@ -1003,7 +1001,7 @@
  * scripts returned by the cli's upgrade resolution.
  */
 export function getUpgradesDirectories(scripts: string[]): vscode.Uri[] {
-  const parentDirs = scripts.map((dir) => path.dirname(dir));
+  const parentDirs = scripts.map((dir) => dirname(dir));
   const uniqueParentDirs = new Set(parentDirs);
   return Array.from(uniqueParentDirs).map((filePath) =>
     vscode.Uri.file(filePath),

@@ -1,5 +1,5 @@
-import * as fs from "fs-extra";
-import * as path from "path";
+import { pathExists, writeJSON, readJSON, readJSONSync } from "fs-extra";
+import { join } from "path";
 import { cloneDbConfig, DbConfig } from "./db-config";
 import * as chokidar from "chokidar";
 import { DisposableObject, DisposeHandler } from "../../pure/disposable-object";
@@ -23,7 +23,7 @@ export class DbConfigStore extends DisposableObject {
     super();
 
     const storagePath = app.workspaceStoragePath || app.globalStoragePath;
-    this.configPath = path.join(storagePath, "workspace-databases.json");
+    this.configPath = join(storagePath, "workspace-databases.json");
 
     this.config = this.createEmptyConfig();
     this.configErrors = [];
@@ -57,8 +57,8 @@ export class DbConfigStore extends DisposableObject {
   }
 
   private async loadConfig(): Promise<void> {
-    if (!(await fs.pathExists(this.configPath))) {
-      await fs.writeJSON(this.configPath, this.createEmptyConfig(), {
+    if (!(await pathExists(this.configPath))) {
+      await writeJSON(this.configPath, this.createEmptyConfig(), {
        spaces: 2,
      });
    }
@@ -69,7 +69,7 @@ export class DbConfigStore extends DisposableObject {
   private async readConfig(): Promise<void> {
     let newConfig: DbConfig | undefined = undefined;
     try {
-      newConfig = await fs.readJSON(this.configPath);
+      newConfig = await readJSON(this.configPath);
     } catch (e) {
       this.configErrors = [`Failed to read config file: ${this.configPath}`];
     }
@@ -84,7 +84,7 @@ export class DbConfigStore extends DisposableObject {
   private readConfigSync(): void {
     let newConfig: DbConfig | undefined = undefined;
     try {
-      newConfig = fs.readJSONSync(this.configPath);
+      newConfig = readJSONSync(this.configPath);
     } catch (e) {
       this.configErrors = [`Failed to read config file: ${this.configPath}`];
     }

@@ -1,5 +1,5 @@
-import * as fs from "fs-extra";
-import * as path from "path";
+import { readJsonSync } from "fs-extra";
+import { resolve } from "path";
 import Ajv from "ajv";
 import { DbConfig } from "./db-config";
 
@@ -7,11 +7,11 @@ export class DbConfigValidator {
   private readonly schema: any;
 
   constructor(extensionPath: string) {
-    const schemaPath = path.resolve(
+    const schemaPath = resolve(
       extensionPath,
       "workspace-databases-schema.json",
     );
-    this.schema = fs.readJsonSync(schemaPath);
+    this.schema = readJsonSync(schemaPath);
   }
 
   public validate(dbConfig: DbConfig): string[] {

@@ -1,4 +1,4 @@
-import * as vscode from "vscode";
+import { window, workspace } from "vscode";
 import { commandRunner } from "../../commandRunner";
 import { DisposableObject } from "../../pure/disposable-object";
 import { DbManager } from "../db-manager";
@@ -12,13 +12,10 @@ export class DbPanel extends DisposableObject {
 
     this.dataProvider = new DbTreeDataProvider(dbManager);
 
-    const treeView = vscode.window.createTreeView(
-      "codeQLDatabasesExperimental",
-      {
-        treeDataProvider: this.dataProvider,
-        canSelectMany: false,
-      },
-    );
+    const treeView = window.createTreeView("codeQLDatabasesExperimental", {
+      treeDataProvider: this.dataProvider,
+      canSelectMany: false,
+    });
 
     this.push(treeView);
   }
@@ -33,7 +30,7 @@ export class DbPanel extends DisposableObject {
 
   private async openConfigFile(): Promise<void> {
     const configPath = this.dbManager.getConfigPath();
-    const document = await vscode.workspace.openTextDocument(configPath);
-    await vscode.window.showTextDocument(document);
+    const document = await workspace.openTextDocument(configPath);
+    await window.showTextDocument(document);
   }
 }

@@ -1,9 +1,9 @@
 import * as fetch from "node-fetch";
-import * as fs from "fs-extra";
-import * as os from "os";
-import * as path from "path";
+import { pathExists, mkdtemp, createWriteStream, remove } from "fs-extra";
+import { tmpdir } from "os";
+import { delimiter, dirname, join } from "path";
 import * as semver from "semver";
-import * as url from "url";
+import { parse } from "url";
 import { ExtensionContext, Event } from "vscode";
 import { DistributionConfig } from "./config";
 import {
@@ -153,7 +153,7 @@ export class DistributionManager implements DistributionProvider {
   > {
     // Check config setting, then extension specific distribution, then PATH.
     if (this.config.customCodeQlPath) {
-      if (!(await fs.pathExists(this.config.customCodeQlPath))) {
+      if (!(await pathExists(this.config.customCodeQlPath))) {
         void showAndLogErrorMessage(
           `The CodeQL executable path is specified as "${this.config.customCodeQlPath}" ` +
             "by a configuration setting, but a CodeQL executable could not be found at that path. Please check " +
@@ -188,7 +188,7 @@ export class DistributionManager implements DistributionProvider {
     }
 
     if (process.env.PATH) {
-      for (const searchDirectory of process.env.PATH.split(path.delimiter)) {
+      for (const searchDirectory of process.env.PATH.split(delimiter)) {
        const expectedLauncherPath = await getExecutableFromDirectory(
          searchDirectory,
        );
@@ -262,9 +262,9 @@ export class DistributionManager implements DistributionProvider {
       // not managed externally
       return false;
     }
-    const dir = path.dirname(this.config.customCodeQlPath);
-    const newLaunderPath = path.join(dir, codeQlLauncherName());
-    return await fs.pathExists(newLaunderPath);
+    const dir = dirname(this.config.customCodeQlPath);
+    const newLaunderPath = join(dir, codeQlLauncherName());
+    return await pathExists(newLaunderPath);
   }
 
   private readonly extensionSpecificDistributionManager: ExtensionSpecificDistributionManager;
@@ -373,13 +373,11 @@ class ExtensionSpecificDistributionManager {
       await this.createReleasesApiConsumer().streamBinaryContentOfAsset(
         assets[0],
       );
-    const tmpDirectory = await fs.mkdtemp(
-      path.join(os.tmpdir(), "vscode-codeql"),
-    );
+    const tmpDirectory = await mkdtemp(join(tmpdir(), "vscode-codeql"));
 
     try {
-      const archivePath = path.join(tmpDirectory, "distributionDownload.zip");
-      const archiveFile = fs.createWriteStream(archivePath);
+      const archivePath = join(tmpDirectory, "distributionDownload.zip");
+      const archiveFile = createWriteStream(archivePath);
 
       const contentLength = assetStream.headers.get("content-length");
       const totalNumBytes = contentLength
@@ -406,7 +404,7 @@ class ExtensionSpecificDistributionManager {
       );
       await extractZipArchive(archivePath, this.getDistributionStoragePath());
     } finally {
-      await fs.remove(tmpDirectory);
+      await remove(tmpDirectory);
     }
   }
 
@@ -417,8 +415,8 @@ class ExtensionSpecificDistributionManager {
    */
   private async removeDistribution(): Promise<void> {
     await this.storeInstalledRelease(undefined);
-    if (await fs.pathExists(this.getDistributionStoragePath())) {
-      await fs.remove(this.getDistributionStoragePath());
+    if (await pathExists(this.getDistributionStoragePath())) {
+      await remove(this.getDistributionStoragePath());
     }
   }
 
@@ -484,7 +482,7 @@ class ExtensionSpecificDistributionManager {
        ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey,
        0,
      ) || "";
-    return path.join(
+    return join(
       this.extensionContext.globalStoragePath,
       ExtensionSpecificDistributionManager._currentDistributionFolderBaseName +
         distributionFolderIndex,
@@ -492,7 +490,7 @@ class ExtensionSpecificDistributionManager {
   }
 
   private getDistributionRootPath(): string {
-    return path.join(
+    return join(
       this.getDistributionStoragePath(),
       ExtensionSpecificDistributionManager._codeQlExtractedFolderName,
     );
@@ -650,7 +648,7 @@ export class ReleasesApiConsumer {
       redirectUrl &&
       redirectCount < ReleasesApiConsumer._maxRedirects
     ) {
-      const parsedRedirectUrl = url.parse(redirectUrl);
+      const parsedRedirectUrl = parse(redirectUrl);
       if (parsedRedirectUrl.protocol != "https:") {
         throw new Error("Encountered a non-https redirect, rejecting");
       }
@@ -805,16 +803,16 @@ export async function getExecutableFromDirectory(
   directory: string,
   warnWhenNotFound = false,
 ): Promise<string | undefined> {
-  const expectedLauncherPath = path.join(directory, codeQlLauncherName());
+  const expectedLauncherPath = join(directory, codeQlLauncherName());
   const deprecatedLauncherName = deprecatedCodeQlLauncherName();
   const alternateExpectedLauncherPath = deprecatedLauncherName
-    ? path.join(directory, deprecatedLauncherName)
+    ? join(directory, deprecatedLauncherName)
     : undefined;
-  if (await fs.pathExists(expectedLauncherPath)) {
+  if (await pathExists(expectedLauncherPath)) {
    return expectedLauncherPath;
  } else if (
    alternateExpectedLauncherPath &&
-    (await fs.pathExists(alternateExpectedLauncherPath))
+    (await pathExists(alternateExpectedLauncherPath))
  ) {
    warnDeprecatedLauncher();
    return alternateExpectedLauncherPath;

@@ -20,15 +20,18 @@ import {
version as vscodeVersion,
} from "vscode";
import { LanguageClient } from "vscode-languageclient/node";
import * as os from "os";
import * as fs from "fs-extra";
import * as path from "path";
import * as tmp from "tmp-promise";
import { platform, arch } from "os";
import { ensureDir } from "fs-extra";
import { join, basename } from "path";
import { dirSync } from "tmp-promise";
import { testExplorerExtensionId, TestHub } from "vscode-test-adapter-api";
import * as semver from "semver";
import { parse, lt } from "semver";

import { AstViewer } from "./astViewer";
import * as archiveFilesystemProvider from "./archive-filesystem-provider";
import {
activate as archiveFilesystemProvider_activate,
zipArchiveScheme,
} from "./archive-filesystem-provider";
import QuickEvalCodeLensProvider from "./quickEvalCodeLensProvider";
import { CodeQLCliServer, CliVersionConstraint } from "./cli";
import {
@@ -41,7 +44,7 @@ import {
QueryHistoryConfigListener,
QueryServerConfigListener,
} from "./config";
import * as languageSupport from "./languageSupport";
import { install } from "./languageSupport";
import { DatabaseItem, DatabaseManager } from "./databases";
import { DatabaseUI } from "./databases-ui";
import {
@@ -82,8 +85,8 @@ import {
} from "./common";
import { QueryHistoryManager } from "./query-history";
import { CompletedLocalQueryInfo, LocalQueryInfo } from "./query-results";
import * as legacyQueryServer from "./legacy-query-server/queryserver-client";
import * as newQueryServer from "./query-server/queryserver-client";
import { QueryServerClient as LegacyQueryServerClient } from "./legacy-query-server/queryserver-client";
import { QueryServerClient } from "./query-server/queryserver-client";
import { displayQuickQuery } from "./quick-query";
import { QLTestAdapterFactory } from "./test-adapter";
import { TestUIService } from "./test-ui";
@@ -238,7 +241,7 @@ export async function activate(
const distributionConfigListener = new DistributionConfigListener();
await initializeLogging(ctx);
await initializeTelemetry(extension, ctx);
languageSupport.install();
install();

const codelensProvider = new QuickEvalCodeLensProvider();
languages.registerCodeLensProvider(
@@ -602,8 +605,8 @@ async function activateWithInstalledDistribution(
ctx.subscriptions.push(queryHistoryConfigurationListener);
const showResults = async (item: CompletedLocalQueryInfo) =>
showResultsForCompletedQuery(item, WebviewReveal.Forced);
const queryStorageDir = path.join(ctx.globalStorageUri.fsPath, "queries");
await fs.ensureDir(queryStorageDir);
const queryStorageDir = join(ctx.globalStorageUri.fsPath, "queries");
await ensureDir(queryStorageDir);
const labelProvider = new HistoryItemLabelProvider(
queryHistoryConfigurationListener,
);
@@ -619,11 +622,11 @@ async function activateWithInstalledDistribution(
ctx.subscriptions.push(localQueryResultsView);

void extLogger.log("Initializing variant analysis manager.");
const variantAnalysisStorageDir = path.join(
const variantAnalysisStorageDir = join(
ctx.globalStorageUri.fsPath,
"variant-analyses",
);
await fs.ensureDir(variantAnalysisStorageDir);
await ensureDir(variantAnalysisStorageDir);
const variantAnalysisResultsManager = new VariantAnalysisResultsManager(
cliServer,
extLogger,
@@ -691,7 +694,7 @@ async function activateWithInstalledDistribution(
ctx.subscriptions.push(compareView);

void extLogger.log("Initializing source archive filesystem provider.");
archiveFilesystemProvider.activate(ctx);
archiveFilesystemProvider_activate(ctx);

async function showResultsForComparison(
from: CompletedLocalQueryInfo,
@@ -772,7 +775,7 @@ async function activateWithInstalledDistribution(
}
}

const qhelpTmpDir = tmp.dirSync({
const qhelpTmpDir = dirSync({
prefix: "qhelp_",
keep: false,
unsafeCleanup: true,
@@ -786,8 +789,8 @@ async function activateWithInstalledDistribution(
: window.activeTextEditor?.document.uri.fsPath;
if (pathToQhelp) {
// Create temporary directory
const relativePathToMd = path.basename(pathToQhelp, ".qhelp") + ".md";
const absolutePathToMd = path.join(qhelpTmpDir.name, relativePathToMd);
const relativePathToMd = basename(pathToQhelp, ".qhelp") + ".md";
const absolutePathToMd = join(qhelpTmpDir.name, relativePathToMd);
const uri = Uri.file(absolutePathToMd);
try {
await cliServer.generateQueryHelp(pathToQhelp, absolutePathToMd);
@@ -981,9 +984,7 @@ async function activateWithInstalledDistribution(
// warn user and display selected files when a directory is selected because some ql
// files may be hidden from the user.
if (dirFound) {
const fileString = files
.map((file) => path.basename(file))
.join(", ");
const fileString = files.map((file) => basename(file)).join(", ");
const res = await showBinaryChoiceDialog(
`You are about to run ${files.length} queries: ${fileString} Do you want to continue?`,
);
@@ -1367,7 +1368,7 @@ async function activateWithInstalledDistribution(
commandRunner("codeQL.copyVersion", async () => {
const text = `CodeQL extension version: ${
extension?.packageJSON.version
} \nCodeQL CLI version: ${await getCliVersion()} \nPlatform: ${os.platform()} ${os.arch()}`;
} \nCodeQL CLI version: ${await getCliVersion()} \nPlatform: ${platform()} ${arch()}`;
await env.clipboard.writeText(text);
void showAndLogInformationMessage(text);
}),
@@ -1438,13 +1439,13 @@ async function activateWithInstalledDistribution(

// Store contextual queries in a temporary folder so that they are removed
// when the application closes. There is no need for the user to interact with them.
const contextualQueryStorageDir = path.join(
const contextualQueryStorageDir = join(
tmpDir.name,
"contextual-query-storage",
);
await fs.ensureDir(contextualQueryStorageDir);
await ensureDir(contextualQueryStorageDir);
languages.registerDefinitionProvider(
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
{ scheme: zipArchiveScheme },
new TemplateQueryDefinitionProvider(
cliServer,
qs,
@@ -1454,7 +1455,7 @@ async function activateWithInstalledDistribution(
);

languages.registerReferenceProvider(
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
{ scheme: zipArchiveScheme },
new TemplateQueryReferenceProvider(
cliServer,
qs,
@@ -1593,7 +1594,7 @@ async function createQueryServer(
task,
);
if (await cliServer.cliConstraints.supportsNewQueryServer()) {
const qs = new newQueryServer.QueryServerClient(
const qs = new QueryServerClient(
qlConfigurationListener,
cliServer,
qsOpts,
@@ -1603,7 +1604,7 @@ async function createQueryServer(
await qs.startQueryServer();
return new NewQueryRunner(qs);
} else {
const qs = new legacyQueryServer.QueryServerClient(
const qs = new LegacyQueryServerClient(
qlConfigurationListener,
cliServer,
qsOpts,
@@ -1663,8 +1664,8 @@ async function assertVSCodeVersionGreaterThan(
return;
}
try {
const parsedVersion = semver.parse(vscodeVersion);
const parsedMinVersion = semver.parse(minVersion);
const parsedVersion = parse(vscodeVersion);
const parsedMinVersion = parse(minVersion);
if (!parsedVersion || !parsedMinVersion) {
void showAndLogWarningMessage(
`Could not do a version check of vscode because could not parse version number: actual vscode version ${vscodeVersion} or minimum supported vscode version ${minVersion}.`,
@@ -1672,7 +1673,7 @@ async function assertVSCodeVersionGreaterThan(
return;
}

if (semver.lt(parsedVersion, parsedMinVersion)) {
if (lt(parsedVersion, parsedMinVersion)) {
const message = `The CodeQL extension requires VS Code version ${minVersion} or later. Current version is ${vscodeVersion}. Please update VS Code to get the latest features of CodeQL.`;
const result = await showBinaryChoiceDialog(
message,

@@ -1,8 +1,15 @@
import * as fs from "fs-extra";
import {
ensureDirSync,
readFile,
pathExists,
ensureDir,
writeFile,
opendir,
} from "fs-extra";
import * as glob from "glob-promise";
import * as yaml from "js-yaml";
import * as path from "path";
import * as tmp from "tmp-promise";
import { load } from "js-yaml";
import { join, basename } from "path";
import { dirSync } from "tmp-promise";
import {
ExtensionContext,
Uri,
@@ -16,13 +23,13 @@ import { extLogger } from "./common";
import { QueryMetadata } from "./pure/interface-types";

// Shared temporary folder for the extension.
export const tmpDir = tmp.dirSync({
export const tmpDir = dirSync({
prefix: "queries_",
keep: false,
unsafeCleanup: true,
});
export const upgradesTmpDir = path.join(tmpDir.name, "upgrades");
fs.ensureDirSync(upgradesTmpDir);
export const upgradesTmpDir = join(tmpDir.name, "upgrades");
ensureDirSync(upgradesTmpDir);

export const tmpDirDisposal = {
dispose: () => {
@@ -360,12 +367,12 @@ async function findDbschemePack(
): Promise<{ name: string; isLibraryPack: boolean }> {
for (const { packDir, packName } of packs) {
if (packDir !== undefined) {
const qlpack = yaml.load(
await fs.readFile(path.join(packDir, "qlpack.yml"), "utf8"),
const qlpack = load(
await readFile(join(packDir, "qlpack.yml"), "utf8"),
) as { dbscheme?: string; library?: boolean };
if (
qlpack.dbscheme !== undefined &&
path.basename(qlpack.dbscheme) === path.basename(dbschemePath)
basename(qlpack.dbscheme) === basename(dbschemePath)
) {
return {
name: packName,
@@ -432,7 +439,7 @@ export async function getQlPackForDbscheme(
export async function getPrimaryDbscheme(
datasetFolder: string,
): Promise<string> {
const dbschemes = await glob(path.join(datasetFolder, "*.dbscheme"));
const dbschemes = await glob(join(datasetFolder, "*.dbscheme"));

if (dbschemes.length < 1) {
throw new Error(
@@ -568,9 +575,7 @@ export const languageToDbScheme = Object.entries(dbSchemeToLanguage).reduce(
*/
export function getInitialQueryContents(language: string, dbscheme: string) {
if (!language) {
const dbschemeBase = path.basename(
dbscheme,
) as keyof typeof dbSchemeToLanguage;
const dbschemeBase = basename(dbscheme) as keyof typeof dbSchemeToLanguage;
language = dbSchemeToLanguage[dbschemeBase];
}

@@ -586,8 +591,8 @@ export function getInitialQueryContents(language: string, dbscheme: string) {
export async function isLikelyDatabaseRoot(maybeRoot: string) {
const [a, b, c] = await Promise.all([
// databases can have either .dbinfo or codeql-database.yml.
fs.pathExists(path.join(maybeRoot, ".dbinfo")),
fs.pathExists(path.join(maybeRoot, "codeql-database.yml")),
pathExists(join(maybeRoot, ".dbinfo")),
pathExists(join(maybeRoot, "codeql-database.yml")),

// they *must* have a db-{language} folder
glob("db-*/", { cwd: maybeRoot }),
@@ -601,8 +606,7 @@ export async function isLikelyDatabaseRoot(maybeRoot: string) {
*/
export async function isLikelyDbLanguageFolder(dbPath: string) {
return (
path.basename(dbPath).startsWith("db-") &&
!(await isLikelyDatabaseRoot(dbPath))
basename(dbPath).startsWith("db-") && !(await isLikelyDatabaseRoot(dbPath))
);
}

@@ -686,9 +690,9 @@ export async function tryGetQueryMetadata(
* It does not need to exist.
*/
export async function createTimestampFile(storagePath: string) {
const timestampPath = path.join(storagePath, "timestamp");
await fs.ensureDir(storagePath);
await fs.writeFile(timestampPath, Date.now().toString(), "utf8");
const timestampPath = join(storagePath, "timestamp");
await ensureDir(storagePath);
await writeFile(timestampPath, Date.now().toString(), "utf8");
}

/**
@@ -703,8 +707,8 @@ export async function* walkDirectory(
dir: string,
): AsyncIterableIterator<string> {
const seenFiles = new Set<string>();
for await (const d of await fs.opendir(dir)) {
const entry = path.join(dir, d.name);
for await (const d of await opendir(dir)) {
const entry = join(dir, d.name);
seenFiles.add(entry);
if (d.isDirectory()) {
yield* walkDirectory(entry);

@@ -1,5 +1,5 @@
import { env } from "vscode";
import * as path from "path";
import { basename } from "path";
import { QueryHistoryConfig } from "./config";
import { LocalQueryInfo } from "./query-results";
import {
@@ -106,7 +106,7 @@ export class HistoryItemLabelProvider {
d: buildRepoLabel(item),
r: resultCount,
s: humanizeQueryStatus(item.status),
f: path.basename(item.remoteQuery.queryFilePath),
f: basename(item.remoteQuery.queryFilePath),
"%": "%",
};
}
@@ -125,7 +125,7 @@ export class HistoryItemLabelProvider {
d: buildRepoLabel(item),
r: resultCount,
s: humanizeQueryStatus(item.status),
f: path.basename(item.variantAnalysis.query.filePath),
f: basename(item.variantAnalysis.query.filePath),
"%": "%",
};
}

@@ -1,6 +1,6 @@
import { ProgressLocation, window } from "vscode";
import { StreamInfo } from "vscode-languageclient/node";
import * as cli from "./cli";
import { shouldDebugIdeServer, spawnServer } from "./cli";
import { QueryServerConfig } from "./config";
import { ideServerLogger } from "./common";

@@ -16,12 +16,12 @@ export async function spawnIdeServer(
{ title: "CodeQL language server", location: ProgressLocation.Window },
async (progressReporter, _) => {
const args = ["--check-errors", "ON_CHANGE"];
if (cli.shouldDebugIdeServer()) {
if (shouldDebugIdeServer()) {
args.push(
"-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9009,server=y,suspend=n,quiet=y",
);
}
const child = cli.spawnServer(
const child = spawnServer(
config.codeQlPath,
"CodeQL language server",
["execute", "language-server"],

@@ -1,5 +1,5 @@
import * as crypto from "crypto";
import * as os from "os";
import { randomBytes } from "crypto";
import { EOL } from "os";
import {
Uri,
Location,
@@ -32,7 +32,7 @@ import {

/** Gets a nonce string created with 128 bits of entropy. */
export function getNonce(): string {
return crypto.randomBytes(16).toString("base64");
return randomBytes(16).toString("base64");
}

/**
@@ -175,7 +175,7 @@ export function getHtmlForWebview(
content="default-src 'none'; script-src 'nonce-${nonce}'; font-src ${fontSrc}; style-src ${styleSrc}; connect-src ${
webview.cspSource
};">
${stylesheetsHtmlLines.join(` ${os.EOL}`)}
${stylesheetsHtmlLines.join(` ${EOL}`)}
</head>
<body>
<div id=root data-view="${view}">

@@ -1,5 +1,5 @@
import * as path from "path";
import * as fs from "fs-extra";
import { dirname } from "path";
import { ensureFile } from "fs-extra";

import { DisposableObject } from "../pure/disposable-object";
import { CancellationToken, commands } from "vscode";
@@ -13,8 +13,8 @@ import {
progress,
ProgressMessage,
WithProgressId,
compileQuery,
} from "../pure/legacy-messages";
import * as messages from "../pure/legacy-messages";
import { ProgressCallback, ProgressTask } from "../commandRunner";
import { findQueryLogFile } from "../run-queries-shared";
import { ServerProcess } from "../json-rpc-server";
@@ -154,7 +154,7 @@ export class QueryServerClient extends DisposableObject {

if (await this.cliServer.cliConstraints.supportsStructuredEvalLog()) {
const structuredLogFile = `${this.opts.contextStoragePath}/structured-evaluator-log.json`;
await fs.ensureFile(structuredLogFile);
await ensureFile(structuredLogFile);

args.push("--evaluator-log");
args.push(structuredLogFile);
@@ -270,10 +270,8 @@ export class QueryServerClient extends DisposableObject {
* properly will require a change in the query server.
*/
private updateActiveQuery(method: string, parameter: any): void {
if (method === messages.compileQuery.method) {
this.activeQueryLogFile = findQueryLogFile(
path.dirname(parameter.resultPath),
);
if (method === compileQuery.method) {
this.activeQueryLogFile = findQueryLogFile(dirname(parameter.resultPath));
}
}
}

@@ -1,7 +1,7 @@
import * as crypto from "crypto";
import * as fs from "fs-extra";
import { createHash } from "crypto";
import { readFile } from "fs-extra";
import * as tmp from "tmp-promise";
import * as path from "path";
import { basename, join } from "path";
import { CancellationToken, Uri } from "vscode";
import { LSPErrorCodes, ResponseError } from "vscode-languageclient";

@@ -257,9 +257,8 @@ async function checkDbschemeCompatibility(
false,
);
const hash = async function (filename: string): Promise<string> {
return crypto
.createHash("sha256")
.update(await fs.readFile(filename))
return createHash("sha256")
.update(await readFile(filename))
.digest("hex");
};

@@ -379,14 +378,14 @@ export async function compileAndRunQueryAgainstDatabase(
// database. (Queries that merely need the database to be upgraded
// won't trigger this check)
// This test will produce confusing results if we ever change the name of the database schema files.
const querySchemaName = path.basename(packConfig.dbscheme);
const dbSchemaName = path.basename(dbItem.contents.dbSchemeUri.fsPath);
const querySchemaName = basename(packConfig.dbscheme);
const dbSchemaName = basename(dbItem.contents.dbSchemeUri.fsPath);
if (querySchemaName != dbSchemaName) {
void extLogger.log(
`Query schema was ${querySchemaName}, but database schema was ${dbSchemaName}.`,
);
throw new Error(
`The query ${path.basename(
`The query ${basename(
initialInfo.queryPath,
)} cannot be run against the selected database (${
dbItem.name
@@ -441,7 +440,7 @@ export async function compileAndRunQueryAgainstDatabase(

const hasMetadataFile = await dbItem.hasMetadataFile();
const query = new QueryInProgress(
path.join(queryStorageDir, initialInfo.id),
join(queryStorageDir, initialInfo.id),
dbItem.databaseUri.fsPath,
hasMetadataFile,
packConfig.dbscheme,

@@ -9,7 +9,7 @@ import { extLogger } from "../common";
import * as messages from "../pure/legacy-messages";
import * as qsClient from "./queryserver-client";
import * as tmp from "tmp-promise";
import * as path from "path";
import { dirname } from "path";
import { DatabaseItem } from "../databases";

/**
@@ -69,7 +69,7 @@ async function compileDatabaseUpgrade(
// We have the upgrades we want but compileUpgrade
// requires searching for them. So we use the parent directories of the upgrades
// as the upgrade path.
const parentDirs = resolvedSequence.map((dir) => path.dirname(dir));
const parentDirs = resolvedSequence.map((dir) => dirname(dir));
const uniqueParentDirs = new Set(parentDirs);
progress({
step: 1,

@@ -1,4 +1,4 @@
import * as fs from "fs-extra";
import { readFile } from "fs-extra";

/**
* Read a file consisting of multiple JSON objects. Each object is separated from the previous one
@@ -14,7 +14,7 @@ export async function readJsonlFile(
path: string,
handler: (value: any) => Promise<void>,
): Promise<void> {
const logSummary = await fs.readFile(path, "utf-8");
const logSummary = await readFile(path, "utf-8");

// Remove newline delimiters because summary is in .jsonl format.
const jsonSummaryObjects: string[] = logSummary.split(/\r?\n\r?\n/g);

@@ -7,7 +7,7 @@ import {
EvaluationLogScannerSet,
} from "./log-scanner";
import { PipelineInfo, SummarySymbols } from "./summary-parser";
import * as fs from "fs-extra";
import { readFile } from "fs-extra";
import { extLogger } from "../common";

/**
@@ -125,7 +125,7 @@ export class LogScannerService extends DisposableObject {
let symbols: SummarySymbols | undefined = undefined;
if (symbolsLocation !== undefined) {
symbols = JSON.parse(
await fs.readFile(symbolsLocation, { encoding: "utf-8" }),
await readFile(symbolsLocation, { encoding: "utf-8" }),
);
}
const problemReporter = new ProblemReporter(symbols);

@@ -1,4 +1,4 @@
import * as fs from "fs-extra";
import { readFile } from "fs-extra";
import { RawSourceMap, SourceMapConsumer } from "source-map";
import {
commands,
@@ -98,7 +98,7 @@ export class SummaryLanguageSupport extends DisposableObject {
const mapPath = document.uri.fsPath + ".map";

try {
const sourceMapText = await fs.readFile(mapPath, "utf-8");
const sourceMapText = await readFile(mapPath, "utf-8");
const rawMap: RawSourceMap = JSON.parse(sourceMapText);
this.sourceMap = await new SourceMapConsumer(rawMap);
} catch (e: unknown) {

@@ -1,4 +1,4 @@
import * as fs from "fs-extra";
import { writeFile, promises } from "fs-extra";

/**
* Location information for a single pipeline invocation in the RA.
@@ -51,7 +51,7 @@ export async function generateSummarySymbolsFile(
symbolsPath: string,
): Promise<void> {
const symbols = await generateSummarySymbols(summaryPath);
await fs.writeFile(symbolsPath, JSON.stringify(symbols));
await writeFile(symbolsPath, JSON.stringify(symbols));
}

/**
@@ -64,7 +64,7 @@ export async function generateSummarySymbolsFile(
async function generateSummarySymbols(
summaryPath: string,
): Promise<SummarySymbols> {
const summary = await fs.promises.readFile(summaryPath, {
const summary = await promises.readFile(summaryPath, {
encoding: "utf-8",
});
const symbols: SummarySymbols = {

@@ -1,5 +1,5 @@
import * as path from "path";
import * as fs from "fs-extra";
import { join, resolve } from "path";
import { pathExists } from "fs-extra";
import { setupServer, SetupServerApi } from "msw/node";

import { DisposableObject } from "../pure/disposable-object";
@@ -50,7 +50,7 @@ export class MockGitHubApiServer extends DisposableObject {
}
}

const scenarioPath = path.join(scenariosPath, scenarioName);
const scenarioPath = join(scenariosPath, scenarioName);

const handlers = await createRequestHandlers(scenarioPath);
this.server.resetHandlers();
@@ -129,10 +129,10 @@ export class MockGitHubApiServer extends DisposableObject {

public async getDefaultScenariosPath(): Promise<string | undefined> {
// This should be the directory where package.json is located
const rootDirectory = path.resolve(__dirname, "../..");
const rootDirectory = resolve(__dirname, "../..");

const scenariosPath = path.resolve(rootDirectory, "src/mocks/scenarios");
if (await fs.pathExists(scenariosPath)) {
const scenariosPath = resolve(rootDirectory, "src/mocks/scenarios");
if (await pathExists(scenariosPath)) {
return scenariosPath;
}


@@ -1,5 +1,5 @@
import * as fs from "fs-extra";
import * as path from "path";
import { ensureDir, writeFile } from "fs-extra";
import { join } from "path";

import { MockedRequest } from "msw";
import { SetupServerApi } from "msw/node";
@@ -66,15 +66,15 @@ export class Recorder extends DisposableObject {
}

public async save(scenariosPath: string, name: string): Promise<string> {
const scenarioDirectory = path.join(scenariosPath, name);
const scenarioDirectory = join(scenariosPath, name);

await fs.ensureDir(scenarioDirectory);
await ensureDir(scenarioDirectory);

for (let i = 0; i < this.currentRecordedScenario.length; i++) {
const request = this.currentRecordedScenario[i];

const fileName = `${i}-${request.request.kind}.json`;
const filePath = path.join(scenarioDirectory, fileName);
const filePath = join(scenarioDirectory, fileName);

let writtenRequest = {
...request,
@@ -87,8 +87,8 @@ export class Recorder extends DisposableObject {
: "bin";

const bodyFileName = `${i}-${writtenRequest.request.kind}.body.${extension}`;
const bodyFilePath = path.join(scenarioDirectory, bodyFileName);
await fs.writeFile(bodyFilePath, writtenRequest.response.body);
const bodyFilePath = join(scenarioDirectory, bodyFileName);
await writeFile(bodyFilePath, writtenRequest.response.body);

writtenRequest = {
...writtenRequest,
@@ -99,7 +99,7 @@ export class Recorder extends DisposableObject {
};
}

await fs.writeFile(filePath, JSON.stringify(writtenRequest, null, 2));
await writeFile(filePath, JSON.stringify(writtenRequest, null, 2));
}

this.stop();

@@ -1,5 +1,5 @@
import * as path from "path";
import * as fs from "fs-extra";
import { join } from "path";
import { readdir, readJson, readFile } from "fs-extra";
import { DefaultBodyType, MockedRequest, rest, RestHandler } from "msw";
import {
GitHubApiRequest,
@@ -33,7 +33,7 @@ export async function createRequestHandlers(
async function readRequestFiles(
scenarioDirPath: string,
): Promise<GitHubApiRequest[]> {
const files = await fs.readdir(scenarioDirPath);
const files = await readdir(scenarioDirPath);

const orderedFiles = files.sort((a, b) => {
const aNum = parseInt(a.split("-")[0]);
@@ -47,8 +47,8 @@ async function readRequestFiles(
continue;
}

const filePath = path.join(scenarioDirPath, file);
const request: GitHubApiRequest = await fs.readJson(filePath, {
const filePath = join(scenarioDirPath, file);
const request: GitHubApiRequest = await readJson(filePath, {
encoding: "utf8",
});

@@ -56,8 +56,8 @@ async function readRequestFiles(
typeof request.response.body === "string" &&
request.response.body.startsWith("file:")
) {
request.response.body = await fs.readFile(
path.join(scenarioDirPath, request.response.body.substring(5)),
request.response.body = await readFile(
join(scenarioDirPath, request.response.body.substring(5)),
);
}


@@ -1,4 +1,4 @@
import * as fs from "fs-extra";
import { pathExists } from "fs-extra";
import {
commands,
env,
@@ -216,7 +216,7 @@ export class VSCodeMockGitHubApiServer extends DisposableObject {
this.ctx.extensionUri,
"src/mocks/scenarios",
).fsPath.toString();
if (await fs.pathExists(developmentScenariosPath)) {
if (await pathExists(developmentScenariosPath)) {
return developmentScenariosPath;
}
}

@@ -1,13 +1,13 @@
import * as os from "os";
import * as unzipper from "unzipper";
import * as path from "path";
import * as fs from "fs-extra";
import { platform } from "os";
import { Open } from "unzipper";
import { join } from "path";
import { pathExists, chmod } from "fs-extra";

/**
* Get the name of the codeql cli installation we prefer to install, based on our current platform.
*/
export function getRequiredAssetName(): string {
switch (os.platform()) {
switch (platform()) {
case "linux":
return "codeql-linux64.zip";
case "darwin":
@@ -23,7 +23,7 @@ export async function extractZipArchive(
archivePath: string,
outPath: string,
): Promise<void> {
const archive = await unzipper.Open.file(archivePath);
const archive = await Open.file(archivePath);
await archive.extract({
concurrency: 4,
path: outPath,
@@ -32,22 +32,22 @@ export async function extractZipArchive(
await Promise.all(
archive.files.map(async (file) => {
// Only change file permissions if within outPath (path.join normalises the path)
const extractedPath = path.join(outPath, file.path);
const extractedPath = join(outPath, file.path);
if (
extractedPath.indexOf(outPath) !== 0 ||
!(await fs.pathExists(extractedPath))
!(await pathExists(extractedPath))
) {
return Promise.resolve();
}
return fs.chmod(extractedPath, file.externalFileAttributes >>> 16);
return chmod(extractedPath, file.externalFileAttributes >>> 16);
}),
);
}

export function codeQlLauncherName(): string {
return os.platform() === "win32" ? "codeql.exe" : "codeql";
return platform() === "win32" ? "codeql.exe" : "codeql";
}

export function deprecatedCodeQlLauncherName(): string | undefined {
return os.platform() === "win32" ? "codeql.cmd" : undefined;
return platform() === "win32" ? "codeql.cmd" : undefined;
}

@@ -1,5 +1,5 @@
import * as fs from "fs-extra";
import * as path from "path";
import { pathExists, stat, readdir } from "fs-extra";
import { join } from "path";

/**
* Recursively finds all .ql files in this set of Uris.
@@ -14,13 +14,10 @@ export async function gatherQlFiles(
const gatheredUris: Set<string> = new Set();
let dirFound = false;
for (const nextPath of paths) {
if (
(await fs.pathExists(nextPath)) &&
(await fs.stat(nextPath)).isDirectory()
) {
if ((await pathExists(nextPath)) && (await stat(nextPath)).isDirectory()) {
dirFound = true;
const subPaths = await fs.readdir(nextPath);
const fullPaths = subPaths.map((p) => path.join(nextPath, p));
const subPaths = await readdir(nextPath);
const fullPaths = subPaths.map((p) => join(nextPath, p));
const nestedFiles = (await gatherQlFiles(fullPaths))[0];
nestedFiles.forEach((nested) => gatheredUris.add(nested));
} else if (nextPath.endsWith(".ql")) {
@@ -38,14 +35,14 @@ export async function gatherQlFiles(
export async function getDirectoryNamesInsidePath(
path: string,
): Promise<string[]> {
if (!(await fs.pathExists(path))) {
if (!(await pathExists(path))) {
throw Error(`Path does not exist: ${path}`);
}
if (!(await fs.stat(path)).isDirectory()) {
if (!(await stat(path)).isDirectory()) {
throw Error(`Path is not a directory: ${path}`);
}

const dirItems = await fs.readdir(path, { withFileTypes: true });
const dirItems = await readdir(path, { withFileTypes: true });

const dirNames = dirItems
.filter((dirent) => dirent.isDirectory())

@@ -14,7 +14,7 @@
* the fact that any unknown QueryResultType value counts as an error.
*/

import * as rpc from "vscode-jsonrpc";
import { RequestType } from "vscode-jsonrpc";
import * as shared from "./messages-shared";

/**
@@ -970,7 +970,7 @@ export type ProgressMessage = shared.ProgressMessage;
/**
* Check a Ql query for errors without compiling it
*/
export const checkQuery = new rpc.RequestType<
export const checkQuery = new RequestType<
WithProgressId<CheckQueryParams>,
CheckQueryResult,
void
@@ -978,7 +978,7 @@ export const checkQuery = new rpc.RequestType<
/**
* Compile a Ql query into a qlo
*/
export const compileQuery = new rpc.RequestType<
export const compileQuery = new RequestType<
WithProgressId<CompileQueryParams>,
CheckQueryResult,
void
@@ -986,7 +986,7 @@ export const compileQuery = new rpc.RequestType<
/**
* Compile a dil query into a qlo
*/
export const compileDilQuery = new rpc.RequestType<
export const compileDilQuery = new RequestType<
WithProgressId<CompileDilParams>,
CheckQueryResult,
void
@@ -995,7 +995,7 @@ export const compileDilQuery = new rpc.RequestType<
/**
* Check if there is a valid upgrade path between two dbschemes.
*/
export const checkUpgrade = new rpc.RequestType<
export const checkUpgrade = new RequestType<
WithProgressId<UpgradeParams>,
CheckUpgradeResult,
void
@@ -1003,7 +1003,7 @@ export const checkUpgrade = new rpc.RequestType<
/**
* Compile an upgrade script to upgrade a dataset.
*/
export const compileUpgrade = new rpc.RequestType<
export const compileUpgrade = new RequestType<
WithProgressId<CompileUpgradeParams>,
CompileUpgradeResult,
void
@@ -1011,7 +1011,7 @@ export const compileUpgrade = new rpc.RequestType<
/**
* Compile an upgrade script to upgrade a dataset.
*/
export const compileUpgradeSequence = new rpc.RequestType<
export const compileUpgradeSequence = new RequestType<
WithProgressId<CompileUpgradeSequenceParams>,
CompileUpgradeSequenceResult,
void
@@ -1020,7 +1020,7 @@ export const compileUpgradeSequence = new rpc.RequestType<
/**
* Start a new structured log in the evaluator, terminating the previous one if it exists
*/
export const startLog = new rpc.RequestType<
export const startLog = new RequestType<
WithProgressId<StartLogParams>,
StartLogResult,
void
@@ -1029,7 +1029,7 @@ export const startLog = new rpc.RequestType<
/**
* Terminate a structured log in the evaluator. Is a no-op if we aren't logging to the given location
*/
export const endLog = new rpc.RequestType<
export const endLog = new RequestType<
WithProgressId<EndLogParams>,
EndLogResult,
void
@@ -1038,7 +1038,7 @@ export const endLog = new rpc.RequestType<
/**
* Clear the cache of a dataset
*/
export const clearCache = new rpc.RequestType<
export const clearCache = new RequestType<
WithProgressId<ClearCacheParams>,
ClearCacheResult,
void
@@ -1046,7 +1046,7 @@ export const clearCache = new rpc.RequestType<
/**
* Trim the cache of a dataset
*/
export const trimCache = new rpc.RequestType<
export const trimCache = new RequestType<
WithProgressId<TrimCacheParams>,
ClearCacheResult,
void
@@ -1055,7 +1055,7 @@ export const trimCache = new rpc.RequestType<
/**
* Run some queries on a dataset
*/
export const runQueries = new rpc.RequestType<
export const runQueries = new RequestType<
WithProgressId<EvaluateQueriesParams>,
EvaluationComplete,
void
@@ -1064,19 +1064,19 @@ export const runQueries = new rpc.RequestType<
/**
* Run upgrades on a dataset
*/
export const runUpgrade = new rpc.RequestType<
export const runUpgrade = new RequestType<
WithProgressId<RunUpgradeParams>,
RunUpgradeResult,
void
>("evaluation/runUpgrade");

export const registerDatabases = new rpc.RequestType<
export const registerDatabases = new RequestType<
WithProgressId<RegisterDatabasesParams>,
RegisterDatabasesResult,
void
>("evaluation/registerDatabases");

export const deregisterDatabases = new rpc.RequestType<
export const deregisterDatabases = new RequestType<
WithProgressId<DeregisterDatabasesParams>,
DeregisterDatabasesResult,
void
@@ -1086,7 +1086,7 @@ export const deregisterDatabases = new rpc.RequestType<
* Request returned to the client to notify completion of a query.
* The full runQueries job is completed when all queries are acknowledged.
*/
export const completeQuery = new rpc.RequestType<
export const completeQuery = new RequestType<
EvaluationResult,
Record<string, any>,
void

@@ -14,7 +14,7 @@
* the fact that any unknown QueryResultType value counts as an error.
*/

import * as rpc from "vscode-jsonrpc";
import { NotificationType } from "vscode-jsonrpc";

/**
* A position within a QL file.
@@ -106,6 +106,6 @@ export interface ProgressMessage {
/**
* A notification that the progress has been changed.
*/
export const progress = new rpc.NotificationType<ProgressMessage>(
export const progress = new NotificationType<ProgressMessage>(
"ql/progressUpdated",
);

@@ -14,7 +14,7 @@
* the fact that any unknown QueryResultType value counts as an error.
*/

import * as rpc from "vscode-jsonrpc";
import { RequestType } from "vscode-jsonrpc";
import * as shared from "./messages-shared";

/**
@@ -164,7 +164,7 @@ export type ProgressMessage = shared.ProgressMessage;
/**
* Clear the cache of a dataset
*/
export const clearCache = new rpc.RequestType<
export const clearCache = new RequestType<
WithProgressId<ClearCacheParams>,
ClearCacheResult,
void
@@ -172,7 +172,7 @@ export const clearCache = new rpc.RequestType<
/**
* Trim the cache of a dataset
*/
export const trimCache = new rpc.RequestType<
export const trimCache = new RequestType<
WithProgressId<TrimCacheParams>,
ClearCacheResult,
void
@@ -181,7 +181,7 @@ export const trimCache = new rpc.RequestType<
/**
* Clear the pack cache
*/
export const clearPackCache = new rpc.RequestType<
export const clearPackCache = new RequestType<
WithProgressId<ClearPackCacheParams>,
ClearPackCacheResult,
void
@@ -190,25 +190,25 @@ export const clearPackCache = new rpc.RequestType<
/**
* Run a query on a database
*/
export const runQuery = new rpc.RequestType<
export const runQuery = new RequestType<
WithProgressId<RunQueryParams>,
RunQueryResult,
void
>("evaluation/runQuery");

export const registerDatabases = new rpc.RequestType<
export const registerDatabases = new RequestType<
WithProgressId<RegisterDatabasesParams>,
RegisterDatabasesResult,
void
>("evaluation/registerDatabases");

export const deregisterDatabases = new rpc.RequestType<
export const deregisterDatabases = new RequestType<
WithProgressId<DeregisterDatabasesParams>,
DeregisterDatabasesResult,
void
>("evaluation/deregisterDatabases");

export const upgradeDatabase = new rpc.RequestType<
export const upgradeDatabase = new RequestType<
WithProgressId<UpgradeParams>,
UpgradeResult,
void

@@ -1,4 +1,4 @@
import * as unzipper from "unzipper";
import { Open } from "unzipper";

/**
* Unzips a zip file to a directory.
@@ -6,6 +6,6 @@ import * as unzipper from "unzipper";
* @param destinationPath The path to the directory to unzip to.
*/
export async function unzipFile(sourcePath: string, destinationPath: string) {
const file = await unzipper.Open.file(sourcePath);
const file = await Open.file(sourcePath);
await file.extract({ path: destinationPath });
}

@@ -1,4 +1,4 @@
import * as path from "path";
import { dirname, basename, join, normalize, relative, extname } from "path";
import { Discovery } from "./discovery";
import {
EventEmitter,
@@ -10,7 +10,7 @@ import {
} from "vscode";
import { MultiFileSystemWatcher } from "./vscode-utils/multi-file-system-watcher";
import { CodeQLCliServer } from "./cli";
import * as fs from "fs-extra";
import { pathExists } from "fs-extra";

/**
* A node in the tree of tests. This will be either a `QLTestDirectory` or a `QLTestFile`.
@@ -52,12 +52,12 @@ export class QLTestDirectory extends QLTestNode {
}

public createDirectory(relativePath: string): QLTestDirectory {
const dirName = path.dirname(relativePath);
const dirName = dirname(relativePath);
if (dirName === ".") {
return this.createChildDirectory(relativePath);
} else {
const parent = this.createDirectory(dirName);
return parent.createDirectory(path.basename(relativePath));
return parent.createDirectory(basename(relativePath));
}
}

@@ -89,7 +89,7 @@ export class QLTestDirectory extends QLTestNode {
if (existingChild !== undefined) {
return existingChild as QLTestDirectory;
} else {
const newChild = new QLTestDirectory(path.join(this.path, name), name);
const newChild = new QLTestDirectory(join(this.path, name), name);
this.addChild(newChild);
return newChild;
}
@@ -200,17 +200,15 @@ export class QLTestDiscovery extends Discovery<QLTestDiscoveryResults> {
const rootDirectory = new QLTestDirectory(fullPath, name);

// Don't try discovery on workspace folders that don't exist on the filesystem
if (await fs.pathExists(fullPath)) {
if (await pathExists(fullPath)) {
const resolvedTests = (
await this.cliServer.resolveTests(fullPath)
).filter((testPath) => !QLTestDiscovery.ignoreTestPath(testPath));
for (const testPath of resolvedTests) {
const relativePath = path.normalize(path.relative(fullPath, testPath));
const dirName = path.dirname(relativePath);
const relativePath = normalize(relative(fullPath, testPath));
const dirName = dirname(relativePath);
const parentDirectory = rootDirectory.createDirectory(dirName);
parentDirectory.addChild(
new QLTestFile(testPath, path.basename(testPath)),
);
parentDirectory.addChild(new QLTestFile(testPath, basename(testPath)));
}

rootDirectory.finish();
@@ -223,10 +221,10 @@ export class QLTestDiscovery extends Discovery<QLTestDiscoveryResults> {
* @param testPath Path to the test file.
*/
private static ignoreTestPath(testPath: string): boolean {
switch (path.extname(testPath).toLowerCase()) {
switch (extname(testPath).toLowerCase()) {
case ".ql":
case ".qlref":
return path.basename(testPath).startsWith("__");
return basename(testPath).startsWith("__");

default:
return false;

@@ -1,6 +1,6 @@
import * as fs from "fs-extra";
import * as os from "os";
import * as path from "path";
import { pathExists, readdir, stat, remove, readFile } from "fs-extra";
import { EOL } from "os";
import { join } from "path";
import { Disposable, ExtensionContext } from "vscode";
import { extLogger } from "./common";
import { QueryHistoryManager } from "./query-history";
@@ -75,17 +75,17 @@ async function scrubQueries(
try {
counter?.increment();
void extLogger.log("Scrubbing query directory. Removing old queries.");
if (!(await fs.pathExists(queryDirectory))) {
if (!(await pathExists(queryDirectory))) {
void extLogger.log(
`Cannot scrub. Query directory does not exist: ${queryDirectory}`,
);
return;
}

const baseNames = await fs.readdir(queryDirectory);
const baseNames = await readdir(queryDirectory);
const errors: string[] = [];
for (const baseName of baseNames) {
const dir = path.join(queryDirectory, baseName);
const dir = join(queryDirectory, baseName);
const scrubResult = await scrubDirectory(dir, now, maxQueryTime);
if (scrubResult.errorMsg) {
errors.push(scrubResult.errorMsg);
@@ -96,7 +96,7 @@ async function scrubQueries(
}

if (errors.length) {
throw new Error(os.EOL + errors.join(os.EOL));
throw new Error(EOL + errors.join(EOL));
}
} catch (e) {
void extLogger.log(`Error while scrubbing queries: ${e}`);
@@ -115,32 +115,32 @@ async function scrubDirectory(
errorMsg?: string;
deleted: boolean;
}> {
const timestampFile = path.join(dir, "timestamp");
const timestampFile = join(dir, "timestamp");
try {
let deleted = true;
if (!(await fs.stat(dir)).isDirectory()) {
if (!(await stat(dir)).isDirectory()) {
void extLogger.log(` ${dir} is not a directory. Deleting.`);
await fs.remove(dir);
} else if (!(await fs.pathExists(timestampFile))) {
await remove(dir);
} else if (!(await pathExists(timestampFile))) {
void extLogger.log(` ${dir} has no timestamp file. Deleting.`);
await fs.remove(dir);
} else if (!(await fs.stat(timestampFile)).isFile()) {
await remove(dir);
} else if (!(await stat(timestampFile)).isFile()) {
void extLogger.log(` ${timestampFile} is not a file. Deleting.`);
await fs.remove(dir);
await remove(dir);
} else {
const timestampText = await fs.readFile(timestampFile, "utf8");
const timestampText = await readFile(timestampFile, "utf8");
const timestamp = parseInt(timestampText, 10);

if (Number.isNaN(timestamp)) {
void extLogger.log(
` ${dir} has invalid timestamp '${timestampText}'. Deleting.`,
);
await fs.remove(dir);
await remove(dir);
} else if (now - timestamp > maxQueryTime) {
void extLogger.log(
` ${dir} is older than ${maxQueryTime / 1000} seconds. Deleting.`,
);
await fs.remove(dir);
await remove(dir);
} else {
void extLogger.log(
` ${dir} is not older than ${maxQueryTime / 1000} seconds. Keeping.`,

@@ -1,4 +1,4 @@
import * as path from "path";
import { join, dirname } from "path";
import {
commands,
Disposable,
@@ -48,7 +48,7 @@ import {
variantAnalysisStatusToQueryStatus,
} from "./query-status";
import { slurpQueryHistory, splatQueryHistory } from "./query-serialization";
import * as fs from "fs-extra";
import { pathExists } from "fs-extra";
import { CliVersionConstraint } from "./cli";
import { HistoryItemLabelProvider } from "./history-item-label-provider";
import { Credentials } from "./authentication";
@@ -167,15 +167,12 @@ export class HistoryTreeDataProvider
private readonly labelProvider: HistoryItemLabelProvider,
) {
super();
this.failedIconPath = path.join(
extensionPath,
FAILED_QUERY_HISTORY_ITEM_ICON,
);
this.localSuccessIconPath = path.join(
this.failedIconPath = join(extensionPath, FAILED_QUERY_HISTORY_ITEM_ICON);
this.localSuccessIconPath = join(
extensionPath,
LOCAL_SUCCESS_QUERY_HISTORY_ITEM_ICON,
);
this.remoteSuccessIconPath = path.join(
this.remoteSuccessIconPath = join(
extensionPath,
REMOTE_SUCCESS_QUERY_HISTORY_ITEM_ICON,
);
@@ -414,7 +411,7 @@ export class QueryHistoryManager extends DisposableObject {
// This is because the query history is specific to each workspace.
// For situations where `ctx.storageUri` is undefined (i.e., there is no workspace),
// we default to global storage.
this.queryMetadataStorageLocation = path.join(
this.queryMetadataStorageLocation = join(
(ctx.storageUri || ctx.globalStorageUri).fsPath,
WORKSPACE_QUERY_HISTORY_FILE,
);
@@ -877,7 +874,7 @@ export class QueryHistoryManager extends DisposableObject {
if (
item.t == "local" &&
item.completedQuery &&
!(await fs.pathExists(item.completedQuery?.query.querySaveDir))
!(await pathExists(item.completedQuery?.query.querySaveDir))
) {
this.treeDataProvider.remove(item);
item.completedQuery?.dispose();
@@ -1109,7 +1106,7 @@ export class QueryHistoryManager extends DisposableObject {
return queryHistoryItem.completedQuery.query.querySaveDir;
}
} else if (queryHistoryItem.t === "remote") {
return path.join(this.queryStorageDir, queryHistoryItem.queryId);
return join(this.queryStorageDir, queryHistoryItem.queryId);
} else if (queryHistoryItem.t === "variant-analysis") {
return this.variantAnalysisManager.getVariantAnalysisStorageLocation(
queryHistoryItem.variantAnalysis.id,
@@ -1135,19 +1132,19 @@ export class QueryHistoryManager extends DisposableObject {
let externalFilePath: string | undefined;
if (finalSingleItem.t === "local") {
if (finalSingleItem.completedQuery) {
externalFilePath = path.join(
externalFilePath = join(
finalSingleItem.completedQuery.query.querySaveDir,
"timestamp",
);
}
} else if (finalSingleItem.t === "remote") {
externalFilePath = path.join(
externalFilePath = join(
this.queryStorageDir,
finalSingleItem.queryId,
"timestamp",
);
} else if (finalSingleItem.t === "variant-analysis") {
externalFilePath = path.join(
externalFilePath = join(
this.variantAnalysisManager.getVariantAnalysisStorageLocation(
finalSingleItem.variantAnalysis.id,
),
@@ -1156,11 +1153,11 @@ export class QueryHistoryManager extends DisposableObject {
}

if (externalFilePath) {
if (!(await fs.pathExists(externalFilePath))) {
if (!(await pathExists(externalFilePath))) {
// timestamp file is missing (manually deleted?) try selecting the parent folder.
// It's less nice, but at least it will work.
externalFilePath = path.dirname(externalFilePath);
if (!(await fs.pathExists(externalFilePath))) {
externalFilePath = dirname(externalFilePath);
if (!(await pathExists(externalFilePath))) {
throw new Error(
`Query directory does not exist: ${externalFilePath}`,
);
@@ -1248,7 +1245,7 @@ export class QueryHistoryManager extends DisposableObject {
// Summary log file doesn't exist.
if (
finalSingleItem.evalLogLocation &&
(await fs.pathExists(finalSingleItem.evalLogLocation))
(await pathExists(finalSingleItem.evalLogLocation))
) {
// If raw log does exist, then the summary log is still being generated.
this.warnInProgressEvalLogSummary();

@@ -3,8 +3,8 @@ import { CancellationTokenSource, env } from "vscode";
|
||||
import * as messages from "./pure/messages-shared";
|
||||
import * as legacyMessages from "./pure/legacy-messages";
|
||||
import * as cli from "./cli";
|
||||
import * as fs from "fs-extra";
|
||||
import * as path from "path";
|
||||
import { pathExists } from "fs-extra";
|
||||
import { basename } from "path";
|
||||
import {
|
||||
RawResultsSortState,
|
||||
SortedResultSetInfo,
|
||||
@@ -161,7 +161,7 @@ export async function interpretResultsSarif(
|
||||
): Promise<SarifInterpretationData> {
|
||||
const { resultsPath, interpretedResultsPath } = resultsPaths;
|
||||
let res;
|
||||
if (await fs.pathExists(interpretedResultsPath)) {
|
||||
if (await pathExists(interpretedResultsPath)) {
|
||||
res = await sarifParser(interpretedResultsPath);
|
||||
} else {
|
||||
res = await cli.interpretBqrsSarif(
|
||||
@@ -184,7 +184,7 @@ export async function interpretGraphResults(
|
||||
sourceInfo?: cli.SourceInfo,
|
||||
): Promise<GraphInterpretationData> {
|
||||
const { resultsPath, interpretedResultsPath } = resultsPaths;
|
||||
if (await fs.pathExists(interpretedResultsPath)) {
|
||||
if (await pathExists(interpretedResultsPath)) {
|
||||
const dot = await cli.readDotFiles(interpretedResultsPath);
|
||||
return { dot, t: "GraphInterpretationData" };
|
||||
}
|
||||
@@ -271,9 +271,9 @@ export class LocalQueryInfo {
|
||||
if (this.initialInfo.quickEvalPosition) {
|
||||
const { line, endLine, fileName } = this.initialInfo.quickEvalPosition;
|
||||
const lineInfo = line === endLine ? `${line}` : `${line}-${endLine}`;
|
||||
return `${path.basename(fileName)}:${lineInfo}`;
|
||||
return `${basename(fileName)}:${lineInfo}`;
|
||||
}
|
||||
return path.basename(this.initialInfo.queryPath);
|
||||
return basename(this.initialInfo.queryPath);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,5 +1,5 @@
import * as fs from "fs-extra";
import * as path from "path";
import { pathExists, readFile, remove, mkdir, writeFile } from "fs-extra";
import { dirname } from "path";

import { showAndLogErrorMessage } from "./helpers";
import {
@@ -17,11 +17,11 @@ export async function slurpQueryHistory(
fsPath: string,
): Promise<QueryHistoryInfo[]> {
try {
if (!(await fs.pathExists(fsPath))) {
if (!(await pathExists(fsPath))) {
return [];
}

const data = await fs.readFile(fsPath, "utf8");
const data = await readFile(fsPath, "utf8");
const obj = JSON.parse(data);
if (![1, 2].includes(obj.version)) {
void showAndLogErrorMessage(
@@ -89,7 +89,7 @@ export async function slurpQueryHistory(
return true;
}
const resultsPath = q.completedQuery?.query.resultsPaths.resultsPath;
return !!resultsPath && (await fs.pathExists(resultsPath));
return !!resultsPath && (await pathExists(resultsPath));
});
} catch (e) {
void showAndLogErrorMessage("Error loading query history.", {
@@ -98,7 +98,7 @@ export async function slurpQueryHistory(
),
});
// since the query history is invalid, it should be deleted so this error does not happen on next startup.
await fs.remove(fsPath);
await remove(fsPath);
return [];
}
}
@@ -117,8 +117,8 @@ export async function splatQueryHistory(
fsPath: string,
): Promise<void> {
try {
if (!(await fs.pathExists(fsPath))) {
await fs.mkdir(path.dirname(fsPath), { recursive: true });
if (!(await pathExists(fsPath))) {
await mkdir(dirname(fsPath), { recursive: true });
}
// remove incomplete local queries since they cannot be recreated on restart
const filteredQueries = queries.filter((q) =>
@@ -135,7 +135,7 @@ export async function splatQueryHistory(
null,
2,
);
await fs.writeFile(fsPath, data);
await writeFile(fsPath, data);
} catch (e) {
throw new Error(
`Error saving query history to ${fsPath}: ${getErrorMessage(e)}`,

@@ -1,5 +1,5 @@
|
||||
import * as path from "path";
|
||||
import * as fs from "fs-extra";
|
||||
import { dirname } from "path";
|
||||
import { ensureFile } from "fs-extra";
|
||||
|
||||
import { DisposableObject } from "../pure/disposable-object";
|
||||
import { CancellationToken, commands } from "vscode";
|
||||
@@ -140,7 +140,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
}
|
||||
|
||||
const structuredLogFile = `${this.opts.contextStoragePath}/structured-evaluator-log.json`;
|
||||
await fs.ensureFile(structuredLogFile);
|
||||
await ensureFile(structuredLogFile);
|
||||
|
||||
args.push("--evaluator-log");
|
||||
args.push(structuredLogFile);
|
||||
@@ -236,9 +236,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
private updateActiveQuery(method: string, parameter: any): void {
|
||||
if (method === messages.runQuery.method) {
|
||||
this.activeQueryLogFile = findQueryLogFile(
|
||||
path.dirname(
|
||||
path.dirname((parameter as messages.RunQueryParams).outputPath),
|
||||
),
|
||||
dirname(dirname((parameter as messages.RunQueryParams).outputPath)),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
import * as path from "path";
import { join } from "path";
import { CancellationToken } from "vscode";
import * as cli from "../cli";
import { ProgressCallback } from "../commandRunner";
@@ -11,7 +11,7 @@ import {
} from "../helpers";
import { extLogger } from "../common";
import * as messages from "../pure/new-messages";
import * as legacyMessages from "../pure/legacy-messages";
import { QueryResultType } from "../pure/legacy-messages";
import { InitialQueryInfo, LocalQueryInfo } from "../query-results";
import { QueryEvaluationInfo, QueryWithResults } from "../run-queries-shared";
import * as qsClient from "./queryserver-client";
@@ -52,7 +52,7 @@ export async function compileAndRunQueryAgainstDatabase(

const hasMetadataFile = await dbItem.hasMetadataFile();
const query = new QueryEvaluationInfo(
path.join(queryStorageDir, initialInfo.id),
join(queryStorageDir, initialInfo.id),
dbItem.databaseUri.fsPath,
hasMetadataFile,
initialInfo.quickEvalPosition,
@@ -143,8 +143,8 @@ export async function compileAndRunQueryAgainstDatabase(
evaluationTime: result.evaluationTime,
queryId: 0,
resultType: successful
? legacyMessages.QueryResultType.SUCCESS
: legacyMessages.QueryResultType.OTHER_ERROR,
? QueryResultType.SUCCESS
: QueryResultType.OTHER_ERROR,
runId: 0,
message,
},

@@ -1,6 +1,6 @@
import * as fs from "fs-extra";
import * as yaml from "js-yaml";
import * as path from "path";
import { ensureDir, writeFile, pathExists, readFile } from "fs-extra";
import { dump, load } from "js-yaml";
import { basename, join } from "path";
import {
CancellationToken,
ExtensionContext,
@@ -26,7 +26,7 @@ const QUICK_QUERY_WORKSPACE_FOLDER_NAME = "Quick Queries";
const QLPACK_FILE_HEADER = "# This is an automatically generated file.\n\n";

export function isQuickQueryPath(queryPath: string): boolean {
return path.basename(queryPath) === QUICK_QUERY_QUERY_NAME;
return basename(queryPath) === QUICK_QUERY_QUERY_NAME;
}

async function getQuickQueriesDir(ctx: ExtensionContext): Promise<string> {
@@ -34,8 +34,8 @@ async function getQuickQueriesDir(ctx: ExtensionContext): Promise<string> {
if (storagePath === undefined) {
throw new Error("Workspace storage path is undefined");
}
const queriesPath = path.join(storagePath, QUICK_QUERIES_DIR_NAME);
await fs.ensureDir(queriesPath, { mode: 0o700 });
const queriesPath = join(storagePath, QUICK_QUERIES_DIR_NAME);
await ensureDir(queriesPath, { mode: 0o700 });
return queriesPath;
}

@@ -48,8 +48,7 @@ function updateQuickQueryDir(queriesDir: string, index: number, len: number) {

function findExistingQuickQueryEditor() {
return Window.visibleTextEditors.find(
(editor) =>
path.basename(editor.document.uri.fsPath) === QUICK_QUERY_QUERY_NAME,
(editor) => basename(editor.document.uri.fsPath) === QUICK_QUERY_QUERY_NAME,
);
}

@@ -113,8 +112,8 @@ export async function displayQuickQuery(
const dbscheme = await getPrimaryDbscheme(datasetFolder);
const qlpack = (await getQlPackForDbscheme(cliServer, dbscheme))
.dbschemePack;
const qlPackFile = path.join(queriesDir, "qlpack.yml");
const qlFile = path.join(queriesDir, QUICK_QUERY_QUERY_NAME);
const qlPackFile = join(queriesDir, "qlpack.yml");
const qlFile = join(queriesDir, QUICK_QUERY_QUERY_NAME);
const shouldRewrite = await checkShouldRewrite(qlPackFile, qlpack);

// Only rewrite the qlpack file if the database has changed
@@ -126,15 +125,15 @@ export async function displayQuickQuery(
[qlpack]: "*",
},
};
await fs.writeFile(
await writeFile(
qlPackFile,
QLPACK_FILE_HEADER + yaml.dump(quickQueryQlpackYaml),
QLPACK_FILE_HEADER + dump(quickQueryQlpackYaml),
"utf8",
);
}

if (shouldRewrite || !(await fs.pathExists(qlFile))) {
await fs.writeFile(
if (shouldRewrite || !(await pathExists(qlFile))) {
await writeFile(
qlFile,
getInitialQueryContents(dbItem.language, dbscheme),
"utf8",
@@ -160,9 +159,9 @@ export async function displayQuickQuery(
}

async function checkShouldRewrite(qlPackFile: string, newDependency: string) {
if (!(await fs.pathExists(qlPackFile))) {
if (!(await pathExists(qlPackFile))) {
return true;
}
const qlPackContents: any = yaml.load(await fs.readFile(qlPackFile, "utf8"));
const qlPackContents: any = load(await readFile(qlPackFile, "utf8"));
return !qlPackContents.dependencies?.[newDependency];
}

@@ -1,6 +1,6 @@
import * as fs from "fs-extra";
import * as os from "os";
import * as path from "path";
import { pathExists } from "fs-extra";
import { EOL } from "os";
import { extname } from "path";
import { CancellationToken, ExtensionContext } from "vscode";

import { Credentials } from "../authentication";
@@ -121,7 +121,7 @@ export class AnalysesResultsManager {
}

if (allFailures.length > 0) {
throw Error(allFailures.join(os.EOL));
throw Error(allFailures.join(EOL));
}
}

@@ -178,7 +178,7 @@ export class AnalysesResultsManager {
);

let newAnaysisResults: AnalysisResults;
const fileExtension = path.extname(artifactPath);
const fileExtension = extname(artifactPath);
if (fileExtension === ".sarif") {
const queryResults = await this.readSarifResults(
artifactPath,
@@ -225,7 +225,7 @@ export class AnalysesResultsManager {
private async isAnalysisDownloaded(
analysis: AnalysisSummary,
): Promise<boolean> {
return await fs.pathExists(
return await pathExists(
createDownloadPath(this.storagePath, analysis.downloadLink),
);
}
@@ -253,9 +253,7 @@ export class AnalysesResultsManager {
const processedSarif = extractAnalysisAlerts(sarifLog, fileLinkPrefix);
if (processedSarif.errors.length) {
void this.logger.log(
`Error processing SARIF file: ${os.EOL}${processedSarif.errors.join(
os.EOL,
)}`,
`Error processing SARIF file: ${EOL}${processedSarif.errors.join(EOL)}`,
);
}


@@ -1,4 +1,4 @@
import * as path from "path";
import { join } from "path";

/**
* Represents a link to an artifact to be downloaded.
@@ -40,7 +40,7 @@ export function createDownloadPath(
downloadLink: DownloadLink,
extension = "",
) {
return path.join(
return join(
storagePath,
downloadLink.queryId,
downloadLink.id + (extension ? `.${extension}` : ""),

@@ -1,5 +1,5 @@
import * as path from "path";
import * as fs from "fs-extra";
import { join } from "path";
import { ensureDir, writeFile } from "fs-extra";

import {
window,
@@ -101,10 +101,7 @@ export async function exportRemoteQueryResults(

const exportDirectory =
await queryHistoryManager.getQueryHistoryItemDirectory(queryHistoryItem);
const exportedResultsDirectory = path.join(
exportDirectory,
"exported-results",
);
const exportedResultsDirectory = join(exportDirectory, "exported-results");

await exportRemoteQueryAnalysisResults(
ctx,
@@ -214,7 +211,7 @@ export async function exportVariantAnalysisResults(
.toISOString()
.replace(/[-:]/g, "")
.replace(/\.\d+Z$/, "Z");
const exportedResultsDirectory = path.join(
const exportedResultsDirectory = join(
exportDirectory,
"exported-results",
`results_${formattedDate}`,
@@ -373,20 +370,17 @@ async function exportToLocalMarkdown(
exportedResultsPath: string,
markdownFiles: MarkdownFile[],
) {
await fs.ensureDir(exportedResultsPath);
await ensureDir(exportedResultsPath);
for (const markdownFile of markdownFiles) {
const filePath = path.join(
exportedResultsPath,
`${markdownFile.fileName}.md`,
);
await fs.writeFile(filePath, markdownFile.content.join("\n"), "utf8");
const filePath = join(exportedResultsPath, `${markdownFile.fileName}.md`);
await writeFile(filePath, markdownFile.content.join("\n"), "utf8");
}
const shouldOpenExportedResults = await showInformationMessageWithAction(
`Variant analysis results exported to \"${exportedResultsPath}\".`,
"Open exported results",
);
if (shouldOpenExportedResults) {
const summaryFilePath = path.join(exportedResultsPath, "_summary.md");
const summaryFilePath = join(exportedResultsPath, "_summary.md");
const summaryFile = await workspace.openTextDocument(summaryFilePath);
await window.showTextDocument(summaryFile, ViewColumn.One);
await commands.executeCommand("revealFileInOS", Uri.file(summaryFilePath));

@@ -1,5 +1,5 @@
import * as path from "path";
import * as fs from "fs-extra";
import { join } from "path";
import { pathExists, readFile, writeFile } from "fs-extra";
import {
showAndLogErrorMessage,
showAndLogWarningMessage,
@@ -160,7 +160,7 @@ export async function downloadArtifactFromLink(
const extractedPath = createDownloadPath(storagePath, downloadLink);

// first check if we already have the artifact
if (!(await fs.pathExists(extractedPath))) {
if (!(await pathExists(extractedPath))) {
// Download the zipped artifact.
const response = await octokit.request(
`GET ${downloadLink.urlPath}/zip`,
@@ -171,7 +171,7 @@ export async function downloadArtifactFromLink(

await unzipBuffer(response.data as ArrayBuffer, zipFilePath, extractedPath);
}
return path.join(extractedPath, downloadLink.innerFilePath || "");
return join(extractedPath, downloadLink.innerFilePath || "");
}

/**
@@ -220,17 +220,14 @@ async function getResultIndex(
repo,
artifactId,
);
const indexFilePath = path.join(artifactPath, "index.json");
if (!(await fs.pathExists(indexFilePath))) {
const indexFilePath = join(artifactPath, "index.json");
if (!(await pathExists(indexFilePath))) {
void showAndLogWarningMessage(
"Could not find an `index.json` file in the result artifact.",
);
return undefined;
}
const resultIndex = await fs.readFile(
path.join(artifactPath, "index.json"),
"utf8",
);
const resultIndex = await readFile(join(artifactPath, "index.json"), "utf8");

try {
return JSON.parse(resultIndex);
@@ -370,7 +367,7 @@ async function downloadArtifact(
artifact_id: artifactId,
archive_format: "zip",
});
const artifactPath = path.join(tmpDir.name, `${artifactId}`);
const artifactPath = join(tmpDir.name, `${artifactId}`);
await unzipBuffer(
response.data as ArrayBuffer,
`${artifactPath}.zip`,
@@ -385,7 +382,7 @@ async function unzipBuffer(
destinationPath: string,
): Promise<void> {
void extLogger.log(`Saving file to ${filePath}`);
await fs.writeFile(filePath, Buffer.from(data));
await writeFile(filePath, Buffer.from(data));

void extLogger.log(`Unzipping file to ${destinationPath}`);
await unzipFile(filePath, destinationPath);

@@ -1,9 +1,9 @@
import * as os from "os";
import { EOL } from "os";
import { Credentials } from "../authentication";
import { RepositorySelection } from "./repository-selection";
import { Repository } from "./shared/repository";
import { RemoteQueriesResponse } from "./gh-api/remote-queries";
import * as ghApiClient from "./gh-api/gh-api-client";
import { submitRemoteQueries } from "./gh-api/gh-api-client";
import {
showAndLogErrorMessage,
showAndLogInformationMessage,
@@ -20,7 +20,7 @@ export async function runRemoteQueriesApiRequest(
queryPackBase64: string,
): Promise<void | RemoteQueriesResponse> {
try {
const response = await ghApiClient.submitRemoteQueries(credentials, {
const response = await submitRemoteQueries(credentials, {
ref,
language,
repositories: repoSelection.repositories,
@@ -48,8 +48,8 @@ export async function runRemoteQueriesApiRequest(
}
}

const eol = os.EOL;
const eol2 = os.EOL + os.EOL;
const eol = EOL;
const eol2 = EOL + EOL;

// exported for testing only
export function parseResponse(

@@ -7,9 +7,9 @@ import {
env,
} from "vscode";
import { nanoid } from "nanoid";
import * as path from "path";
import * as fs from "fs-extra";
import * as os from "os";
import { join } from "path";
import { writeFile, readFile, remove, pathExists } from "fs-extra";
import { EOL } from "os";

import { Credentials } from "../authentication";
import { CodeQLCliServer } from "../cli";
@@ -324,7 +324,7 @@ export class RemoteQueriesManager extends DisposableObject {
"]",
];

await env.clipboard.writeText(text.join(os.EOL));
await env.clipboard.writeText(text.join(EOL));
}
}

@@ -403,7 +403,7 @@ export class RemoteQueriesManager extends DisposableObject {
*
*/
private async prepareStorageDirectory(queryId: string): Promise<void> {
await createTimestampFile(path.join(this.storagePath, queryId));
await createTimestampFile(join(this.storagePath, queryId));
}

private async getRemoteQueryResult(
@@ -420,26 +420,26 @@ export class RemoteQueriesManager extends DisposableObject {
fileName: string,
obj: T,
): Promise<void> {
const filePath = path.join(this.storagePath, queryId, fileName);
await fs.writeFile(filePath, JSON.stringify(obj, null, 2), "utf8");
const filePath = join(this.storagePath, queryId, fileName);
await writeFile(filePath, JSON.stringify(obj, null, 2), "utf8");
}

private async retrieveJsonFile<T>(
queryId: string,
fileName: string,
): Promise<T> {
const filePath = path.join(this.storagePath, queryId, fileName);
return JSON.parse(await fs.readFile(filePath, "utf8"));
const filePath = join(this.storagePath, queryId, fileName);
return JSON.parse(await readFile(filePath, "utf8"));
}

private async removeStorageDirectory(queryId: string): Promise<void> {
const filePath = path.join(this.storagePath, queryId);
await fs.remove(filePath);
const filePath = join(this.storagePath, queryId);
await remove(filePath);
}

private async queryRecordExists(queryId: string): Promise<boolean> {
const filePath = path.join(this.storagePath, queryId);
return await fs.pathExists(filePath);
const filePath = join(this.storagePath, queryId);
return await pathExists(filePath);
}

/**

@@ -6,7 +6,7 @@ import {
workspace,
commands,
} from "vscode";
import * as path from "path";
import { basename } from "path";

import {
ToRemoteQueriesMessage,
@@ -87,7 +87,7 @@ export class RemoteQueriesView extends AbstractWebview<
query: RemoteQuery,
queryResult: RemoteQueryResult,
): RemoteQueryResultViewModel {
const queryFileName = path.basename(query.queryFilePath);
const queryFileName = basename(query.queryFilePath);
const totalResultCount = sumAnalysisSummariesResults(
queryResult.analysisSummaries,
);

@@ -1,4 +1,4 @@
import * as fs from "fs-extra";
import { readFile } from "fs-extra";
import { Repository as RemoteRepository } from "./repository";
import { QueryMetadata } from "../pure/interface-types";
import { getQueryName } from "./run-remote-query";
@@ -25,7 +25,7 @@ export async function buildRemoteQueryEntity(
repositoryCount: number,
): Promise<RemoteQuery> {
const queryName = getQueryName(queryMetadata, queryFilePath);
const queryText = await fs.readFile(queryFilePath, "utf8");
const queryText = await readFile(queryFilePath, "utf8");
const [owner, name] = controllerRepo.fullName.split("/");

return {

@@ -1,4 +1,4 @@
import * as fs from "fs-extra";
import { pathExists as fs_pathExists, stat, readFile } from "fs-extra";
import { QuickPickItem, window } from "vscode";
import { extLogger } from "../common";
import {
@@ -144,12 +144,12 @@ async function readExternalRepoLists(): Promise<RepoList[]> {
}

async function validateExternalRepoListsFile(path: string): Promise<void> {
const pathExists = await fs.pathExists(path);
const pathExists = await fs_pathExists(path);
if (!pathExists) {
throw Error(`External repository lists file does not exist at ${path}`);
}

const pathStat = await fs.stat(path);
const pathStat = await stat(path);
if (pathStat.isDirectory()) {
throw Error(
"External repository lists path should not point to a directory",
@@ -163,7 +163,7 @@ async function readExternalRepoListsJson(
let json;

try {
const fileContents = await fs.readFile(path, "utf8");
const fileContents = await readFile(path, "utf8");
json = await JSON.parse(fileContents);
} catch (error) {
throw Error("Invalid repository lists file. It should contain valid JSON.");

@@ -1,8 +1,8 @@
import { CancellationToken, Uri, window } from "vscode";
import * as path from "path";
import * as yaml from "js-yaml";
import * as fs from "fs-extra";
import * as tmp from "tmp-promise";
import { relative, join, sep, dirname, parse, basename } from "path";
import { dump, load } from "js-yaml";
import { pathExists, copy, writeFile, readFile, mkdirp } from "fs-extra";
import { dir, tmpName } from "tmp-promise";
import {
askForLanguage,
findLanguage,
@@ -22,7 +22,7 @@ import { ProgressCallback, UserCancellationException } from "../commandRunner";
import { RequestError } from "@octokit/types/dist-types";
import { QueryMetadata } from "../pure/interface-types";
import { getErrorMessage, REPO_REGEX } from "../pure/helpers-pure";
import * as ghApiClient from "./gh-api/gh-api-client";
import { getRepositoryFromNwo } from "./gh-api/gh-api-client";
import {
getRepositorySelection,
isValidSelection,
@@ -61,18 +61,18 @@ async function generateQueryPack(
queryPackDir: string,
): Promise<GeneratedQueryPack> {
const originalPackRoot = await findPackRoot(queryFile);
const packRelativePath = path.relative(originalPackRoot, queryFile);
const targetQueryFileName = path.join(queryPackDir, packRelativePath);
const packRelativePath = relative(originalPackRoot, queryFile);
const targetQueryFileName = join(queryPackDir, packRelativePath);

let language: string | undefined;
if (await fs.pathExists(path.join(originalPackRoot, "qlpack.yml"))) {
if (await pathExists(join(originalPackRoot, "qlpack.yml"))) {
// don't include ql files. We only want the queryFile to be copied.
const toCopy = await cliServer.packPacklist(originalPackRoot, false);

// also copy the lock file (either new name or old name) and the query file itself. These are not included in the packlist.
[
path.join(originalPackRoot, "qlpack.lock.yml"),
path.join(originalPackRoot, "codeql-pack.lock.yml"),
join(originalPackRoot, "qlpack.lock.yml"),
join(originalPackRoot, "codeql-pack.lock.yml"),
queryFile,
].forEach((absolutePath) => {
if (absolutePath) {
@@ -81,15 +81,14 @@ async function generateQueryPack(
});

let copiedCount = 0;
await fs.copy(originalPackRoot, queryPackDir, {
await copy(originalPackRoot, queryPackDir, {
filter: (file: string) =>
// copy file if it is in the packlist, or it is a parent directory of a file in the packlist
!!toCopy.find((f) => {
// Normalized paths ensure that Windows drive letters are capitalized consistently.
const normalizedPath = Uri.file(f).fsPath;
const matches =
normalizedPath === file ||
normalizedPath.startsWith(file + path.sep);
normalizedPath === file || normalizedPath.startsWith(file + sep);
if (matches) {
copiedCount++;
}
@@ -109,7 +108,7 @@ async function generateQueryPack(
// copy only the query file to the query pack directory
// and generate a synthetic query pack
void extLogger.log(`Copying ${queryFile} to ${queryPackDir}`);
await fs.copy(queryFile, targetQueryFileName);
await copy(queryFile, targetQueryFileName);
void extLogger.log("Generating synthetic query pack");
const syntheticQueryPack = {
name: QUERY_PACK_NAME,
@@ -119,10 +118,7 @@ async function generateQueryPack(
},
defaultSuite: generateDefaultSuite(packRelativePath),
};
await fs.writeFile(
path.join(queryPackDir, "qlpack.yml"),
yaml.dump(syntheticQueryPack),
);
await writeFile(join(queryPackDir, "qlpack.yml"), dump(syntheticQueryPack));
}
if (!language) {
throw new UserCancellationException("Could not determine language.");
@@ -133,7 +129,7 @@ async function generateQueryPack(

let precompilationOpts: string[] = [];
if (await cliServer.cliConstraints.supportsQlxRemote()) {
const ccache = path.join(originalPackRoot, ".cache");
const ccache = join(originalPackRoot, ".cache");
precompilationOpts = [
"--qlx",
"--no-default-compilation-cache",
@@ -155,7 +151,7 @@ async function generateQueryPack(
bundlePath,
precompilationOpts,
);
const base64Pack = (await fs.readFile(bundlePath)).toString("base64");
const base64Pack = (await readFile(bundlePath)).toString("base64");
return {
base64Pack,
language,
@@ -164,13 +160,13 @@ async function generateQueryPack(

async function findPackRoot(queryFile: string): Promise<string> {
// recursively find the directory containing qlpack.yml
let dir = path.dirname(queryFile);
while (!(await fs.pathExists(path.join(dir, "qlpack.yml")))) {
dir = path.dirname(dir);
let dir = dirname(queryFile);
while (!(await pathExists(join(dir, "qlpack.yml")))) {
dir = dirname(dir);
if (isFileSystemRoot(dir)) {
// there is no qlpack.yml in this directory or any parent directory.
// just use the query file's directory as the pack root.
return path.dirname(queryFile);
return dirname(queryFile);
}
}

@@ -178,23 +174,23 @@ async function findPackRoot(queryFile: string): Promise<string> {
}

function isFileSystemRoot(dir: string): boolean {
const pathObj = path.parse(dir);
const pathObj = parse(dir);
return pathObj.root === dir && pathObj.base === "";
}

export async function createRemoteQueriesTempDirectory() {
const remoteQueryDir = await tmp.dir({
const remoteQueryDir = await dir({
dir: tmpDir.name,
unsafeCleanup: true,
});
const queryPackDir = path.join(remoteQueryDir.path, "query-pack");
await fs.mkdirp(queryPackDir);
const queryPackDir = join(remoteQueryDir.path, "query-pack");
await mkdirp(queryPackDir);
return { remoteQueryDir, queryPackDir };
}

async function getPackedBundlePath(queryPackDir: string) {
return tmp.tmpName({
dir: path.dirname(queryPackDir),
return tmpName({
dir: dirname(queryPackDir),
postfix: "generated.tgz",
prefix: "qlpack",
});
@@ -316,8 +312,8 @@ async function fixPackFile(
queryPackDir: string,
packRelativePath: string,
): Promise<void> {
const packPath = path.join(queryPackDir, "qlpack.yml");
const qlpack = yaml.load(await fs.readFile(packPath, "utf8")) as QlPack;
const packPath = join(queryPackDir, "qlpack.yml");
const qlpack = load(await readFile(packPath, "utf8")) as QlPack;

// update pack name
qlpack.name = QUERY_PACK_NAME;
@@ -329,7 +325,7 @@ async function fixPackFile(
// remove any ${workspace} version references
removeWorkspaceRefs(qlpack);

await fs.writeFile(packPath, yaml.dump(qlpack));
await writeFile(packPath, dump(qlpack));
}

function generateDefaultSuite(packRelativePath: string) {
@@ -348,7 +344,7 @@ export function getQueryName(
queryFilePath: string,
): string {
// The query name is either the name as specified in the query metadata, or the file name.
return queryMetadata?.name ?? path.basename(queryFilePath);
return queryMetadata?.name ?? basename(queryFilePath);
}

export async function getControllerRepo(
@@ -390,11 +386,7 @@ export async function getControllerRepo(
const [owner, repo] = controllerRepoNwo.split("/");

try {
const controllerRepo = await ghApiClient.getRepositoryFromNwo(
credentials,
owner,
repo,
);
const controllerRepo = await getRepositoryFromNwo(credentials, owner, repo);
void extLogger.log(`Controller repository ID: ${controllerRepo.id}`);
return {
id: controllerRepo.id,

@@ -1,6 +1,9 @@
import * as path from "path";
import { join } from "path";

import * as ghApiClient from "./gh-api/gh-api-client";
import {
submitVariantAnalysis,
getVariantAnalysisRepo,
} from "./gh-api/gh-api-client";
import {
CancellationToken,
commands,
@@ -41,8 +44,8 @@ import {
showAndLogErrorMessage,
showAndLogInformationMessage,
} from "../helpers";
import * as fs from "fs-extra";
import * as os from "os";
import { readFile, readJson, remove, pathExists, outputJson } from "fs-extra";
import { EOL } from "os";
import { cancelVariantAnalysis } from "./gh-api/gh-actions-api-client";
import { ProgressCallback, UserCancellationException } from "../commandRunner";
import { CodeQLCliServer } from "../cli";
@@ -142,7 +145,7 @@ export class VariantAnalysisManager
);
}

const queryText = await fs.readFile(queryFile, "utf8");
const queryText = await readFile(queryFile, "utf8");

const variantAnalysisSubmission: VariantAnalysisSubmission = {
startTime: queryStartTime,
@@ -162,7 +165,7 @@ export class VariantAnalysisManager
},
};

const variantAnalysisResponse = await ghApiClient.submitVariantAnalysis(
const variantAnalysisResponse = await submitVariantAnalysis(
credentials,
variantAnalysisSubmission,
);
@@ -197,7 +200,7 @@ export class VariantAnalysisManager
await this.setVariantAnalysis(variantAnalysis);

try {
const repoStates = await fs.readJson(
const repoStates = await readJson(
this.getRepoStatesStoragePath(variantAnalysis.id),
);
this.repoStates.set(variantAnalysis.id, repoStates);
@@ -245,7 +248,7 @@ export class VariantAnalysisManager
private async removeStorageDirectory(variantAnalysisId: number) {
const storageLocation =
this.getVariantAnalysisStorageLocation(variantAnalysisId);
await fs.remove(storageLocation);
await remove(storageLocation);
}

public async showView(variantAnalysisId: number): Promise<void> {
@@ -321,7 +324,7 @@ export class VariantAnalysisManager
variantAnalysisId: number,
): Promise<boolean> {
const filePath = this.getVariantAnalysisStorageLocation(variantAnalysisId);
return await fs.pathExists(filePath);
return await pathExists(filePath);
}

private async shouldCancelMonitorVariantAnalysis(
@@ -431,7 +434,7 @@ export class VariantAnalysisManager

let repoTask: VariantAnalysisRepositoryTask;
try {
const repoTaskResponse = await ghApiClient.getVariantAnalysisRepo(
const repoTaskResponse = await getVariantAnalysisRepo(
credentials,
variantAnalysis.controllerRepo.id,
variantAnalysis.id,
@@ -478,7 +481,7 @@ export class VariantAnalysisManager
VariantAnalysisScannedRepositoryDownloadStatus.Succeeded;
await this.onRepoStateUpdated(variantAnalysis.id, repoState);

await fs.outputJson(
await outputJson(
this.getRepoStatesStoragePath(variantAnalysis.id),
this.repoStates.get(variantAnalysis.id),
);
@@ -503,7 +506,7 @@ export class VariantAnalysisManager
}

public getVariantAnalysisStorageLocation(variantAnalysisId: number): string {
return path.join(this.storagePath, `${variantAnalysisId}`);
return join(this.storagePath, `${variantAnalysisId}`);
}

public async cancelVariantAnalysis(variantAnalysisId: number) {
@@ -557,11 +560,11 @@ export class VariantAnalysisManager
"]",
];

await env.clipboard.writeText(text.join(os.EOL));
await env.clipboard.writeText(text.join(EOL));
}

private getRepoStatesStoragePath(variantAnalysisId: number): string {
return path.join(
return join(
this.getVariantAnalysisStorageLocation(variantAnalysisId),
VariantAnalysisManager.REPO_STATES_FILENAME,
);

@@ -5,7 +5,7 @@ import {
ExtensionContext,
} from "vscode";
import { Credentials } from "../authentication";
import * as ghApiClient from "./gh-api/gh-api-client";
import { getVariantAnalysis } from "./gh-api/gh-api-client";

import {
isFinalVariantAnalysisStatus,
@@ -60,7 +60,7 @@ export class VariantAnalysisMonitor extends DisposableObject {
return;
}

const variantAnalysisSummary = await ghApiClient.getVariantAnalysis(
const variantAnalysisSummary = await getVariantAnalysis(
credentials,
variantAnalysis.controllerRepo.id,
variantAnalysis.id,

@@ -1,6 +1,12 @@
import * as fs from "fs-extra";
import * as os from "os";
import * as path from "path";
import {
pathExists,
mkdir,
outputJson,
writeFileSync,
readJson,
} from "fs-extra";
import { EOL } from "os";
import { join } from "path";

import { Credentials } from "../authentication";
import { Logger } from "../common";
@@ -15,7 +21,7 @@ import {
VariantAnalysisScannedRepositoryResult,
} from "./shared/variant-analysis";
import { DisposableObject, DisposeHandler } from "../pure/disposable-object";
import * as ghApiClient from "./gh-api/gh-api-client";
import { getVariantAnalysisRepoResult } from "./gh-api/gh-api-client";
import { EventEmitter } from "vscode";
import { unzipFile } from "../pure/zip";

@@ -79,30 +85,27 @@ export class VariantAnalysisResultsManager extends DisposableObject {
repoTask.repository.fullName,
);

const result = await ghApiClient.getVariantAnalysisRepoResult(
const result = await getVariantAnalysisRepoResult(
credentials,
repoTask.artifactUrl,
);

if (!(await fs.pathExists(resultDirectory))) {
await fs.mkdir(resultDirectory, { recursive: true });
if (!(await pathExists(resultDirectory))) {
await mkdir(resultDirectory, { recursive: true });
}

await fs.outputJson(
path.join(
resultDirectory,
VariantAnalysisResultsManager.REPO_TASK_FILENAME,
),
await outputJson(
join(resultDirectory, VariantAnalysisResultsManager.REPO_TASK_FILENAME),
repoTask,
);

const zipFilePath = path.join(resultDirectory, "results.zip");
const unzippedFilesDirectory = path.join(
const zipFilePath = join(resultDirectory, "results.zip");
const unzippedFilesDirectory = join(
resultDirectory,
VariantAnalysisResultsManager.RESULTS_DIRECTORY,
);

fs.writeFileSync(zipFilePath, Buffer.from(result));
writeFileSync(zipFilePath, Buffer.from(result));
await unzipFile(zipFilePath, unzippedFilesDirectory);

this._onResultDownloaded.fire({
@@ -177,11 +180,8 @@ export class VariantAnalysisResultsManager extends DisposableObject {
repositoryFullName,
);

const repoTask: VariantAnalysisRepositoryTask = await fs.readJson(
path.join(
storageDirectory,
VariantAnalysisResultsManager.REPO_TASK_FILENAME,
),
const repoTask: VariantAnalysisRepositoryTask = await readJson(
join(storageDirectory, VariantAnalysisResultsManager.REPO_TASK_FILENAME),
);

if (!repoTask.databaseCommitSha || !repoTask.sourceLocationPrefix) {
@@ -193,13 +193,13 @@ export class VariantAnalysisResultsManager extends DisposableObject {
repoTask.databaseCommitSha,
);

const resultsDirectory = path.join(
const resultsDirectory = join(
storageDirectory,
VariantAnalysisResultsManager.RESULTS_DIRECTORY,
);
const sarifPath = path.join(resultsDirectory, "results.sarif");
const bqrsPath = path.join(resultsDirectory, "results.bqrs");
if (await fs.pathExists(sarifPath)) {
const sarifPath = join(resultsDirectory, "results.sarif");
const bqrsPath = join(resultsDirectory, "results.bqrs");
if (await pathExists(sarifPath)) {
const interpretedResults = await this.readSarifResults(
sarifPath,
fileLinkPrefix,
@@ -212,7 +212,7 @@ export class VariantAnalysisResultsManager extends DisposableObject {
};
}

if (await fs.pathExists(bqrsPath)) {
if (await pathExists(bqrsPath)) {
const rawResults = await this.readBqrsResults(
bqrsPath,
fileLinkPrefix,
@@ -233,7 +233,7 @@ export class VariantAnalysisResultsManager extends DisposableObject {
variantAnalysisStoragePath: string,
repositoryFullName: string,
): Promise<boolean> {
return await fs.pathExists(
return await pathExists(
this.getRepoStorageDirectory(
variantAnalysisStoragePath,
repositoryFullName,
@@ -264,9 +264,7 @@ export class VariantAnalysisResultsManager extends DisposableObject {
const processedSarif = extractAnalysisAlerts(sarifLog, fileLinkPrefix);
if (processedSarif.errors.length) {
void this.logger.log(
`Error processing SARIF file: ${os.EOL}${processedSarif.errors.join(
os.EOL,
)}`,
`Error processing SARIF file: ${EOL}${processedSarif.errors.join(EOL)}`,
);
}

@@ -277,7 +275,7 @@ export class VariantAnalysisResultsManager extends DisposableObject {
variantAnalysisStoragePath: string,
fullName: string,
): string {
return path.join(variantAnalysisStoragePath, fullName);
return join(variantAnalysisStoragePath, fullName);
}

private createGitHubDotcomFileLinkPrefix(

@@ -1,7 +1,7 @@
import * as messages from "./pure/messages-shared";
import * as legacyMessages from "./pure/legacy-messages";
import { DatabaseInfo, QueryMetadata } from "./pure/interface-types";
import * as path from "path";
import { join, parse, dirname, basename } from "path";
import { createTimestampFile, showAndLogWarningMessage } from "./helpers";
import {
ConfigurationTarget,
@@ -11,9 +11,15 @@ import {
Uri,
window,
} from "vscode";
import * as config from "./config";
import { isCanary, AUTOSAVE_SETTING } from "./config";
import { UserCancellationException } from "./commandRunner";
import * as fs from "fs-extra";
import {
pathExists,
readFile,
createWriteStream,
remove,
readdir,
} from "fs-extra";
import {
ensureMetadataIsComplete,
InitialQueryInfo,
@@ -37,27 +43,27 @@ import { asError } from "./pure/helpers-pure";
*/

export function findQueryLogFile(resultPath: string): string {
return path.join(resultPath, "query.log");
return join(resultPath, "query.log");
}

function findQueryEvalLogFile(resultPath: string): string {
return path.join(resultPath, "evaluator-log.jsonl");
return join(resultPath, "evaluator-log.jsonl");
}

function findQueryEvalLogSummaryFile(resultPath: string): string {
return path.join(resultPath, "evaluator-log.summary");
return join(resultPath, "evaluator-log.summary");
}

function findJsonQueryEvalLogSummaryFile(resultPath: string): string {
return path.join(resultPath, "evaluator-log.summary.jsonl");
return join(resultPath, "evaluator-log.summary.jsonl");
}

function findQueryEvalLogSummarySymbolsFile(resultPath: string): string {
return path.join(resultPath, "evaluator-log.summary.symbols.json");
return join(resultPath, "evaluator-log.summary.symbols.json");
}

function findQueryEvalLogEndSummaryFile(resultPath: string): string {
return path.join(resultPath, "evaluator-log-end.summary");
return join(resultPath, "evaluator-log-end.summary");
}

export class QueryEvaluationInfo {
@@ -76,18 +82,18 @@ export class QueryEvaluationInfo {
}

get dilPath() {
return path.join(this.querySaveDir, "results.dil");
return join(this.querySaveDir, "results.dil");
}

/**
* Get the path that the compiled query is if it exists. Note that it only exists when using the legacy query server.
*/
get compileQueryPath() {
return path.join(this.querySaveDir, "compiledQuery.qlo");
return join(this.querySaveDir, "compiledQuery.qlo");
}

get csvPath() {
return path.join(this.querySaveDir, "results.csv");
return join(this.querySaveDir, "results.csv");
}

get logPath() {
@@ -116,8 +122,8 @@ export class QueryEvaluationInfo {

get resultsPaths() {
return {
resultsPath: path.join(this.querySaveDir, "results.bqrs"),
interpretedResultsPath: path.join(
resultsPath: join(this.querySaveDir, "results.bqrs"),
interpretedResultsPath: join(
this.querySaveDir,
this.metadata?.kind === "graph"
? "graphResults"
@@ -126,7 +132,7 @@ export class QueryEvaluationInfo {
};
}
getSortedResultSetPath(resultSetName: string) {
return path.join(this.querySaveDir, `sortedResults-${resultSetName}.bqrs`);
return join(this.querySaveDir, `sortedResults-${resultSetName}.bqrs`);
}

/**
@@ -159,7 +165,7 @@ export class QueryEvaluationInfo {

// Graph queries only return interpreted results if we are in canary mode.
if (kind === "graph") {
return config.isCanary();
return isCanary();
}

// table is the default query kind. It does not produce interpreted results.
@@ -171,21 +177,21 @@ export class QueryEvaluationInfo {
* Holds if this query actually has produced interpreted results.
*/
async hasInterpretedResults(): Promise<boolean> {
return fs.pathExists(this.resultsPaths.interpretedResultsPath);
return pathExists(this.resultsPaths.interpretedResultsPath);
}

/**
* Holds if this query already has DIL produced
*/
async hasDil(): Promise<boolean> {
return fs.pathExists(this.dilPath);
return pathExists(this.dilPath);
}

/**
* Holds if this query already has CSV results produced
*/
async hasCsv(): Promise<boolean> {
return fs.pathExists(this.csvPath);
return pathExists(this.csvPath);
}

/**
@@ -197,7 +203,7 @@ export class QueryEvaluationInfo {
return this.dilPath;
}
const compiledQuery = this.compileQueryPath;
if (!(await fs.pathExists(compiledQuery))) {
if (!(await pathExists(compiledQuery))) {
if (await cliServer.cliConstraints.supportsNewQueryServer()) {
// This could be from the new query server
// in which case we expect the qlo to be missing so we should ignore it
@@ -219,7 +225,7 @@ export class QueryEvaluationInfo {
* Holds if this query already has a completed structured evaluator log
*/
async hasEvalLog(): Promise<boolean> {
return fs.pathExists(this.evalLogPath);
return pathExists(this.evalLogPath);
}

/**
@@ -234,7 +240,7 @@ export class QueryEvaluationInfo {
queryInfo.evalLogSummaryLocation =
await this.generateHumanReadableLogSummary(cliServer);
void this.logEndSummary(queryInfo.evalLogSummaryLocation, logger); // Logged asynchronously
if (config.isCanary()) {
if (isCanary()) {
// Generate JSON summary for viewer.
await cliServer.generateJsonLogSummary(
this.evalLogPath,
@@ -287,7 +293,7 @@ export class QueryEvaluationInfo {
}

try {
const endSummaryContent = await fs.readFile(
const endSummaryContent = await readFile(
this.evalLogEndSummaryPath,
"utf-8",
);
@@ -323,7 +329,7 @@ export class QueryEvaluationInfo {
return false;
}
let stopDecoding = false;
const out = fs.createWriteStream(csvPath);
const out = createWriteStream(csvPath);

const promise: Promise<boolean> = new Promise((resolve, reject) => {
out.on("finish", () => resolve(true));
@@ -422,7 +428,7 @@ export class QueryEvaluationInfo {
* Cleans this query's results directory.
*/
async deleteQuery(): Promise<void> {
await fs.remove(this.querySaveDir);
await remove(this.querySaveDir);
}
}

@@ -563,7 +569,7 @@ async function promptUserToSaveChanges(
document: TextDocument,
): Promise<boolean> {
if (document.isDirty) {
if (config.AUTOSAVE_SETTING.getValue()) {
if (AUTOSAVE_SETTING.getValue()) {
return true;
} else {
const yesItem = { title: "Yes", isCloseAffordance: false };
@@ -584,10 +590,7 @@ async function promptUserToSaveChanges(
);

if (chosenItem === alwaysItem) {
await config.AUTOSAVE_SETTING.updateValue(
true,
ConfigurationTarget.Workspace,
);
await AUTOSAVE_SETTING.updateValue(true, ConfigurationTarget.Workspace);
return true;
}

@@ -608,13 +611,13 @@ async function promptUserToSaveChanges(
*/
async function convertToQlPath(filePath: string): Promise<string> {
if (process.platform === "win32") {
if (path.parse(filePath).root === filePath) {
if (parse(filePath).root === filePath) {
// Java assumes uppercase drive letters are canonical.
return filePath.toUpperCase();
} else {
const dir = await convertToQlPath(path.dirname(filePath));
const fileName = path.basename(filePath);
const fileNames = await fs.readdir(dir);
const dir = await convertToQlPath(dirname(filePath));
const fileName = basename(filePath);
const fileNames = await readdir(dir);
for (const name of fileNames) {
// Leave the locale argument empty so that the default OS locale is used.
// We do this because this operation works on filesystem entities, which
@@ -623,7 +626,7 @@ async function convertToQlPath(filePath: string): Promise<string> {
fileName.localeCompare(name, undefined, { sensitivity: "accent" }) ===
0
) {
return path.join(dir, name);
return join(dir, name);
}
}
}
@@ -658,7 +661,7 @@ export async function createInitialQueryInfo(
isQuickEval,
isQuickQuery: isQuickQueryPath(queryPath),
databaseInfo,
id: `${path.basename(queryPath)}-${nanoid()}`,
id: `${basename(queryPath)}-${nanoid()}`,
start: new Date(),
...(isQuickEval
? {
@@ -666,7 +669,7 @@ export async function createInitialQueryInfo(
quickEvalPosition: quickEvalPosition,
}
: {
queryText: await fs.readFile(queryPath, "utf8"),
queryText: await readFile(queryPath, "utf8"),
}),
};
}

@@ -1,5 +1,5 @@
import * as Sarif from "sarif";
import * as fs from "fs-extra";
import { createReadStream } from "fs-extra";
import { connectTo } from "stream-json/Assembler";
import { getErrorMessage } from "./pure/helpers-pure";
import { withParser } from "stream-json/filters/Pick";
@@ -11,9 +11,9 @@ export async function sarifParser(
): Promise<Sarif.Log> {
try {
// Parse the SARIF file into token streams, filtering out only the results array.
const pipeline = fs
.createReadStream(interpretedResultsPath)
.pipe(withParser({ filter: "runs.0.results" }));
const pipeline = createReadStream(interpretedResultsPath).pipe(
withParser({ filter: "runs.0.results" }),
);

// Creates JavaScript objects from the token stream
const asm = connectTo(pipeline);

@@ -1,5 +1,5 @@
import * as fs from "fs-extra";
import * as path from "path";
import { access } from "fs-extra";
import { dirname, extname } from "path";
import * as vscode from "vscode";
import {
TestAdapter,
@@ -57,7 +57,7 @@ export function getActualFile(testPath: string): string {
* @param testPath The full path to the test file.
*/
export function getTestDirectory(testPath: string): string {
return path.dirname(testPath);
return dirname(testPath);
}

/**
@@ -97,7 +97,7 @@ export class QLTestAdapterFactory extends DisposableObject {
* @param ext The new extension, including the `.`.
*/
function changeExtension(p: string, ext: string): string {
return p.slice(0, -path.extname(p).length) + ext;
return p.slice(0, -extname(p).length) + ext;
}

/**
@@ -325,7 +325,7 @@ export class QLTestAdapter extends DisposableObject implements TestAdapter {

private async isFileAccessible(uri: vscode.Uri): Promise<boolean> {
try {
await fs.access(uri.fsPath);
await access(uri.fsPath);
return true;
} catch {
return false;

@@ -1,5 +1,5 @@
import * as fs from "fs-extra";
import * as path from "path";
import { lstat, copy, pathExists, createFile } from "fs-extra";
import { basename } from "path";
import { Uri, TextDocumentShowOptions, commands, window } from "vscode";
import {
TestHub,
@@ -76,17 +76,17 @@ export class TestUIService extends UIService implements TestController {

private async acceptOutput(node: TestTreeNode): Promise<void> {
const testId = node.info.id;
const stat = await fs.lstat(testId);
const stat = await lstat(testId);
if (stat.isFile()) {
const expectedPath = getExpectedFile(testId);
const actualPath = getActualFile(testId);
await fs.copy(actualPath, expectedPath, { overwrite: true });
await copy(actualPath, expectedPath, { overwrite: true });
}
}

private async showOutputDifferences(node: TestTreeNode): Promise<void> {
const testId = node.info.id;
const stat = await fs.lstat(testId);
const stat = await lstat(testId);
if (stat.isFile()) {
const expectedPath = getExpectedFile(testId);
const expectedUri = Uri.file(expectedPath);
@@ -96,22 +96,20 @@ export class TestUIService extends UIService implements TestController {
preview: true,
};

if (!(await fs.pathExists(expectedPath))) {
if (!(await pathExists(expectedPath))) {
void showAndLogWarningMessage(
`'${path.basename(
expectedPath,
)}' does not exist. Creating an empty file.`,
`'${basename(expectedPath)}' does not exist. Creating an empty file.`,
);
await fs.createFile(expectedPath);
await createFile(expectedPath);
}

if (await fs.pathExists(actualPath)) {
if (await pathExists(actualPath)) {
const actualUri = Uri.file(actualPath);
await commands.executeCommand<void>(
"vscode.diff",
expectedUri,
actualUri,
`Expected vs. Actual for ${path.basename(testId)}`,
`Expected vs. Actual for ${basename(testId)}`,
options,
);
} else {

@@ -1,5 +1,5 @@
import * as React from "react";
import * as ReactDOM from "react-dom";
import { createPortal } from "react-dom";
import styled from "styled-components";
import { XCircleIcon } from "@primer/octicons-react";

@@ -37,7 +37,7 @@ const FullScreenModal = ({
throw Error(`Could not find container element. Id: ${containerElementId}`);
}

return ReactDOM.createPortal(
return createPortal(
<>
<Container>
<CloseButton onClick={() => setOpen(false)}>

@@ -1,8 +1,8 @@
import * as path from "path";
import { basename } from "path";
import * as React from "react";
import * as Sarif from "sarif";
import * as Keys from "../../pure/result-keys";
import * as octicons from "./octicons";
import { chevronDown, chevronRight, info, listUnordered } from "./octicons";
import {
className,
renderLocation,
@@ -222,7 +222,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
if ("hint" in parsedLoc) {
return renderNonLocation("[no location]", parsedLoc.hint);
} else if (isWholeFileLoc(parsedLoc)) {
const shortLocation = `${path.basename(parsedLoc.userVisibleFile)}`;
const shortLocation = `${basename(parsedLoc.userVisibleFile)}`;
const longLocation = `${parsedLoc.userVisibleFile}`;
return renderLocation(
parsedLoc,
@@ -232,7 +232,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
updateSelectionCallback(pathNodeKey),
);
} else if (isLineColumnLoc(parsedLoc)) {
const shortLocation = `${path.basename(parsedLoc.userVisibleFile)}:${
const shortLocation = `${basename(parsedLoc.userVisibleFile)}:${
parsedLoc.startLine
}:${parsedLoc.startColumn}`;
const longLocation = `${parsedLoc.userVisibleFile}`;
@@ -270,9 +270,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
const currentResultExpanded = this.state.expanded.has(
Keys.keyToString(resultKey),
);
const indicator = currentResultExpanded
? octicons.chevronDown
: octicons.chevronRight;
const indicator = currentResultExpanded ? chevronDown : chevronRight;
const location =
result.locations !== undefined &&
result.locations.length > 0 &&
@@ -293,7 +291,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
key={resultIndex}
{...selectableZebraStripe(resultRowIsSelected, resultIndex)}
>
<td className="vscode-codeql__icon-cell">{octicons.info}</td>
<td className="vscode-codeql__icon-cell">{info}</td>
<td colSpan={3}>{msg}</td>
{locationCells}
</tr>,
@@ -320,9 +318,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
>
{indicator}
</td>
<td className="vscode-codeql__icon-cell">
{octicons.listUnordered}
</td>
<td className="vscode-codeql__icon-cell">{listUnordered}</td>
<td colSpan={2}>{msg}</td>
{locationCells}
</tr>,
@@ -335,8 +331,8 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
);
if (currentResultExpanded) {
const indicator = currentPathExpanded
? octicons.chevronDown
: octicons.chevronRight;
? chevronDown
: chevronRight;
const isPathSpecificallySelected = Keys.equalsNotUndefined(
pathKey,
selectedItem,

@@ -1,5 +1,5 @@
|
||||
import * as React from "react";
|
||||
import * as d3 from "d3";
|
||||
import { select } from "d3";
|
||||
import { ResultTableProps, jumpToLocation } from "./result-table-utils";
|
||||
import {
|
||||
InterpretedResultSet,
|
||||
@@ -87,7 +87,7 @@ export class Graph extends React.Component<GraphProps> {
|
||||
d.attributes["xlink:href"] = "#";
|
||||
d.attributes["href"] = "#";
|
||||
loc.uri = "file://" + loc.uri;
|
||||
d3.select(this).on("click", function (e) {
|
||||
select(this).on("click", function (e) {
|
||||
jumpToLocation(loc, databaseUri);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
import * as React from "react";
import { createRef } from "react";

/**
* Some book-keeping needed to scroll a specific HTML element into view in a React component.
*/
export class ScrollIntoViewHelper {
private selectedElementRef = React.createRef<HTMLElement | any>(); // need 'any' to work around typing bug in React
private selectedElementRef = createRef<HTMLElement | any>(); // need 'any' to work around typing bug in React
private shouldScrollIntoView = true;

/**

@@ -1,4 +1,4 @@
import * as ReactDOM from "react-dom";
import { render as ReactDOM_render } from "react-dom";
import { vscode } from "./vscode-api";

import { WebviewDefinition } from "./webview-definition";
@@ -25,7 +25,7 @@ const render = () => {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const view: WebviewDefinition = require(`./${viewName}/index.tsx`).default;

ReactDOM.render(
ReactDOM_render(
view.component,
document.getElementById("root"),
// Post a message to the extension when fully loaded.

@@ -1,4 +1,4 @@
import * as path from "path";
import { join } from "path";
import { extensions, CancellationToken, Uri, window } from "vscode";

import { CodeQLExtensionInterface } from "../../extension";
@@ -68,7 +68,7 @@ describe("Databases", () => {
expect(dbItem).toBeDefined();
dbItem = dbItem!;
expect(dbItem.name).toBe("db");
expect(dbItem.databaseUri.fsPath).toBe(path.join(storagePath, "db", "db"));
expect(dbItem.databaseUri.fsPath).toBe(join(storagePath, "db", "db"));
});

it("should add a database from lgtm with only one language", async () => {
@@ -84,7 +84,7 @@ describe("Databases", () => {
dbItem = dbItem!;
expect(dbItem.name).toBe("aeisenberg_angular-bind-notifier_106179a");
expect(dbItem.databaseUri.fsPath).toBe(
path.join(
join(
storagePath,
"javascript",
"aeisenberg_angular-bind-notifier_106179a",
@@ -106,7 +106,7 @@ describe("Databases", () => {
dbItem = dbItem!;
expect(dbItem.name).toBe("db");
expect(dbItem.databaseUri.fsPath).toBe(
path.join(storagePath, "simple-db", "db"),
join(storagePath, "simple-db", "db"),
);
});
});

@@ -1,6 +1,6 @@
import * as path from "path";
import * as yaml from "js-yaml";
import * as fs from "fs-extra";
import { join } from "path";
import { load, dump } from "js-yaml";
import { realpathSync, readFileSync, writeFileSync } from "fs-extra";
import { commands } from "vscode";
import { DatabaseManager } from "../../databases";
import { CodeQLCliServer } from "../../cli";
@@ -13,8 +13,8 @@ export const DB_URL =

// We need to resolve the path, but the final three segments won't exist until later, so we only resolve the
// first portion of the path.
export const dbLoc = path.join(
fs.realpathSync(path.join(__dirname, "../../../")),
export const dbLoc = join(
realpathSync(join(__dirname, "../../../")),
"build/tests/db.zip",
);
export let storagePath: string;
@@ -45,12 +45,10 @@ export async function fixWorkspaceReferences(
): Promise<Record<string, string> | undefined> {
if (!(await cli.cliConstraints.supportsWorkspaceReferences())) {
// remove the workspace references from the qlpack
const qlpack = yaml.load(
fs.readFileSync(qlpackFileWithWorkspaceRefs, "utf8"),
);
const qlpack = load(readFileSync(qlpackFileWithWorkspaceRefs, "utf8"));
const originalDeps = { ...qlpack.dependencies };
removeWorkspaceRefs(qlpack);
fs.writeFileSync(qlpackFileWithWorkspaceRefs, yaml.dump(qlpack));
writeFileSync(qlpackFileWithWorkspaceRefs, dump(qlpack));
return originalDeps;
}
return undefined;
@@ -71,9 +69,7 @@ export async function restoreWorkspaceReferences(
if (!originalDeps) {
return;
}
const qlpack = yaml.load(
fs.readFileSync(qlpackFileWithWorkspaceRefs, "utf8"),
);
const qlpack = load(readFileSync(qlpackFileWithWorkspaceRefs, "utf8"));
qlpack.dependencies = originalDeps;
fs.writeFileSync(qlpackFileWithWorkspaceRefs, yaml.dump(qlpack));
writeFileSync(qlpackFileWithWorkspaceRefs, dump(qlpack));
}

@@ -1,4 +1,4 @@
import * as path from "path";
import { join } from "path";
import { extensions } from "vscode";

import { CodeQLCliServer } from "../../cli";
@@ -9,10 +9,7 @@ import { tryGetQueryMetadata } from "../../helpers";
jest.setTimeout(3 * 60 * 1000);

describe("helpers (with CLI)", () => {
const baseDir = path.join(
__dirname,
"../../../src/vscode-tests/cli-integration",
);
const baseDir = join(__dirname, "../../../src/vscode-tests/cli-integration");

let cli: CodeQLCliServer;

@@ -35,7 +32,7 @@ describe("helpers (with CLI)", () => {
// Query with metadata
const metadata = await tryGetQueryMetadata(
cli,
path.join(baseDir, "data", "simple-javascript-query.ql"),
join(baseDir, "data", "simple-javascript-query.ql"),
);

expect(metadata!.name).toBe("This is the name");
@@ -47,7 +44,7 @@ describe("helpers (with CLI)", () => {
// Query with empty metadata
const noMetadata = await tryGetQueryMetadata(
cli,
path.join(baseDir, "data", "simple-query.ql"),
join(baseDir, "data", "simple-query.ql"),
);

expect(noMetadata).toEqual({});

@@ -1,5 +1,5 @@
import * as cp from "child_process";
import * as path from "path";
import { spawnSync } from "child_process";
import { dirname } from "path";

import type * as JestRunner from "jest-runner";
import VSCodeTestRunner, { RunnerOptions } from "jest-runner-vscode";
@@ -26,7 +26,7 @@ export default class JestRunnerCliIntegration extends VSCodeTestRunner {
new Set<`${RunnerOptions["version"]}-${RunnerOptions["platform"]}`>();

for (const test of tests) {
const testDir = path.dirname(test.path);
const testDir = dirname(test.path);

const options: RunnerOptions =
((await cosmiconfig("jest-runner-vscode").search(testDir))
@@ -49,7 +49,7 @@ export default class JestRunnerCliIntegration extends VSCodeTestRunner {
const [cli, ...args] =
resolveCliArgsFromVSCodeExecutablePath(vscodeExecutablePath);

cp.spawnSync(
spawnSync(
cli,
[
...args,

@@ -1,5 +1,10 @@
import * as fs from "fs-extra";
import * as path from "path";
import {
mkdirpSync,
existsSync,
createWriteStream,
realpathSync,
} from "fs-extra";
import { dirname } from "path";
import fetch from "node-fetch";
import { DB_URL, dbLoc, setStoragePath, storagePath } from "./global.helper";
import * as tmp from "tmp";
@@ -22,13 +27,13 @@ beforeAll(async () => {
await getTestSetting(CUSTOM_CODEQL_PATH_SETTING)?.setup();

// ensure the test database is downloaded
fs.mkdirpSync(path.dirname(dbLoc));
if (!fs.existsSync(dbLoc)) {
mkdirpSync(dirname(dbLoc));
if (!existsSync(dbLoc)) {
console.log(`Downloading test database to ${dbLoc}`);

await new Promise((resolve, reject) => {
return fetch(DB_URL).then((response) => {
const dest = fs.createWriteStream(dbLoc);
const dest = createWriteStream(dbLoc);
response.body.pipe(dest);

response.body.on("error", reject);
@@ -42,7 +47,7 @@ beforeAll(async () => {

// Create the temp directory to be used as extension local storage.
const dir = tmp.dirSync();
let storagePath = fs.realpathSync(dir.name);
let storagePath = realpathSync(dir.name);
if (storagePath.substring(0, 2).match(/[A-Z]:/)) {
storagePath =
storagePath.substring(0, 1).toLocaleLowerCase() +

@@ -1,7 +1,7 @@
import * as fs from "fs-extra";
import * as path from "path";
import * as tmp from "tmp";
import * as url from "url";
import { existsSync } from "fs-extra";
import { join, basename } from "path";
import { dirSync } from "tmp";
import { pathToFileURL } from "url";
import { CancellationTokenSource } from "vscode-jsonrpc";
import * as messages from "../../pure/legacy-messages";
import * as qsClient from "../../legacy-query-server/queryserver-client";
@@ -13,16 +13,16 @@ import { describeWithCodeQL } from "../cli";
import { QueryServerClient } from "../../legacy-query-server/queryserver-client";
import { extLogger, ProgressReporter } from "../../common";

const baseDir = path.join(__dirname, "../../../test/data");
const baseDir = join(__dirname, "../../../test/data");

const tmpDir = tmp.dirSync({
const tmpDir = dirSync({
prefix: "query_test_",
keep: false,
unsafeCleanup: true,
});

const COMPILED_QUERY_PATH = path.join(tmpDir.name, "compiled.qlo");
const RESULTS_PATH = path.join(tmpDir.name, "results.bqrs");
const COMPILED_QUERY_PATH = join(tmpDir.name, "compiled.qlo");
const RESULTS_PATH = join(tmpDir.name, "results.bqrs");

const source = new CancellationTokenSource();
const token = source.token;
@@ -70,19 +70,19 @@ type QueryTestCase = {
// Test cases: queries to run and their expected results.
const queryTestCases: QueryTestCase[] = [
{
queryPath: path.join(baseDir, "query.ql"),
queryPath: join(baseDir, "query.ql"),
expectedResultSets: {
"#select": [[42, 3.14159, "hello world", true]],
},
},
{
queryPath: path.join(baseDir, "compute-default-strings.ql"),
queryPath: join(baseDir, "compute-default-strings.ql"),
expectedResultSets: {
"#select": [[{ label: "(no string representation)" }]],
},
},
{
queryPath: path.join(baseDir, "multiple-result-sets.ql"),
queryPath: join(baseDir, "multiple-result-sets.ql"),
expectedResultSets: {
edges: [
[1, 2],
@@ -94,7 +94,7 @@ const queryTestCases: QueryTestCase[] = [
];

const db: messages.Dataset = {
dbDir: path.join(__dirname, "../../../.vscode-test/test-db"),
dbDir: join(__dirname, "../../../.vscode-test/test-db"),
workingSet: "default",
};

@@ -148,7 +148,7 @@ describeWithCodeQL()("using the legacy query server", () => {
});

for (const queryTestCase of queryTestCases) {
const queryName = path.basename(queryTestCase.queryPath);
const queryName = basename(queryTestCase.queryPath);
const compilationSucceeded = new Checkpoint<void>();
const evaluationSucceeded = new Checkpoint<void>();
const parsedResults = new Checkpoint<void>();
@@ -167,11 +167,11 @@ describeWithCodeQL()("using the legacy query server", () => {
});

it(`should be able to compile query ${queryName}`, async () => {
expect(fs.existsSync(queryTestCase.queryPath)).toBe(true);
expect(existsSync(queryTestCase.queryPath)).toBe(true);
try {
const qlProgram: messages.QlProgram = {
libraryPath: [],
dbschemePath: path.join(baseDir, "test.dbscheme"),
dbschemePath: join(baseDir, "test.dbscheme"),
queryPath: queryTestCase.queryPath,
};
const params: messages.CompileQueryParams = {
@@ -213,7 +213,7 @@ describeWithCodeQL()("using the legacy query server", () => {
});
const queryToRun: messages.QueryToRun = {
resultsPath: RESULTS_PATH,
qlo: url.pathToFileURL(COMPILED_QUERY_PATH).toString(),
qlo: pathToFileURL(COMPILED_QUERY_PATH).toString(),
allowUnknownTemplates: true,
id: callbackId,
timeoutSecs: 1000,

@@ -1,5 +1,5 @@
import * as path from "path";
import * as tmp from "tmp";
import { join, basename } from "path";
import { dirSync } from "tmp";
import { CancellationTokenSource } from "vscode-jsonrpc";
import * as messages from "../../pure/new-messages";
import * as qsClient from "../../query-server/queryserver-client";
@@ -14,15 +14,15 @@ import { QueryResultType } from "../../pure/new-messages";
import { cleanDatabases, dbLoc, storagePath } from "./global.helper";
import { importArchiveDatabase } from "../../databaseFetcher";

const baseDir = path.join(__dirname, "../../../test/data");
const baseDir = join(__dirname, "../../../test/data");

const tmpDir = tmp.dirSync({
const tmpDir = dirSync({
prefix: "query_test_",
keep: false,
unsafeCleanup: true,
});

const RESULTS_PATH = path.join(tmpDir.name, "results.bqrs");
const RESULTS_PATH = join(tmpDir.name, "results.bqrs");

const source = new CancellationTokenSource();
const token = source.token;
@@ -70,19 +70,19 @@ type QueryTestCase = {
// Test cases: queries to run and their expected results.
const queryTestCases: QueryTestCase[] = [
{
queryPath: path.join(baseDir, "query.ql"),
queryPath: join(baseDir, "query.ql"),
expectedResultSets: {
"#select": [[42, 3.14159, "hello world", true]],
},
},
{
queryPath: path.join(baseDir, "compute-default-strings.ql"),
queryPath: join(baseDir, "compute-default-strings.ql"),
expectedResultSets: {
"#select": [[{ label: "(no string representation)" }]],
},
},
{
queryPath: path.join(baseDir, "multiple-result-sets.ql"),
queryPath: join(baseDir, "multiple-result-sets.ql"),
expectedResultSets: {
edges: [
[1, 2],
@@ -168,7 +168,7 @@ describeWithCodeQL()("using the new query server", () => {
});

for (const queryTestCase of queryTestCases) {
const queryName = path.basename(queryTestCase.queryPath);
const queryName = basename(queryTestCase.queryPath);
const evaluationSucceeded = new Checkpoint<void>();
const parsedResults = new Checkpoint<void>();

@@ -1,5 +1,5 @@
import { extensions, QuickPickItem, window } from "vscode";
import * as path from "path";
import { join } from "path";

import { CodeQLCliServer } from "../../cli";
import { CodeQLExtensionInterface } from "../../extension";
@@ -91,7 +91,7 @@ describe("Packaging commands", () => {
});

it("should install valid workspace pack", async () => {
const rootDir = path.join(
const rootDir = join(
__dirname,
"../../../src/vscode-tests/cli-integration/data",
);
@@ -109,7 +109,7 @@ describe("Packaging commands", () => {
});

it("should throw an error when installing invalid workspace pack", async () => {
const rootDir = path.join(
const rootDir = join(
__dirname,
"../../../src/vscode-tests/cli-integration/data-invalid-pack",
);

@@ -5,9 +5,15 @@ import {
extensions,
Uri,
} from "vscode";
import * as path from "path";
import * as fs from "fs-extra";
import * as yaml from "js-yaml";
import { join, dirname } from "path";
import {
pathExistsSync,
readFileSync,
mkdirpSync,
writeFileSync,
unlinkSync,
} from "fs-extra";
import { load, dump } from "js-yaml";

import { DatabaseItem, DatabaseManager } from "../../databases";
import { CodeQLExtensionInterface } from "../../extension";
@@ -91,11 +97,11 @@ describeWithCodeQL()("Queries", () => {
});

it("should run a query", async () => {
const queryPath = path.join(__dirname, "data", "simple-query.ql");
const queryPath = join(__dirname, "data", "simple-query.ql");
const result = qs.compileAndRunQueryAgainstDatabase(
dbItem,
await mockInitialQueryInfo(queryPath),
path.join(tmpDir.name, "mock-storage-path"),
join(tmpDir.name, "mock-storage-path"),
progress,
token,
);
@@ -107,11 +113,11 @@ describeWithCodeQL()("Queries", () => {
// Asserts a fix for bug https://github.com/github/vscode-codeql/issues/733
it("should restart the database and run a query", async () => {
await commands.executeCommand("codeQL.restartQueryServer");
const queryPath = path.join(__dirname, "data", "simple-query.ql");
const queryPath = join(__dirname, "data", "simple-query.ql");
const result = await qs.compileAndRunQueryAgainstDatabase(
dbItem,
await mockInitialQueryInfo(queryPath),
path.join(tmpDir.name, "mock-storage-path"),
join(tmpDir.name, "mock-storage-path"),
progress,
token,
);
@@ -123,32 +129,28 @@ describeWithCodeQL()("Queries", () => {
await commands.executeCommand("codeQL.quickQuery");

// should have created the quick query file and query pack file
expect(fs.pathExistsSync(qlFile)).toBe(true);
expect(fs.pathExistsSync(qlpackFile)).toBe(true);
expect(pathExistsSync(qlFile)).toBe(true);
expect(pathExistsSync(qlpackFile)).toBe(true);

const qlpackContents: any = await yaml.load(
fs.readFileSync(qlpackFile, "utf8"),
);
const qlpackContents: any = await load(readFileSync(qlpackFile, "utf8"));
// Should have chosen the js libraries
expect(qlpackContents.dependencies["codeql/javascript-all"]).toBe("*");

// Should also have a codeql-pack.lock.yml file
const packFileToUse = fs.pathExistsSync(qlpackLockFile)
const packFileToUse = pathExistsSync(qlpackLockFile)
? qlpackLockFile
: oldQlpackLockFile;
const qlpackLock: any = await yaml.load(
fs.readFileSync(packFileToUse, "utf8"),
);
const qlpackLock: any = await load(readFileSync(packFileToUse, "utf8"));
expect(!!qlpackLock.dependencies["codeql/javascript-all"].version).toBe(
true,
);
});

it("should avoid creating a quick query", async () => {
fs.mkdirpSync(path.dirname(qlpackFile));
fs.writeFileSync(
mkdirpSync(dirname(qlpackFile));
writeFileSync(
qlpackFile,
yaml.dump({
dump({
name: "quick-query",
version: "1.0.0",
dependencies: {
@@ -156,16 +158,16 @@ describeWithCodeQL()("Queries", () => {
},
}),
);
fs.writeFileSync(qlFile, "xxx");
writeFileSync(qlFile, "xxx");
await commands.executeCommand("codeQL.quickQuery");

// should not have created the quick query file because database schema hasn't changed
expect(fs.readFileSync(qlFile, "utf8")).toBe("xxx");
expect(readFileSync(qlFile, "utf8")).toBe("xxx");
});

function safeDel(file: string) {
try {
fs.unlinkSync(file);
unlinkSync(file);
} catch (e) {
// ignore
}

@@ -1,4 +1,4 @@
import * as path from "path";
import { join } from "path";
import {
CancellationTokenSource,
commands,
@@ -8,7 +8,7 @@ import {
Uri,
window,
} from "vscode";
import * as yaml from "js-yaml";
import { load } from "js-yaml";

import { QlPack } from "../../../remote-queries/run-remote-query";
import { CodeQLCliServer } from "../../../cli";
@@ -35,7 +35,7 @@ import {
jest.setTimeout(3 * 60 * 1000);

describe("Remote queries", () => {
const baseDir = path.join(
const baseDir = join(
__dirname,
"../../../../src/vscode-tests/cli-integration",
);
@@ -182,7 +182,7 @@ describe("Remote queries", () => {
expect(packFS.fileExists("not-in-pack.ql")).toBe(false);

// should have generated a correct qlpack file
const qlpackContents: any = yaml.load(
const qlpackContents: any = load(
packFS.fileContents("qlpack.yml").toString("utf-8"),
);
expect(qlpackContents.name).toBe("codeql-remote/query");
@@ -239,7 +239,7 @@ describe("Remote queries", () => {
verifyQlPack("in-pack.ql", packFS.fileContents("qlpack.yml"), "0.0.0");

// should have generated a correct qlpack file
const qlpackContents: any = yaml.load(
const qlpackContents: any = load(
packFS.fileContents("qlpack.yml").toString("utf-8"),
);
expect(qlpackContents.name).toBe("codeql-remote/query");
@@ -299,7 +299,7 @@ describe("Remote queries", () => {
);

// should have generated a correct qlpack file
const qlpackContents: any = yaml.load(
const qlpackContents: any = load(
packFS.fileContents("qlpack.yml").toString("utf-8"),
);
expect(qlpackContents.name).toBe("codeql-remote/query");
@@ -334,7 +334,7 @@ describe("Remote queries", () => {
contents: Buffer,
packVersion: string,
) {
const qlPack = yaml.load(contents.toString("utf-8")) as QlPack;
const qlPack = load(contents.toString("utf-8")) as QlPack;

// don't check the build metadata since it is variable
delete (qlPack as any).buildMetadata;
@@ -358,6 +358,6 @@ describe("Remote queries", () => {
}

function getFile(file: string): Uri {
return Uri.file(path.join(baseDir, file));
return Uri.file(join(baseDir, file));
}
});

@@ -18,7 +18,7 @@ import * as ghApiClient from "../../../remote-queries/gh-api/gh-api-client";
import * as ghActionsApiClient from "../../../remote-queries/gh-api/gh-actions-api-client";
import { Credentials } from "../../../authentication";
import * as fs from "fs-extra";
import * as path from "path";
import { join } from "path";

import { VariantAnalysisManager } from "../../../remote-queries/variant-analysis-manager";
import { CodeQLCliServer } from "../../../cli";
@@ -120,7 +120,7 @@ describe("Variant Analysis Manager", () => {
let originalDeps: Record<string, string> | undefined;
let executeCommandSpy: jest.SpiedFunction<typeof commands.executeCommand>;

const baseDir = path.join(
const baseDir = join(
__dirname,
"../../../../src/vscode-tests/cli-integration",
);
@@ -129,7 +129,7 @@ describe("Variant Analysis Manager", () => {
).fsPath;

function getFile(file: string): Uri {
return Uri.file(path.join(baseDir, file));
return Uri.file(join(baseDir, file));
}

beforeEach(async () => {
@@ -279,7 +279,7 @@ describe("Variant Analysis Manager", () => {
expect(stub).toBeCalledTimes(1);
expect(pathExistsStub).toHaveBeenCalledTimes(1);
expect(pathExistsStub).toBeCalledWith(
path.join(storagePath, variantAnalysis.id.toString()),
join(storagePath, variantAnalysis.id.toString()),
);
});
});
@@ -297,7 +297,7 @@ describe("Variant Analysis Manager", () => {
).toEqual(variantAnalysis);

expect(pathExistsStub).toBeCalledWith(
path.join(storagePath, variantAnalysis.id.toString()),
join(storagePath, variantAnalysis.id.toString()),
);
});

@@ -310,11 +310,7 @@ describe("Variant Analysis Manager", () => {

expect(readJsonStub).toHaveBeenCalledTimes(1);
expect(readJsonStub).toHaveBeenCalledWith(
path.join(
storagePath,
variantAnalysis.id.toString(),
"repo_states.json",
),
join(storagePath, variantAnalysis.id.toString(), "repo_states.json"),
);
expect(
await variantAnalysisManager.getRepoStates(variantAnalysis.id),
@@ -341,11 +337,7 @@ describe("Variant Analysis Manager", () => {

expect(readJsonStub).toHaveBeenCalledTimes(1);
expect(readJsonStub).toHaveBeenCalledWith(
path.join(
storagePath,
variantAnalysis.id.toString(),
"repo_states.json",
),
join(storagePath, variantAnalysis.id.toString(), "repo_states.json"),
);
expect(
await variantAnalysisManager.getRepoStates(variantAnalysis.id),
@@ -406,7 +398,7 @@ describe("Variant Analysis Manager", () => {
} as unknown as Credentials;
jest.spyOn(Credentials, "initialize").mockResolvedValue(mockCredentials);

const sourceFilePath = path.join(
const sourceFilePath = join(
__dirname,
"../../../../src/vscode-tests/cli-integration/data/variant-analysis-results.zip",
);
@@ -512,7 +504,7 @@ describe("Variant Analysis Manager", () => {
);

expect(outputJsonStub).toHaveBeenCalledWith(
path.join(
join(
storagePath,
variantAnalysis.id.toString(),
"repo_states.json",
@@ -565,7 +557,7 @@ describe("Variant Analysis Manager", () => {
);

expect(outputJsonStub).toHaveBeenCalledWith(
path.join(
join(
storagePath,
variantAnalysis.id.toString(),
"repo_states.json",
@@ -607,7 +599,7 @@ describe("Variant Analysis Manager", () => {
);

expect(outputJsonStub).toHaveBeenCalledWith(
path.join(
join(
storagePath,
variantAnalysis.id.toString(),
"repo_states.json",
@@ -654,11 +646,11 @@ describe("Variant Analysis Manager", () => {
);

expect(pathExistsStub).toBeCalledWith(
path.join(storagePath, variantAnalysis.id.toString()),
join(storagePath, variantAnalysis.id.toString()),
);
expect(readJsonStub).toHaveBeenCalledTimes(1);
expect(readJsonStub).toHaveBeenCalledWith(
path.join(
join(
storagePath,
variantAnalysis.id.toString(),
"repo_states.json",
@@ -674,7 +666,7 @@ describe("Variant Analysis Manager", () => {
);

expect(outputJsonStub).toHaveBeenCalledWith(
path.join(
join(
storagePath,
variantAnalysis.id.toString(),
"repo_states.json",

@@ -3,7 +3,7 @@ import { CodeQLExtensionInterface } from "../../../extension";
import { extLogger } from "../../../common";
import { Credentials } from "../../../authentication";
import * as fs from "fs-extra";
import * as path from "path";
import { join } from "path";

import { VariantAnalysisResultsManager } from "../../../remote-queries/variant-analysis-results-manager";
import { CodeQLCliServer } from "../../../cli";
@@ -57,7 +57,7 @@ describe(VariantAnalysisResultsManager.name, () => {

dummyRepoTask = createMockVariantAnalysisRepositoryTask();

variantAnalysisStoragePath = path.join(
variantAnalysisStoragePath = join(
storagePath,
variantAnalysisId.toString(),
);
@@ -109,7 +109,7 @@ describe(VariantAnalysisResultsManager.name, () => {
>;

beforeEach(async () => {
const sourceFilePath = path.join(
const sourceFilePath = join(
__dirname,
"../../../../src/vscode-tests/cli-integration/data/variant-analysis-results.zip",
);

@@ -1,4 +1,4 @@
import * as path from "path";
import { resolve } from "path";

import {
authentication,
@@ -9,7 +9,7 @@ import {
window,
workspace,
} from "vscode";
import * as Octokit from "@octokit/rest";
import { Octokit } from "@octokit/rest";
import { retry } from "@octokit/plugin-retry";

import { CodeQLExtensionInterface } from "../../../extension";
@@ -25,7 +25,7 @@ afterAll(() => mockServer.stopServer());

async function showQlDocument(name: string): Promise<TextDocument> {
const folderPath = workspace.workspaceFolders![0].uri.fsPath;
const documentPath = path.resolve(folderPath, name);
const documentPath = resolve(folderPath, name);
const document = await workspace.openTextDocument(documentPath);
await window.showTextDocument(document!);
return document;
@@ -94,7 +94,7 @@ describe("Variant Analysis Submission Integration", () => {
});

const mockCredentials = {
getOctokit: () => Promise.resolve(new Octokit.Octokit({ retry })),
getOctokit: () => Promise.resolve(new Octokit({ retry })),
} as unknown as Credentials;
jest.spyOn(Credentials, "initialize").mockResolvedValue(mockCredentials);

@@ -1,5 +1,5 @@
import { extensions, Uri } from "vscode";
import * as path from "path";
import { join } from "path";
import { SemVer } from "semver";

import { CodeQLCliServer, QueryInfoByLanguage } from "../../cli";
@@ -72,11 +72,7 @@ describe("Use cli", () => {
});

itWithCodeQL()("should resolve query by language", async () => {
const queryPath = path.join(
__dirname,
"data",
"simple-javascript-query.ql",
);
const queryPath = join(__dirname, "data", "simple-javascript-query.ql");
const queryInfo: QueryInfoByLanguage = await cli.resolveQueryByLanguage(
getOnDiskWorkspaceFolders(),
Uri.file(queryPath),

@@ -1,7 +1,7 @@
import { commands, Selection, window, workspace } from "vscode";
import * as path from "path";
import { join, basename } from "path";
import { tmpDir } from "../../helpers";
import * as fs from "fs-extra";
import { readFile, writeFile, ensureDir, copy } from "fs-extra";

jest.setTimeout(20_000);

@@ -12,8 +12,8 @@ describe("SourceMap", () => {
it("should jump to QL code", async () => {
const root = workspace.workspaceFolders![0].uri.fsPath;
const srcFiles = {
summary: path.join(root, "log-summary", "evaluator-log.summary"),
summaryMap: path.join(root, "log-summary", "evaluator-log.summary.map"),
summary: join(root, "log-summary", "evaluator-log.summary"),
summaryMap: join(root, "log-summary", "evaluator-log.summary.map"),
};
// We need to modify the source map so that its paths point to the actual location of the
// workspace root on this machine. We'll copy the summary and its source map to a temp
@@ -22,11 +22,11 @@ describe("SourceMap", () => {

// The checked-in sourcemap has placeholders of the form `${root}`, which we need to replace
// with the actual root directory.
const mapText = await fs.readFile(tempFiles.summaryMap, "utf-8");
const mapText = await readFile(tempFiles.summaryMap, "utf-8");
// Always use forward slashes, since they work everywhere.
const slashRoot = root.replaceAll("\\", "/");
const newMapText = mapText.replaceAll("${root}", slashRoot);
await fs.writeFile(tempFiles.summaryMap, newMapText);
await writeFile(tempFiles.summaryMap, newMapText);

const summaryDocument = await workspace.openTextDocument(tempFiles.summary);
expect(summaryDocument.languageId).toBe("ql-summary");
@@ -37,7 +37,7 @@ describe("SourceMap", () => {
const newEditor = window.activeTextEditor;
expect(newEditor).toBeDefined();
const newDocument = newEditor!.document;
expect(path.basename(newDocument.fileName)).toBe("Namespace.qll");
expect(basename(newDocument.fileName)).toBe("Namespace.qll");
const newSelection = newEditor!.selection;
expect(newSelection.start.line).toBe(60);
expect(newSelection.start.character).toBe(2);
@@ -46,12 +46,12 @@ describe("SourceMap", () => {
async function copyFilesToTempDirectory<T extends Record<string, string>>(
files: T,
): Promise<T> {
const tempDir = path.join(tmpDir.name, "log-summary");
await fs.ensureDir(tempDir);
const tempDir = join(tmpDir.name, "log-summary");
await ensureDir(tempDir);
const result: Record<string, string> = {};
for (const [key, srcPath] of Object.entries(files)) {
const destPath = path.join(tempDir, path.basename(srcPath));
await fs.copy(srcPath, destPath);
const destPath = join(tempDir, basename(srcPath));
await copy(srcPath, destPath);
result[key] = destPath;
}

@@ -1,5 +1,5 @@
import * as fs from "fs-extra";
import * as path from "path";
import { existsSync, createWriteStream, mkdirpSync } from "fs-extra";
import { normalize, join } from "path";
import {
getRequiredAssetName,
extractZipArchive,
@@ -44,8 +44,7 @@ process.env.CLI_VERSION = CLI_VERSION;
// Base dir where CLIs will be downloaded into
// By default, put it in the `build` directory in the root of the extension.
const CLI_BASE_DIR =
process.env.CLI_DIR ||
path.normalize(path.join(__dirname, "../../build/cli"));
process.env.CLI_DIR || normalize(join(__dirname, "../../build/cli"));

export async function ensureCli(useCli: boolean) {
try {
@@ -67,7 +66,7 @@ export async function ensureCli(useCli: boolean) {
const url = getCliDownloadUrl(assetName);
const unzipDir = getCliUnzipDir();
const downloadedFilePath = getDownloadFilePath(assetName);
const executablePath = path.join(
const executablePath = join(
getCliUnzipDir(),
"codeql",
codeQlLauncherName(),
@@ -76,14 +75,14 @@ export async function ensureCli(useCli: boolean) {
// Use this environment variable to se to the `codeQL.cli.executablePath` in tests
process.env.CLI_PATH = executablePath;

if (fs.existsSync(executablePath)) {
if (existsSync(executablePath)) {
console.log(
`CLI version ${CLI_VERSION} is found ${executablePath}. Not going to download again.`,
);
return;
}

if (!fs.existsSync(downloadedFilePath)) {
if (!existsSync(downloadedFilePath)) {
console.log(
`CLI version ${CLI_VERSION} zip file not found. Downloading from '${url}' into '${downloadedFilePath}'.`,
);
@@ -93,7 +92,7 @@ export async function ensureCli(useCli: boolean) {
assetStream.headers.get("content-length") || 0,
);
console.log("Total content size", Math.round(contentLength / _1MB), "MB");
const archiveFile = fs.createWriteStream(downloadedFilePath);
const archiveFile = createWriteStream(downloadedFilePath);
const body = assetStream.body;
await new Promise<void>((resolve, reject) => {
let numBytesDownloaded = 0;
@@ -125,7 +124,7 @@ export async function ensureCli(useCli: boolean) {
}

console.log(`Unzipping into '${unzipDir}'`);
fs.mkdirpSync(unzipDir);
mkdirpSync(unzipDir);
await extractZipArchive(downloadedFilePath, unzipDir);
console.log("Done.");
} catch (e) {
@@ -153,14 +152,14 @@ function getCliDownloadUrl(assetName: string) {
* Directory to place the downloaded cli into
*/
function getDownloadFilePath(assetName: string) {
const dir = path.join(CLI_BASE_DIR, "assets", CLI_VERSION);
fs.mkdirpSync(dir);
return path.join(dir, assetName);
const dir = join(CLI_BASE_DIR, "assets", CLI_VERSION);
mkdirpSync(dir);
return join(dir, assetName);
}

/**
* Directory to unzip the downloaded cli into.
*/
function getCliUnzipDir() {
return path.join(CLI_BASE_DIR, CLI_VERSION);
return join(CLI_BASE_DIR, CLI_VERSION);
}

@@ -1,9 +1,9 @@
import * as path from "path";
import * as vscode from "vscode";
import * as determiningSelectedQueryTest from "./determining-selected-query-test";
import { resolve } from "path";
import { extensions, workspace } from "vscode";
import { run } from "./determining-selected-query-test";

describe("launching with a minimal workspace", () => {
const ext = vscode.extensions.getExtension("GitHub.vscode-codeql");
const ext = extensions.getExtension("GitHub.vscode-codeql");
it("should install the extension", () => {
expect(ext).toBeDefined();
});
@@ -18,11 +18,11 @@ describe("launching with a minimal workspace", () => {
it("should activate the extension when a .ql file is opened", async () => {
await delay();

const folders = vscode.workspace.workspaceFolders;
const folders = workspace.workspaceFolders;
expect(folders?.length).toEqual(1);
const folderPath = folders![0].uri.fsPath;
const documentPath = path.resolve(folderPath, "query.ql");
const document = await vscode.workspace.openTextDocument(documentPath);
const documentPath = resolve(folderPath, "query.ql");
const document = await workspace.openTextDocument(documentPath);
expect(document.languageId).toEqual("ql");
// Delay slightly so that the extension has time to activate.
await delay();
@@ -34,4 +34,4 @@ describe("launching with a minimal workspace", () => {
}
});

determiningSelectedQueryTest.run();
run();

@@ -1,6 +1,6 @@
import * as tmp from "tmp";
import * as fs from "fs-extra";
import * as path from "path";
import { join } from "path";
import { CancellationToken, ExtensionContext, Uri, workspace } from "vscode";

import {
@@ -509,25 +509,25 @@ describe("databases", () => {
describe("findSourceArchive", () => {
["src", "output/src_archive"].forEach((name) => {
it(`should find source folder in ${name}`, async () => {
const uri = Uri.file(path.join(dir.name, name));
fs.createFileSync(path.join(uri.fsPath, "hucairz.txt"));
const uri = Uri.file(join(dir.name, name));
fs.createFileSync(join(uri.fsPath, "hucairz.txt"));
const srcUri = await findSourceArchive(dir.name);
expect(srcUri!.fsPath).toBe(uri.fsPath);
});

it(`should find source archive in ${name}.zip`, async () => {
const uri = Uri.file(path.join(dir.name, name + ".zip"));
const uri = Uri.file(join(dir.name, name + ".zip"));
fs.createFileSync(uri.fsPath);
const srcUri = await findSourceArchive(dir.name);
expect(srcUri!.fsPath).toBe(uri.fsPath);
});

it(`should prioritize ${name}.zip over ${name}`, async () => {
const uri = Uri.file(path.join(dir.name, name + ".zip"));
const uri = Uri.file(join(dir.name, name + ".zip"));
fs.createFileSync(uri.fsPath);

const uriFolder = Uri.file(path.join(dir.name, name));
fs.createFileSync(path.join(uriFolder.fsPath, "hucairz.txt"));
const uriFolder = Uri.file(join(dir.name, name));
fs.createFileSync(join(uriFolder.fsPath, "hucairz.txt"));

const srcUri = await findSourceArchive(dir.name);
expect(srcUri!.fsPath).toBe(uri.fsPath);
@@ -535,9 +535,9 @@ describe("databases", () => {
});

it("should prioritize src over output/src_archive", async () => {
const uriSrc = Uri.file(path.join(dir.name, "src.zip"));
const uriSrc = Uri.file(join(dir.name, "src.zip"));
fs.createFileSync(uriSrc.fsPath);
const uriSrcArchive = Uri.file(path.join(dir.name, "src.zip"));
const uriSrcArchive = Uri.file(join(dir.name, "src.zip"));
fs.createFileSync(uriSrcArchive.fsPath);

const resultUri = await findSourceArchive(dir.name);
@@ -563,10 +563,10 @@ describe("databases", () => {
}

function sourceLocationUri() {
return Uri.file(path.join(dir.name, "src.zip"));
return Uri.file(join(dir.name, "src.zip"));
}

function dbLocationUri() {
return Uri.file(path.join(dir.name, "db"));
return Uri.file(join(dir.name, "db"));
}
});

@@ -1,6 +1,6 @@
import * as vscode from "vscode";
import * as path from "path";
import * as fs from "fs-extra";
import { TreeItemCollapsibleState, ThemeIcon } from "vscode";
import { join } from "path";
import { ensureDir, remove, writeJson } from "fs-extra";
import { DbConfig } from "../../../databases/config/db-config";
import { DbManager } from "../../../databases/db-manager";
import { DbConfigStore } from "../../../databases/config/db-config-store";
@@ -11,10 +11,10 @@ import { ExtensionApp } from "../../../common/vscode/vscode-app";
import { createMockExtensionContext } from "../../factories/extension-context";

describe("db panel", () => {
const workspaceStoragePath = path.join(__dirname, "test-workspace-storage");
const globalStoragePath = path.join(__dirname, "test-global-storage");
const extensionPath = path.join(__dirname, "../../../../");
const dbConfigFilePath = path.join(
const workspaceStoragePath = join(__dirname, "test-workspace-storage");
const globalStoragePath = join(__dirname, "test-global-storage");
const extensionPath = join(__dirname, "../../../../");
const dbConfigFilePath = join(
workspaceStoragePath,
"workspace-databases.json",
);
@@ -28,7 +28,7 @@ describe("db panel", () => {
globalStoragePath,
workspaceStoragePath,
});
await fs.ensureDir(workspaceStoragePath);
await ensureDir(workspaceStoragePath);

const app = new ExtensionApp(extensionContext);

@@ -37,11 +37,11 @@ describe("db panel", () => {
});

beforeEach(async () => {
await fs.ensureDir(workspaceStoragePath);
await ensureDir(workspaceStoragePath);
});

afterEach(async () => {
await fs.remove(workspaceStoragePath);
await remove(workspaceStoragePath);
});

it("should render default local and remote nodes when the config is empty", async () => {
@@ -73,7 +73,7 @@ describe("db panel", () => {
expect(remoteRootNode.label).toBe("remote");
expect(remoteRootNode.tooltip).toBe("Remote databases");
expect(remoteRootNode.collapsibleState).toBe(
vscode.TreeItemCollapsibleState.Collapsed,
TreeItemCollapsibleState.Collapsed,
);
expect(remoteRootNode.children).toBeTruthy();
expect(remoteRootNode.children.length).toBe(3);
@@ -92,7 +92,7 @@ describe("db panel", () => {
expect(localRootNode.label).toBe("local");
expect(localRootNode.tooltip).toBe("Local databases");
expect(localRootNode.collapsibleState).toBe(
vscode.TreeItemCollapsibleState.Collapsed,
TreeItemCollapsibleState.Collapsed,
);
expect(localRootNode.children).toBeTruthy();
expect(localRootNode.children.length).toBe(0);
@@ -133,7 +133,7 @@ describe("db panel", () => {
const remoteRootNode = items[0];
expect(remoteRootNode.dbItem).toBeTruthy();
expect(remoteRootNode.collapsibleState).toBe(
vscode.TreeItemCollapsibleState.Collapsed,
TreeItemCollapsibleState.Collapsed,
);
expect(remoteRootNode.children).toBeTruthy();
expect(remoteRootNode.children.length).toBe(5);
@@ -184,7 +184,7 @@ describe("db panel", () => {
const remoteRootNode = items[0];
expect(remoteRootNode.dbItem).toBeTruthy();
expect(remoteRootNode.collapsibleState).toBe(
vscode.TreeItemCollapsibleState.Collapsed,
TreeItemCollapsibleState.Collapsed,
);
expect(remoteRootNode.children).toBeTruthy();
expect(remoteRootNode.children.length).toBe(5);
@@ -223,7 +223,7 @@ describe("db panel", () => {
const remoteRootNode = items[0];
expect(remoteRootNode.dbItem).toBeTruthy();
expect(remoteRootNode.collapsibleState).toBe(
vscode.TreeItemCollapsibleState.Collapsed,
TreeItemCollapsibleState.Collapsed,
);
expect(remoteRootNode.children).toBeTruthy();
expect(remoteRootNode.children.length).toBe(5);
@@ -291,7 +291,7 @@ describe("db panel", () => {
const localRootNode = items[1];
expect(localRootNode.dbItem).toBeTruthy();
expect(localRootNode.collapsibleState).toBe(
vscode.TreeItemCollapsibleState.Collapsed,
TreeItemCollapsibleState.Collapsed,
);
expect(localRootNode.children).toBeTruthy();
expect(localRootNode.children.length).toBe(2);
@@ -366,7 +366,7 @@ describe("db panel", () => {
const localRootNode = items[1];
expect(localRootNode.dbItem).toBeTruthy();
expect(localRootNode.collapsibleState).toBe(
vscode.TreeItemCollapsibleState.Collapsed,
TreeItemCollapsibleState.Collapsed,
);
expect(localRootNode.children).toBeTruthy();
expect(localRootNode.children.length).toBe(2);
@@ -392,7 +392,7 @@ describe("db panel", () => {
});

async function saveDbConfig(dbConfig: DbConfig): Promise<void> {
await fs.writeJson(dbConfigFilePath, dbConfig);
await writeJson(dbConfigFilePath, dbConfig);

// Ideally we would just initialise the db config store at the start
// of each test and then rely on the file watcher to update the config.
@@ -408,8 +408,8 @@ describe("db panel", () => {
): void {
expect(item.label).toBe(`Top ${n} repositories`);
expect(item.tooltip).toBe(`Top ${n} repositories of a language`);
expect(item.iconPath).toEqual(new vscode.ThemeIcon("github"));
expect(item.collapsibleState).toBe(vscode.TreeItemCollapsibleState.None);
expect(item.iconPath).toEqual(new ThemeIcon("github"));
expect(item.collapsibleState).toBe(TreeItemCollapsibleState.None);
}

function checkUserDefinedListItem(
@@ -420,9 +420,7 @@ describe("db panel", () => {
expect(item.label).toBe(listName);
expect(item.tooltip).toBeUndefined();
expect(item.iconPath).toBeUndefined();
expect(item.collapsibleState).toBe(
vscode.TreeItemCollapsibleState.Collapsed,
);
expect(item.collapsibleState).toBe(TreeItemCollapsibleState.Collapsed);
expect(item.children).toBeTruthy();
expect(item.children.length).toBe(repos.length);

@@ -434,8 +432,8 @@ describe("db panel", () => {
function checkOwnerItem(item: DbTreeViewItem, ownerName: string): void {
expect(item.label).toBe(ownerName);
expect(item.tooltip).toBeUndefined();
expect(item.iconPath).toEqual(new vscode.ThemeIcon("organization"));
expect(item.collapsibleState).toBe(vscode.TreeItemCollapsibleState.None);
expect(item.iconPath).toEqual(new ThemeIcon("organization"));
expect(item.collapsibleState).toBe(TreeItemCollapsibleState.None);
expect(item.children).toBeTruthy();
expect(item.children.length).toBe(0);
}
@@ -443,8 +441,8 @@ describe("db panel", () => {
function checkRemoteRepoItem(item: DbTreeViewItem, repoName: string): void {
expect(item.label).toBe(repoName);
expect(item.tooltip).toBeUndefined();
expect(item.iconPath).toEqual(new vscode.ThemeIcon("database"));
expect(item.collapsibleState).toBe(vscode.TreeItemCollapsibleState.None);
expect(item.iconPath).toEqual(new ThemeIcon("database"));
expect(item.collapsibleState).toBe(TreeItemCollapsibleState.None);
}

function checkLocalListItem(
@@ -455,9 +453,7 @@ describe("db panel", () => {
expect(item.label).toBe(listName);
expect(item.tooltip).toBeUndefined();
expect(item.iconPath).toBeUndefined();
expect(item.collapsibleState).toBe(
vscode.TreeItemCollapsibleState.Collapsed,
);
expect(item.collapsibleState).toBe(TreeItemCollapsibleState.Collapsed);
expect(item.children).toBeTruthy();
expect(item.children.length).toBe(databases.length);

@@ -472,7 +468,7 @@ describe("db panel", () => {
): void {
expect(item.label).toBe(database.databaseName);
expect(item.tooltip).toBe(`Language: ${database.language}`);
expect(item.iconPath).toEqual(new vscode.ThemeIcon("database"));
expect(item.collapsibleState).toBe(vscode.TreeItemCollapsibleState.None);
expect(item.iconPath).toEqual(new ThemeIcon("database"));
expect(item.collapsibleState).toBe(TreeItemCollapsibleState.None);
}
});

@@ -1,11 +1,11 @@
import * as path from "path";
import { resolve, join } from "path";
import * as vscode from "vscode";
import { Uri } from "vscode";
import { determineSelectedQuery } from "../../run-queries-shared";

async function showQlDocument(name: string): Promise<vscode.TextDocument> {
const folderPath = vscode.workspace.workspaceFolders![0].uri.fsPath;
const documentPath = path.resolve(folderPath, name);
const documentPath = resolve(folderPath, name);
const document = await vscode.workspace.openTextDocument(documentPath);
await vscode.window.showTextDocument(document!);
return document;
@@ -18,7 +18,7 @@ export function run() {
Uri.parse("file:///tmp/queryname.ql"),
false,
);
expect(q.queryPath).toBe(path.join("/", "tmp", "queryname.ql"));
expect(q.queryPath).toBe(join("/", "tmp", "queryname.ql"));
expect(q.quickEvalPosition).toBeUndefined();
});

@@ -26,9 +26,7 @@ export function run() {
const doc = await showQlDocument("query.ql");
const q = await determineSelectedQuery(doc.uri, true);
expect(
q.queryPath.endsWith(
path.join("ql-vscode", "test", "data", "query.ql"),
),
q.queryPath.endsWith(join("ql-vscode", "test", "data", "query.ql")),
).toBe(true);
});

@@ -36,9 +34,7 @@ export function run() {
const doc = await showQlDocument("library.qll");
const q = await determineSelectedQuery(doc.uri, true);
expect(
q.queryPath.endsWith(
path.join("ql-vscode", "test", "data", "library.qll"),
),
q.queryPath.endsWith(join("ql-vscode", "test", "data", "library.qll")),
).toBe(true);
});

@@ -3,11 +3,11 @@
// test is run at the start-up of a new VSCode instance. No other files should be located in this directory since
// those may activate the extension before this test is run.

import * as vscode from "vscode";
import { extensions } from "vscode";

// Note that this may open the most recent VSCode workspace.
describe("launching with no specified workspace", () => {
const ext = vscode.extensions.getExtension("GitHub.vscode-codeql");
const ext = extensions.getExtension("GitHub.vscode-codeql");
it("should install the extension", () => {
expect(ext).not.toBeUndefined();
});

@@ -1,4 +1,4 @@
import * as path from "path";
import { resolve } from "path";

import {
encodeSourceArchiveUri,
@@ -14,7 +14,7 @@ describe("archive-filesystem-provider", () => {
it("reads empty file correctly", async () => {
const archiveProvider = new ArchiveFileSystemProvider();
const uri = encodeSourceArchiveUri({
sourceArchiveZipPath: path.resolve(
sourceArchiveZipPath: resolve(
__dirname,
"data/archive-filesystem-provider-test/single_file.zip",
),
@@ -27,7 +27,7 @@ describe("archive-filesystem-provider", () => {
it("read non-empty file correctly", async () => {
const archiveProvider = new ArchiveFileSystemProvider();
const uri = encodeSourceArchiveUri({
sourceArchiveZipPath: path.resolve(
sourceArchiveZipPath: resolve(
__dirname,
"data/archive-filesystem-provider-test/zip_with_folder.zip",
),
@@ -40,7 +40,7 @@ describe("archive-filesystem-provider", () => {
it("read a directory", async () => {
const archiveProvider = new ArchiveFileSystemProvider();
const uri = encodeSourceArchiveUri({
sourceArchiveZipPath: path.resolve(
sourceArchiveZipPath: resolve(
__dirname,
"data/archive-filesystem-provider-test/zip_with_folder.zip",
),
@@ -57,7 +57,7 @@ describe("archive-filesystem-provider", () => {
it("should handle a missing directory", async () => {
const archiveProvider = new ArchiveFileSystemProvider();
const uri = encodeSourceArchiveUri({
sourceArchiveZipPath: path.resolve(
sourceArchiveZipPath: resolve(
__dirname,
"data/archive-filesystem-provider-test/zip_with_folder.zip",
),
@@ -74,7 +74,7 @@ describe("archive-filesystem-provider", () => {
it("should handle a missing file", async () => {
const archiveProvider = new ArchiveFileSystemProvider();
const uri = encodeSourceArchiveUri({
sourceArchiveZipPath: path.resolve(
sourceArchiveZipPath: resolve(
__dirname,
"data/archive-filesystem-provider-test/zip_with_folder.zip",
),
@@ -91,7 +91,7 @@ describe("archive-filesystem-provider", () => {
it("should handle reading a file as a directory", async () => {
const archiveProvider = new ArchiveFileSystemProvider();
const uri = encodeSourceArchiveUri({
sourceArchiveZipPath: path.resolve(
sourceArchiveZipPath: resolve(
__dirname,
"data/archive-filesystem-provider-test/zip_with_folder.zip",
),
@@ -108,7 +108,7 @@ describe("archive-filesystem-provider", () => {
it("should handle reading a directory as a file", async () => {
const archiveProvider = new ArchiveFileSystemProvider();
const uri = encodeSourceArchiveUri({
sourceArchiveZipPath: path.resolve(
sourceArchiveZipPath: resolve(
__dirname,
"data/archive-filesystem-provider-test/zip_with_folder.zip",
),
@@ -125,7 +125,7 @@ describe("archive-filesystem-provider", () => {
it("read a nested directory", async () => {
const archiveProvider = new ArchiveFileSystemProvider();
const uri = encodeSourceArchiveUri({
sourceArchiveZipPath: path.resolve(
sourceArchiveZipPath: resolve(
__dirname,
"data/archive-filesystem-provider-test/zip_with_folder.zip",
),

Some files were not shown because too many files have changed in this diff.