Add eslint-plugin-import
We had ESLint configuration entries that disabled rules from this plugin, but the plugin itself was never actually installed. I've now installed it, enabled its recommended configuration, and removed our own disable rules, then fixed the errors this introduced.
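In eslintrc terms, the shape of the change is roughly the sketch below; the file name and the omitted presets are illustrative, and the exact values are in the diff that follows:

// .eslintrc.js (illustrative sketch, not the full config)
module.exports = {
  extends: [
    // ...existing presets...
    "plugin:import/recommended",
    "plugin:import/typescript",
  ],
  rules: {
    // Never allow extensions in import paths, except for JSON files where they are required.
    "import/extensions": ["error", "never", { json: "always" }],
  },
  settings: {
    // Resolve imports with eslint-import-resolver-typescript as well as Node resolution.
    "import/resolver": {
      typescript: true,
      node: true,
    },
  },
};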
@@ -27,6 +27,8 @@ const baseConfig = {
"plugin:jest-dom/recommended",
"plugin:prettier/recommended",
"plugin:@typescript-eslint/recommended",
"plugin:import/recommended",
"plugin:import/typescript",
],
rules: {
"@typescript-eslint/await-thenable": "error",
@@ -57,15 +59,6 @@ const baseConfig = {
"filenames/match-regexp": "off",
"func-style": "off",
"i18n-text/no-en": "off",
"import/named": "off",
"import/no-dynamic-require": "off",
"import/no-dynamic-required": "off",
"import/no-anonymous-default-export": "off",
"import/no-commonjs": "off",
"import/no-mutable-exports": "off",
"import/no-namespace": "off",
"import/no-unresolved": "off",
"import/no-webpack-loader-syntax": "off",
"no-invalid-this": "off",
"no-fallthrough": "off",
"no-console": "off",
@@ -73,6 +66,19 @@ const baseConfig = {
"github/array-foreach": "off",
"github/no-then": "off",
"react/jsx-key": ["error", { checkFragmentShorthand: true }],
"import/no-cycle": "off",
"import/no-namespace": "off",
// Never allow extensions in import paths, except for JSON files where they are required.
"import/extensions": ["error", "never", { json: "always" }],
},
settings: {
"import/resolver": {
typescript: true,
node: true,
},
"import/extensions": [".js", ".jsx", ".ts", ".tsx", ".json"],
// vscode and sarif don't exist on-disk, but only provide types.
"import/core-modules": ["vscode", "sarif"],
},
};

@@ -8,27 +8,27 @@ import { VSCodeTheme } from "./theme";

const themeFiles: { [key in VSCodeTheme]: string } = {
[VSCodeTheme.Dark]:
// eslint-disable-next-line @typescript-eslint/no-var-requires
// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
require("!file-loader?modules!../../src/stories/vscode-theme-dark.css")
.default,
[VSCodeTheme.Light]:
// eslint-disable-next-line @typescript-eslint/no-var-requires
// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
require("!file-loader?modules!../../src/stories/vscode-theme-light.css")
.default,
[VSCodeTheme.LightHighContrast]:
// eslint-disable-next-line @typescript-eslint/no-var-requires
// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
require("!file-loader?modules!../../src/stories/vscode-theme-light-high-contrast.css")
.default,
[VSCodeTheme.DarkHighContrast]:
// eslint-disable-next-line @typescript-eslint/no-var-requires
// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
require("!file-loader?modules!../../src/stories/vscode-theme-dark-high-contrast.css")
.default,
[VSCodeTheme.GitHubLightDefault]:
// eslint-disable-next-line @typescript-eslint/no-var-requires
// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
require("!file-loader?modules!../../src/stories/vscode-theme-github-light-default.css")
.default,
[VSCodeTheme.GitHubDarkDefault]:
// eslint-disable-next-line @typescript-eslint/no-var-requires
// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
require("!file-loader?modules!../../src/stories/vscode-theme-github-dark-default.css")
.default,
};

@@ -1,5 +1,5 @@
import { src, dest } from "gulp";
// eslint-disable-next-line @typescript-eslint/no-var-requires
// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs
const replace = require("gulp-replace");

/** Inject the application insights key into the telemetry file */

@@ -1,10 +1,10 @@
import { gray, red } from "ansi-colors";
import { dest, src, watch } from "gulp";
import esbuild from "gulp-esbuild";
import ts from "gulp-typescript";
import { createProject, reporter } from "gulp-typescript";
import del from "del";

function goodReporter(): ts.reporter.Reporter {
function goodReporter(): reporter.Reporter {
return {
error: (error, typescript) => {
if (error.tsFile) {
@@ -27,7 +27,7 @@ function goodReporter(): ts.reporter.Reporter {
};
}

const tsProject = ts.createProject("tsconfig.json");
const tsProject = createProject("tsconfig.json");

export function cleanOutput() {
return tsProject.projectDirectory

@@ -1,4 +1,4 @@
import webpack from "webpack";
import { Configuration, Stats, webpack } from "webpack";
import { config } from "./webpack.config";

export function compileView(cb: (err?: Error) => void) {
@@ -18,11 +18,11 @@ export function watchView(cb: (err?: Error) => void) {
}

function doWebpack(
internalConfig: webpack.Configuration,
internalConfig: Configuration,
failOnError: boolean,
cb: (err?: Error) => void,
) {
const resultCb = (error: Error | undefined, stats?: webpack.Stats) => {
const resultCb = (error: Error | undefined, stats?: Stats) => {
if (error) {
cb(error);
}

@@ -4,6 +4,7 @@
*/

/** @type {import('@jest/types').Config.InitialOptions} */
// eslint-disable-next-line import/no-commonjs
module.exports = {
projects: [
"<rootDir>/src/view",

extensions/ql-vscode/package-lock.json (generated, 62 changed lines)
@@ -108,8 +108,10 @@
"del": "^6.0.0",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.0.0",
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-etc": "^2.0.2",
"eslint-plugin-github": "^4.4.1",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-jest-dom": "^5.0.1",
"eslint-plugin-prettier": "^5.0.0",
"eslint-plugin-react": "^7.31.8",
@@ -14981,6 +14983,31 @@
"ms": "^2.1.1"
}
},
"node_modules/eslint-import-resolver-typescript": {
"version": "3.6.1",
"resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.6.1.tgz",
"integrity": "sha512-xgdptdoi5W3niYeuQxKmzVDTATvLYqhpwmykwsh7f6HIOStGWEIL9iqZgQDF9u9OEzrRwR8no5q2VT+bjAujTg==",
"dev": true,
"dependencies": {
"debug": "^4.3.4",
"enhanced-resolve": "^5.12.0",
"eslint-module-utils": "^2.7.4",
"fast-glob": "^3.3.1",
"get-tsconfig": "^4.5.0",
"is-core-module": "^2.11.0",
"is-glob": "^4.0.3"
},
"engines": {
"node": "^14.18.0 || >=16.0.0"
},
"funding": {
"url": "https://opencollective.com/unts/projects/eslint-import-resolver-ts"
},
"peerDependencies": {
"eslint": "*",
"eslint-plugin-import": "*"
}
},
"node_modules/eslint-module-utils": {
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz",
@@ -15121,9 +15148,9 @@
}
},
"node_modules/eslint-plugin-import": {
"version": "2.29.0",
"resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.0.tgz",
"integrity": "sha512-QPOO5NO6Odv5lpoTkddtutccQjysJuFxoPS7fAHO+9m9udNHvTCPSAMW9zGAYj8lAIdr40I8yPCdUYrncXtrwg==",
"version": "2.29.1",
"resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz",
"integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==",
"dev": true,
"dependencies": {
"array-includes": "^3.1.7",
@@ -15142,7 +15169,7 @@
"object.groupby": "^1.0.1",
"object.values": "^1.1.7",
"semver": "^6.3.1",
"tsconfig-paths": "^3.14.2"
"tsconfig-paths": "^3.15.0"
},
"engines": {
"node": ">=4"
@@ -17319,6 +17346,18 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-tsconfig": {
"version": "4.7.2",
"resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.2.tgz",
"integrity": "sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==",
"dev": true,
"dependencies": {
"resolve-pkg-maps": "^1.0.0"
},
"funding": {
"url": "https://github.com/privatenumber/get-tsconfig?sponsor=1"
}
},
"node_modules/get-value": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz",
@@ -27594,6 +27633,15 @@
"node": ">= 0.10"
}
},
"node_modules/resolve-pkg-maps": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz",
"integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==",
"dev": true,
"funding": {
"url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
}
},
"node_modules/resolve-url": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz",
@@ -30148,9 +30196,9 @@
}
},
"node_modules/tsconfig-paths": {
"version": "3.14.2",
"resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz",
"integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==",
"version": "3.15.0",
"resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz",
"integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==",
"dev": true,
"dependencies": {
"@types/json5": "^0.0.29",

@@ -2005,8 +2005,10 @@
"del": "^6.0.0",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.0.0",
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-etc": "^2.0.2",
"eslint-plugin-github": "^4.4.1",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-jest-dom": "^5.0.1",
"eslint-plugin-prettier": "^5.0.0",
"eslint-plugin-react": "^7.31.8",

@@ -275,6 +275,7 @@ export class ExtensionTelemetryListener
/**
* The global Telemetry instance
*/
// eslint-disable-next-line import/no-mutable-exports
export let telemetryListener: ExtensionTelemetryListener | undefined;

export async function initializeTelemetry(

@@ -164,18 +164,18 @@ export async function interpretResultsSarif(
* Call cli command to interpret graph results.
*/
export async function interpretGraphResults(
cli: cli.CodeQLCliServer,
cliServer: cli.CodeQLCliServer,
metadata: QueryMetadata | undefined,
resultsPaths: ResultsPaths,
sourceInfo?: cli.SourceInfo,
): Promise<GraphInterpretationData> {
const { resultsPath, interpretedResultsPath } = resultsPaths;
if (await pathExists(interpretedResultsPath)) {
const dot = await cli.readDotFiles(interpretedResultsPath);
const dot = await cliServer.readDotFiles(interpretedResultsPath);
return { dot, t: "GraphInterpretationData" };
}

const dot = await cli.interpretBqrsGraph(
const dot = await cliServer.interpretBqrsGraph(
ensureMetadataIsComplete(metadata),
resultsPath,
interpretedResultsPath,

@@ -27,7 +27,7 @@ const render = () => {

// It's a lot harder to use dynamic imports since those don't import the CSS
// and require a less strict CSP policy
// eslint-disable-next-line @typescript-eslint/no-var-requires
// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-dynamic-require
const view: WebviewDefinition = require(`./${viewName}/index.tsx`).default;

const root = createRoot(element);

@@ -10,7 +10,14 @@ import {
import { extLogger } from "../../../../src/common/logging/vscode";
import * as ghApiClient from "../../../../src/variant-analysis/gh-api/gh-api-client";
import * as ghActionsApiClient from "../../../../src/variant-analysis/gh-api/gh-actions-api-client";
import * as fs from "fs-extra";
import {
ensureDir,
outputJson,
pathExists,
readFile,
readJson,
remove,
} from "fs-extra";
import { join } from "path";
import { Readable } from "stream";
import * as fetchModule from "node-fetch";
@@ -100,7 +107,7 @@ describe("Variant Analysis Manager", () => {

describe("when the directory exists", () => {
beforeEach(async () => {
await fs.ensureDir(join(storagePath, variantAnalysis.id.toString()));
await ensureDir(join(storagePath, variantAnalysis.id.toString()));
});

it("should store the variant analysis", async () => {
@@ -216,7 +223,7 @@ describe("Variant Analysis Manager", () => {
__dirname,
"data/variant-analysis-results.zip",
);
const fileContents = fs.readFileSync(sourceFilePath);
const fileContents = await readFile(sourceFilePath);
const response = new Response(Readable.from(fileContents));
response.size = fileContents.length;
getVariantAnalysisRepoResultStub.mockResolvedValue(response);
@@ -263,7 +270,7 @@ describe("Variant Analysis Manager", () => {
variantAnalysis,
);

await expect(fs.readJson(repoStatesPath)).resolves.toEqual({
await expect(readJson(repoStatesPath)).resolves.toEqual({
[scannedRepos[0].repository.id]: {
repositoryId: scannedRepos[0].repository.id,
downloadStatus:
@@ -284,7 +291,7 @@ describe("Variant Analysis Manager", () => {
),
).rejects.toThrow();

await expect(fs.pathExists(repoStatesPath)).resolves.toBe(false);
await expect(pathExists(repoStatesPath)).resolves.toBe(false);
});

it("should have a failed repo state when the repo task API fails", async () => {
@@ -299,14 +306,14 @@ describe("Variant Analysis Manager", () => {
),
).rejects.toThrow();

await expect(fs.pathExists(repoStatesPath)).resolves.toBe(false);
await expect(pathExists(repoStatesPath)).resolves.toBe(false);

await variantAnalysisManager.autoDownloadVariantAnalysisResult(
scannedRepos[1],
variantAnalysis,
);

await expect(fs.readJson(repoStatesPath)).resolves.toEqual({
await expect(readJson(repoStatesPath)).resolves.toEqual({
[scannedRepos[0].repository.id]: {
repositoryId: scannedRepos[0].repository.id,
downloadStatus:
@@ -332,14 +339,14 @@ describe("Variant Analysis Manager", () => {
),
).rejects.toThrow();

await expect(fs.pathExists(repoStatesPath)).resolves.toBe(false);
await expect(pathExists(repoStatesPath)).resolves.toBe(false);

await variantAnalysisManager.autoDownloadVariantAnalysisResult(
scannedRepos[1],
variantAnalysis,
);

await expect(fs.readJson(repoStatesPath)).resolves.toEqual({
await expect(readJson(repoStatesPath)).resolves.toEqual({
[scannedRepos[0].repository.id]: {
repositoryId: scannedRepos[0].repository.id,
downloadStatus:
@@ -374,7 +381,7 @@ describe("Variant Analysis Manager", () => {
variantAnalysis,
);

await expect(fs.readJson(repoStatesPath)).resolves.toEqual({
await expect(readJson(repoStatesPath)).resolves.toEqual({
[scannedRepos[1].repository.id]: {
repositoryId: scannedRepos[1].repository.id,
downloadStatus:
@@ -396,7 +403,7 @@ describe("Variant Analysis Manager", () => {
async function mockRepoStates(
repoStates: Record<number, VariantAnalysisScannedRepositoryState>,
) {
await fs.outputJson(repoStatesPath, repoStates);
await outputJson(repoStatesPath, repoStates);
}
});
});
@@ -440,7 +447,7 @@ describe("Variant Analysis Manager", () => {
});

it("should remove variant analysis", async () => {
await fs.ensureDir(join(storagePath, dummyVariantAnalysis.id.toString()));
await ensureDir(join(storagePath, dummyVariantAnalysis.id.toString()));

await variantAnalysisManager.rehydrateVariantAnalysis(
dummyVariantAnalysis,
@@ -453,7 +460,7 @@ describe("Variant Analysis Manager", () => {
expect(variantAnalysisManager.variantAnalysesSize).toBe(0);

await expect(
fs.pathExists(join(storagePath, dummyVariantAnalysis.id.toString())),
pathExists(join(storagePath, dummyVariantAnalysis.id.toString())),
).resolves.toBe(false);
});
});
@@ -498,8 +505,8 @@ describe("Variant Analysis Manager", () => {
await createTimestampFile(variantAnalysisStorageLocation);
});

afterEach(() => {
fs.rmSync(variantAnalysisStorageLocation, { recursive: true });
afterEach(async () => {
await remove(variantAnalysisStorageLocation);
});

describe("when the variant analysis is not complete", () => {
@@ -574,8 +581,8 @@ describe("Variant Analysis Manager", () => {
await variantAnalysisManager.rehydrateVariantAnalysis(variantAnalysis);
});

afterEach(() => {
fs.rmSync(variantAnalysisStorageLocation, { recursive: true });
afterEach(async () => {
await remove(variantAnalysisStorageLocation);
});

it("should return early if the variant analysis is not found", async () => {
@@ -637,8 +644,8 @@ describe("Variant Analysis Manager", () => {
});
});

afterEach(() => {
fs.rmSync(variantAnalysisStorageLocation, { recursive: true });
afterEach(async () => {
await remove(variantAnalysisStorageLocation);
});

describe("when the variant analysis does not have any repositories", () => {
@@ -813,8 +820,8 @@ describe("Variant Analysis Manager", () => {
.mockResolvedValue(mockedObject<TextDocument>({}));
});

afterEach(() => {
fs.rmSync(variantAnalysisStorageLocation, { recursive: true });
afterEach(async () => {
await remove(variantAnalysisStorageLocation);
});

it("opens the query text", async () => {
@@ -861,8 +868,8 @@ describe("Variant Analysis Manager", () => {
.mockResolvedValue(mockedObject<TextDocument>({}));
});

afterEach(() => {
fs.rmSync(variantAnalysisStorageLocation, { recursive: true });
afterEach(async () => {
await remove(variantAnalysisStorageLocation);
});

it("opens the query file", async () => {

@@ -1,5 +1,5 @@
import { extLogger } from "../../../../src/common/logging/vscode";
import * as fs from "fs-extra";
import { readFile, pathExists, remove, outputJson, readJson } from "fs-extra";
import { join, resolve } from "path";
import { Readable } from "stream";
import * as fetchModule from "node-fetch";
@@ -54,8 +54,8 @@ describe(VariantAnalysisResultsManager.name, () => {
});

afterEach(async () => {
if (await fs.pathExists(variantAnalysisStoragePath)) {
await fs.remove(variantAnalysisStoragePath);
if (await pathExists(variantAnalysisStoragePath)) {
await remove(variantAnalysisStoragePath);
}
});

@@ -97,7 +97,7 @@ describe(VariantAnalysisResultsManager.name, () => {
__dirname,
"data/variant-analysis-results.zip",
);
fileContents = fs.readFileSync(sourceFilePath);
fileContents = await readFile(sourceFilePath);

getVariantAnalysisRepoResultStub = jest
.spyOn(fetchModule, "default")
@@ -128,9 +128,9 @@ describe(VariantAnalysisResultsManager.name, () => {
() => Promise.resolve(),
);

expect(fs.existsSync(`${repoTaskStorageDirectory}/results.zip`)).toBe(
true,
);
expect(
await pathExists(`${repoTaskStorageDirectory}/results.zip`),
).toBe(true);
});

it("should unzip the results in a `results/` folder", async () => {
@@ -142,7 +142,7 @@ describe(VariantAnalysisResultsManager.name, () => {
);

expect(
fs.existsSync(`${repoTaskStorageDirectory}/results/results.sarif`),
await pathExists(`${repoTaskStorageDirectory}/results/results.sarif`),
).toBe(true);
});

@@ -237,8 +237,8 @@ describe(VariantAnalysisResultsManager.name, () => {
});

afterEach(async () => {
if (await fs.pathExists(variantAnalysisStoragePath)) {
await fs.remove(variantAnalysisStoragePath);
if (await pathExists(variantAnalysisStoragePath)) {
await remove(variantAnalysisStoragePath);
}
});

@@ -256,7 +256,7 @@ describe(VariantAnalysisResultsManager.name, () => {

describe("when the repo task has been written to disk", () => {
beforeEach(async () => {
await fs.outputJson(
await outputJson(
join(repoTaskStorageDirectory, "repo_task.json"),
dummyRepoTask,
);
@@ -276,9 +276,9 @@ describe(VariantAnalysisResultsManager.name, () => {

describe("when the SARIF results are downloaded", () => {
beforeEach(async () => {
await fs.outputJson(
await outputJson(
join(repoTaskStorageDirectory, "results/results.sarif"),
await fs.readJson(
await readJson(
resolve(__dirname, "../../../data/sarif/validSarif.sarif"),
),
);
@@ -313,7 +313,7 @@ describe(VariantAnalysisResultsManager.name, () => {
onResultLoadedSpy.mockClear();

// Delete the directory so it can't read from disk
await fs.remove(variantAnalysisStoragePath);
await remove(variantAnalysisStoragePath);

await expect(
variantAnalysisResultsManager.loadResults(
@@ -343,7 +343,7 @@ describe(VariantAnalysisResultsManager.name, () => {
);

// Delete the directory so it can't read from disk
await fs.remove(variantAnalysisStoragePath);
await remove(variantAnalysisStoragePath);

await expect(
variantAnalysisResultsManager.loadResults(
@@ -365,7 +365,7 @@ describe(VariantAnalysisResultsManager.name, () => {
);

// Delete the directory so it can't read from disk
await fs.remove(variantAnalysisStoragePath);
await remove(variantAnalysisStoragePath);

await expect(
variantAnalysisResultsManager.loadResults(

@@ -21,6 +21,7 @@ export const dbLoc = join(
realpathSync(join(__dirname, "../../../")),
"build/tests/db.zip",
);
// eslint-disable-next-line import/no-mutable-exports
export let storagePath: string;

/**

@@ -1,5 +1,5 @@
import * as tmp from "tmp";
import * as fs from "fs-extra";
import { ensureDir, ensureFile, pathExists, writeFile } from "fs-extra";
import { join } from "path";
import { ExtensionContext, Uri, workspace } from "vscode";

@@ -216,7 +216,7 @@ describe("local databases", () => {

it("should remove a database item", async () => {
const mockDbItem = createMockDB(dir);
await fs.ensureDir(mockDbItem.databaseUri.fsPath);
await ensureDir(mockDbItem.databaseUri.fsPath);

// pretend that this item is the first workspace folder in the list
jest
@@ -235,14 +235,14 @@ describe("local databases", () => {
expect(workspace.updateWorkspaceFolders).toBeCalledWith(0, 1);

// should also delete the db contents
await expect(fs.pathExists(mockDbItem.databaseUri.fsPath)).resolves.toBe(
await expect(pathExists(mockDbItem.databaseUri.fsPath)).resolves.toBe(
false,
);
});

it("should remove a database item outside of the extension controlled area", async () => {
const mockDbItem = createMockDB(dir);
await fs.ensureDir(mockDbItem.databaseUri.fsPath);
await ensureDir(mockDbItem.databaseUri.fsPath);

// pretend that this item is the first workspace folder in the list
jest
@@ -263,7 +263,7 @@ describe("local databases", () => {
expect(workspace.updateWorkspaceFolders).toBeCalledWith(0, 1);

// should NOT delete the db contents
await expect(fs.pathExists(mockDbItem.databaseUri.fsPath)).resolves.toBe(
await expect(pathExists(mockDbItem.databaseUri.fsPath)).resolves.toBe(
true,
);
});
@@ -432,11 +432,11 @@ describe("local databases", () => {

beforeEach(async () => {
directoryPath = join(dir.name, "dir");
await fs.ensureDir(directoryPath);
await ensureDir(directoryPath);
projectPath = join(directoryPath, "dir.testproj");
await fs.writeFile(projectPath, "");
await writeFile(projectPath, "");
qlFilePath = join(directoryPath, "test.ql");
await fs.writeFile(qlFilePath, "");
await writeFile(qlFilePath, "");
});

it("should return true for testproj database in test directory", async () => {
@@ -463,7 +463,7 @@ describe("local databases", () => {

it("should return false for non-testproj database in test directory", async () => {
const anotherProjectPath = join(directoryPath, "dir.proj");
await fs.writeFile(anotherProjectPath, "");
await writeFile(anotherProjectPath, "");

const db = createMockDB(
dir,
@@ -476,9 +476,9 @@ describe("local databases", () => {

it("should return false for testproj database outside test directory", async () => {
const anotherProjectDir = join(dir.name, "other");
await fs.ensureDir(anotherProjectDir);
await ensureDir(anotherProjectDir);
const anotherProjectPath = join(anotherProjectDir, "other.testproj");
await fs.writeFile(anotherProjectPath, "");
await writeFile(anotherProjectPath, "");

const db = createMockDB(
dir,
@@ -524,7 +524,7 @@ describe("local databases", () => {

it("should return false for non-testproj database for test file", async () => {
const anotherProjectPath = join(directoryPath, "dir.proj");
await fs.writeFile(anotherProjectPath, "");
await writeFile(anotherProjectPath, "");

const db = createMockDB(
dir,
@@ -537,7 +537,7 @@ describe("local databases", () => {

it("should return false for testproj database not matching test file", async () => {
const otherTestFile = join(dir.name, "test.ql");
await fs.writeFile(otherTestFile, "");
await writeFile(otherTestFile, "");

const db = createMockDB(
dir,
@@ -553,24 +553,24 @@ describe("local databases", () => {
["src", "output/src_archive"].forEach((name) => {
it(`should find source folder in ${name}`, async () => {
const uri = Uri.file(join(dir.name, name));
fs.createFileSync(join(uri.fsPath, "hucairz.txt"));
await ensureFile(join(uri.fsPath, "hucairz.txt"));
const srcUri = await findSourceArchive(dir.name);
expect(srcUri!.fsPath).toBe(uri.fsPath);
});

it(`should find source archive in ${name}.zip`, async () => {
const uri = Uri.file(join(dir.name, `${name}.zip`));
fs.createFileSync(uri.fsPath);
await ensureFile(uri.fsPath);
const srcUri = await findSourceArchive(dir.name);
expect(srcUri!.fsPath).toBe(uri.fsPath);
});

it(`should prioritize ${name}.zip over ${name}`, async () => {
const uri = Uri.file(join(dir.name, `${name}.zip`));
fs.createFileSync(uri.fsPath);
await ensureFile(uri.fsPath);

const uriFolder = Uri.file(join(dir.name, name));
fs.createFileSync(join(uriFolder.fsPath, "hucairz.txt"));
await ensureFile(join(uriFolder.fsPath, "hucairz.txt"));

const srcUri = await findSourceArchive(dir.name);
expect(srcUri!.fsPath).toBe(uri.fsPath);
@@ -579,9 +579,9 @@ describe("local databases", () => {

it("should prioritize src over output/src_archive", async () => {
const uriSrc = Uri.file(join(dir.name, "src.zip"));
fs.createFileSync(uriSrc.fsPath);
await ensureFile(uriSrc.fsPath);
const uriSrcArchive = Uri.file(join(dir.name, "src.zip"));
fs.createFileSync(uriSrcArchive.fsPath);
await ensureFile(uriSrcArchive.fsPath);

const resultUri = await findSourceArchive(dir.name);
expect(resultUri!.fsPath).toBe(uriSrc.fsPath);
@@ -679,8 +679,8 @@ describe("local databases", () => {
});

describe("when the QL pack already exists", () => {
beforeEach(() => {
fs.mkdirSync(join(dir.name, `codeql-custom-queries-${language}`));
beforeEach(async () => {
await ensureDir(join(dir.name, `codeql-custom-queries-${language}`));
});

it("should exit early", async () => {

@@ -177,6 +177,7 @@ const packageConfiguration: PackageConfiguration =
return properties;
})();

// eslint-disable-next-line import/no-mutable-exports
export let vscodeGetConfigurationMock: jest.SpiedFunction<
typeof workspace.getConfiguration
>;