Add eslint-plugin-import

Our config already disabled several rules from this plugin, but the plugin itself was never installed. I've now installed it, enabled its recommended configuration, removed our own disable rules, and fixed the errors this introduced.
Koen Vlaswinkel
2023-12-21 17:02:31 +01:00
parent 43ea7eb41d
commit 7a7092de0d
16 changed files with 160 additions and 93 deletions

View File

@@ -27,6 +27,8 @@ const baseConfig = {
     "plugin:jest-dom/recommended",
     "plugin:prettier/recommended",
     "plugin:@typescript-eslint/recommended",
+    "plugin:import/recommended",
+    "plugin:import/typescript",
   ],
   rules: {
     "@typescript-eslint/await-thenable": "error",
@@ -57,15 +59,6 @@ const baseConfig = {
     "filenames/match-regexp": "off",
     "func-style": "off",
     "i18n-text/no-en": "off",
-    "import/named": "off",
-    "import/no-dynamic-require": "off",
-    "import/no-dynamic-required": "off",
-    "import/no-anonymous-default-export": "off",
-    "import/no-commonjs": "off",
-    "import/no-mutable-exports": "off",
-    "import/no-namespace": "off",
-    "import/no-unresolved": "off",
-    "import/no-webpack-loader-syntax": "off",
     "no-invalid-this": "off",
     "no-fallthrough": "off",
     "no-console": "off",
@@ -73,6 +66,19 @@ const baseConfig = {
     "github/array-foreach": "off",
     "github/no-then": "off",
     "react/jsx-key": ["error", { checkFragmentShorthand: true }],
+    "import/no-cycle": "off",
+    "import/no-namespace": "off",
+    // Never allow extensions in import paths, except for JSON files where they are required.
+    "import/extensions": ["error", "never", { json: "always" }],
+  },
+  settings: {
+    "import/resolver": {
+      typescript: true,
+      node: true,
+    },
+    "import/extensions": [".js", ".jsx", ".ts", ".tsx", ".json"],
+    // vscode and sarif don't exist on-disk, but only provide types.
+    "import/core-modules": ["vscode", "sarif"],
   },
 };
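
Taken together, the three hunks above leave the import-related portion of the base config looking roughly like this. This is a sketch pieced together from the diff; the pre-existing presets and rules are elided, and the exact surrounding file structure is assumed rather than shown in the commit:

// Sketch of the import-related parts of baseConfig after this change.
const baseConfig = {
  extends: [
    // ...existing presets...
    "plugin:import/recommended",
    "plugin:import/typescript",
  ],
  rules: {
    // ...existing rules...
    "import/no-cycle": "off",
    "import/no-namespace": "off",
    // Never allow extensions in import paths, except for JSON files where
    // they are required. For example, `import { x } from "./helper.ts"` is
    // flagged, while `import data from "./data.json"` must keep ".json".
    "import/extensions": ["error", "never", { json: "always" }],
  },
  settings: {
    // Resolve imports through the TypeScript resolver first, then Node.
    "import/resolver": {
      typescript: true,
      node: true,
    },
    "import/extensions": [".js", ".jsx", ".ts", ".tsx", ".json"],
    // vscode and sarif don't exist on-disk, but only provide types.
    "import/core-modules": ["vscode", "sarif"],
  },
};

The `typescript: true` resolver entry is what the new `eslint-import-resolver-typescript` dev dependency (added alongside `eslint-plugin-import` in the package manifests below) provides.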

View File

@@ -8,27 +8,27 @@ import { VSCodeTheme } from "./theme";
 const themeFiles: { [key in VSCodeTheme]: string } = {
   [VSCodeTheme.Dark]:
-    // eslint-disable-next-line @typescript-eslint/no-var-requires
+    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
     require("!file-loader?modules!../../src/stories/vscode-theme-dark.css")
       .default,
   [VSCodeTheme.Light]:
-    // eslint-disable-next-line @typescript-eslint/no-var-requires
+    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
     require("!file-loader?modules!../../src/stories/vscode-theme-light.css")
       .default,
   [VSCodeTheme.LightHighContrast]:
-    // eslint-disable-next-line @typescript-eslint/no-var-requires
+    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
     require("!file-loader?modules!../../src/stories/vscode-theme-light-high-contrast.css")
       .default,
   [VSCodeTheme.DarkHighContrast]:
-    // eslint-disable-next-line @typescript-eslint/no-var-requires
+    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
     require("!file-loader?modules!../../src/stories/vscode-theme-dark-high-contrast.css")
       .default,
   [VSCodeTheme.GitHubLightDefault]:
-    // eslint-disable-next-line @typescript-eslint/no-var-requires
+    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
     require("!file-loader?modules!../../src/stories/vscode-theme-github-light-default.css")
       .default,
   [VSCodeTheme.GitHubDarkDefault]:
-    // eslint-disable-next-line @typescript-eslint/no-var-requires
+    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
     require("!file-loader?modules!../../src/stories/vscode-theme-github-dark-default.css")
       .default,
 };

View File

@@ -1,5 +1,5 @@
 import { src, dest } from "gulp";
-// eslint-disable-next-line @typescript-eslint/no-var-requires
+// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs
 const replace = require("gulp-replace");
 
 /** Inject the application insights key into the telemetry file */

View File

@@ -1,10 +1,10 @@
 import { gray, red } from "ansi-colors";
 import { dest, src, watch } from "gulp";
 import esbuild from "gulp-esbuild";
-import ts from "gulp-typescript";
+import { createProject, reporter } from "gulp-typescript";
 import del from "del";
 
-function goodReporter(): ts.reporter.Reporter {
+function goodReporter(): reporter.Reporter {
   return {
     error: (error, typescript) => {
       if (error.tsFile) {
@@ -27,7 +27,7 @@ function goodReporter(): ts.reporter.Reporter {
   };
 }
 
-const tsProject = ts.createProject("tsconfig.json");
+const tsProject = createProject("tsconfig.json");
 
 export function cleanOutput() {
   return tsProject.projectDirectory

View File

@@ -1,4 +1,4 @@
-import webpack from "webpack";
+import { Configuration, Stats, webpack } from "webpack";
 import { config } from "./webpack.config";
 
 export function compileView(cb: (err?: Error) => void) {
@@ -18,11 +18,11 @@ export function watchView(cb: (err?: Error) => void) {
 }
 
 function doWebpack(
-  internalConfig: webpack.Configuration,
+  internalConfig: Configuration,
   failOnError: boolean,
   cb: (err?: Error) => void,
 ) {
-  const resultCb = (error: Error | undefined, stats?: webpack.Stats) => {
+  const resultCb = (error: Error | undefined, stats?: Stats) => {
     if (error) {
       cb(error);
     }

View File

@@ -4,6 +4,7 @@
  */
 
 /** @type {import('@jest/types').Config.InitialOptions} */
+// eslint-disable-next-line import/no-commonjs
 module.exports = {
   projects: [
     "<rootDir>/src/view",

View File

@@ -108,8 +108,10 @@
         "del": "^6.0.0",
         "eslint": "^8.56.0",
         "eslint-config-prettier": "^9.0.0",
+        "eslint-import-resolver-typescript": "^3.6.1",
         "eslint-plugin-etc": "^2.0.2",
         "eslint-plugin-github": "^4.4.1",
+        "eslint-plugin-import": "^2.29.1",
         "eslint-plugin-jest-dom": "^5.0.1",
         "eslint-plugin-prettier": "^5.0.0",
         "eslint-plugin-react": "^7.31.8",
@@ -14981,6 +14983,31 @@
         "ms": "^2.1.1"
       }
     },
+    "node_modules/eslint-import-resolver-typescript": {
+      "version": "3.6.1",
+      "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.6.1.tgz",
+      "integrity": "sha512-xgdptdoi5W3niYeuQxKmzVDTATvLYqhpwmykwsh7f6HIOStGWEIL9iqZgQDF9u9OEzrRwR8no5q2VT+bjAujTg==",
+      "dev": true,
+      "dependencies": {
+        "debug": "^4.3.4",
+        "enhanced-resolve": "^5.12.0",
+        "eslint-module-utils": "^2.7.4",
+        "fast-glob": "^3.3.1",
+        "get-tsconfig": "^4.5.0",
+        "is-core-module": "^2.11.0",
+        "is-glob": "^4.0.3"
+      },
+      "engines": {
+        "node": "^14.18.0 || >=16.0.0"
+      },
+      "funding": {
+        "url": "https://opencollective.com/unts/projects/eslint-import-resolver-ts"
+      },
+      "peerDependencies": {
+        "eslint": "*",
+        "eslint-plugin-import": "*"
+      }
+    },
     "node_modules/eslint-module-utils": {
       "version": "2.8.0",
       "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz",
@@ -15121,9 +15148,9 @@
       }
     },
     "node_modules/eslint-plugin-import": {
-      "version": "2.29.0",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.0.tgz",
-      "integrity": "sha512-QPOO5NO6Odv5lpoTkddtutccQjysJuFxoPS7fAHO+9m9udNHvTCPSAMW9zGAYj8lAIdr40I8yPCdUYrncXtrwg==",
+      "version": "2.29.1",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz",
+      "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==",
       "dev": true,
       "dependencies": {
         "array-includes": "^3.1.7",
@@ -15142,7 +15169,7 @@
         "object.groupby": "^1.0.1",
         "object.values": "^1.1.7",
         "semver": "^6.3.1",
-        "tsconfig-paths": "^3.14.2"
+        "tsconfig-paths": "^3.15.0"
       },
       "engines": {
         "node": ">=4"
@@ -17319,6 +17346,18 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/get-tsconfig": {
+      "version": "4.7.2",
+      "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.2.tgz",
+      "integrity": "sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==",
+      "dev": true,
+      "dependencies": {
+        "resolve-pkg-maps": "^1.0.0"
+      },
+      "funding": {
+        "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1"
+      }
+    },
     "node_modules/get-value": {
       "version": "2.0.6",
       "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz",
@@ -27594,6 +27633,15 @@
         "node": ">= 0.10"
       }
     },
+    "node_modules/resolve-pkg-maps": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz",
+      "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==",
+      "dev": true,
+      "funding": {
+        "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
+      }
+    },
     "node_modules/resolve-url": {
       "version": "0.2.1",
       "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz",
@@ -30148,9 +30196,9 @@
       }
     },
    "node_modules/tsconfig-paths": {
-      "version": "3.14.2",
-      "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz",
-      "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==",
+      "version": "3.15.0",
+      "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz",
+      "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==",
      "dev": true,
      "dependencies": {
        "@types/json5": "^0.0.29",

View File

@@ -2005,8 +2005,10 @@
         "del": "^6.0.0",
         "eslint": "^8.56.0",
         "eslint-config-prettier": "^9.0.0",
+        "eslint-import-resolver-typescript": "^3.6.1",
         "eslint-plugin-etc": "^2.0.2",
         "eslint-plugin-github": "^4.4.1",
+        "eslint-plugin-import": "^2.29.1",
         "eslint-plugin-jest-dom": "^5.0.1",
         "eslint-plugin-prettier": "^5.0.0",
         "eslint-plugin-react": "^7.31.8",

View File

@@ -275,6 +275,7 @@ export class ExtensionTelemetryListener
 /**
  * The global Telemetry instance
  */
+// eslint-disable-next-line import/no-mutable-exports
 export let telemetryListener: ExtensionTelemetryListener | undefined;
 
 export async function initializeTelemetry(

View File

@@ -164,18 +164,18 @@ export async function interpretResultsSarif(
  * Call cli command to interpret graph results.
  */
 export async function interpretGraphResults(
-  cli: cli.CodeQLCliServer,
+  cliServer: cli.CodeQLCliServer,
   metadata: QueryMetadata | undefined,
   resultsPaths: ResultsPaths,
   sourceInfo?: cli.SourceInfo,
 ): Promise<GraphInterpretationData> {
   const { resultsPath, interpretedResultsPath } = resultsPaths;
   if (await pathExists(interpretedResultsPath)) {
-    const dot = await cli.readDotFiles(interpretedResultsPath);
+    const dot = await cliServer.readDotFiles(interpretedResultsPath);
     return { dot, t: "GraphInterpretationData" };
   }
 
-  const dot = await cli.interpretBqrsGraph(
+  const dot = await cliServer.interpretBqrsGraph(
     ensureMetadataIsComplete(metadata),
     resultsPath,
     interpretedResultsPath,

View File

@@ -27,7 +27,7 @@ const render = () => {
   // It's a lot harder to use dynamic imports since those don't import the CSS
   // and require a less strict CSP policy
-  // eslint-disable-next-line @typescript-eslint/no-var-requires
+  // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-dynamic-require
   const view: WebviewDefinition = require(`./${viewName}/index.tsx`).default;
 
   const root = createRoot(element);

View File

@@ -10,7 +10,14 @@ import {
 import { extLogger } from "../../../../src/common/logging/vscode";
 import * as ghApiClient from "../../../../src/variant-analysis/gh-api/gh-api-client";
 import * as ghActionsApiClient from "../../../../src/variant-analysis/gh-api/gh-actions-api-client";
-import * as fs from "fs-extra";
+import {
+  ensureDir,
+  outputJson,
+  pathExists,
+  readFile,
+  readJson,
+  remove,
+} from "fs-extra";
 import { join } from "path";
 import { Readable } from "stream";
 import * as fetchModule from "node-fetch";
@@ -100,7 +107,7 @@ describe("Variant Analysis Manager", () => {
     describe("when the directory exists", () => {
       beforeEach(async () => {
-        await fs.ensureDir(join(storagePath, variantAnalysis.id.toString()));
+        await ensureDir(join(storagePath, variantAnalysis.id.toString()));
       });
 
       it("should store the variant analysis", async () => {
@@ -216,7 +223,7 @@ describe("Variant Analysis Manager", () => {
         __dirname,
         "data/variant-analysis-results.zip",
       );
-      const fileContents = fs.readFileSync(sourceFilePath);
+      const fileContents = await readFile(sourceFilePath);
       const response = new Response(Readable.from(fileContents));
       response.size = fileContents.length;
       getVariantAnalysisRepoResultStub.mockResolvedValue(response);
@@ -263,7 +270,7 @@ describe("Variant Analysis Manager", () => {
        variantAnalysis,
      );
 
-      await expect(fs.readJson(repoStatesPath)).resolves.toEqual({
+      await expect(readJson(repoStatesPath)).resolves.toEqual({
        [scannedRepos[0].repository.id]: {
          repositoryId: scannedRepos[0].repository.id,
          downloadStatus:
@@ -284,7 +291,7 @@ describe("Variant Analysis Manager", () => {
        ),
      ).rejects.toThrow();
 
-      await expect(fs.pathExists(repoStatesPath)).resolves.toBe(false);
+      await expect(pathExists(repoStatesPath)).resolves.toBe(false);
    });
 
    it("should have a failed repo state when the repo task API fails", async () => {
@@ -299,14 +306,14 @@ describe("Variant Analysis Manager", () => {
        ),
      ).rejects.toThrow();
 
-      await expect(fs.pathExists(repoStatesPath)).resolves.toBe(false);
+      await expect(pathExists(repoStatesPath)).resolves.toBe(false);
 
      await variantAnalysisManager.autoDownloadVariantAnalysisResult(
        scannedRepos[1],
        variantAnalysis,
      );
 
-      await expect(fs.readJson(repoStatesPath)).resolves.toEqual({
+      await expect(readJson(repoStatesPath)).resolves.toEqual({
        [scannedRepos[0].repository.id]: {
          repositoryId: scannedRepos[0].repository.id,
          downloadStatus:
@@ -332,14 +339,14 @@ describe("Variant Analysis Manager", () => {
        ),
      ).rejects.toThrow();
 
-      await expect(fs.pathExists(repoStatesPath)).resolves.toBe(false);
+      await expect(pathExists(repoStatesPath)).resolves.toBe(false);
 
      await variantAnalysisManager.autoDownloadVariantAnalysisResult(
        scannedRepos[1],
        variantAnalysis,
      );
 
-      await expect(fs.readJson(repoStatesPath)).resolves.toEqual({
+      await expect(readJson(repoStatesPath)).resolves.toEqual({
        [scannedRepos[0].repository.id]: {
          repositoryId: scannedRepos[0].repository.id,
          downloadStatus:
@@ -374,7 +381,7 @@ describe("Variant Analysis Manager", () => {
        variantAnalysis,
      );
 
-      await expect(fs.readJson(repoStatesPath)).resolves.toEqual({
+      await expect(readJson(repoStatesPath)).resolves.toEqual({
        [scannedRepos[1].repository.id]: {
          repositoryId: scannedRepos[1].repository.id,
          downloadStatus:
@@ -396,7 +403,7 @@ describe("Variant Analysis Manager", () => {
      async function mockRepoStates(
        repoStates: Record<number, VariantAnalysisScannedRepositoryState>,
      ) {
-        await fs.outputJson(repoStatesPath, repoStates);
+        await outputJson(repoStatesPath, repoStates);
      }
    });
  });
@@ -440,7 +447,7 @@ describe("Variant Analysis Manager", () => {
    });
 
    it("should remove variant analysis", async () => {
-      await fs.ensureDir(join(storagePath, dummyVariantAnalysis.id.toString()));
+      await ensureDir(join(storagePath, dummyVariantAnalysis.id.toString()));
 
      await variantAnalysisManager.rehydrateVariantAnalysis(
        dummyVariantAnalysis,
@@ -453,7 +460,7 @@ describe("Variant Analysis Manager", () => {
      expect(variantAnalysisManager.variantAnalysesSize).toBe(0);
 
      await expect(
-        fs.pathExists(join(storagePath, dummyVariantAnalysis.id.toString())),
+        pathExists(join(storagePath, dummyVariantAnalysis.id.toString())),
      ).resolves.toBe(false);
    });
  });
@@ -498,8 +505,8 @@ describe("Variant Analysis Manager", () => {
      await createTimestampFile(variantAnalysisStorageLocation);
    });
 
-    afterEach(() => {
-      fs.rmSync(variantAnalysisStorageLocation, { recursive: true });
+    afterEach(async () => {
+      await remove(variantAnalysisStorageLocation);
    });
 
    describe("when the variant analysis is not complete", () => {
@@ -574,8 +581,8 @@ describe("Variant Analysis Manager", () => {
      await variantAnalysisManager.rehydrateVariantAnalysis(variantAnalysis);
    });
 
-    afterEach(() => {
-      fs.rmSync(variantAnalysisStorageLocation, { recursive: true });
+    afterEach(async () => {
+      await remove(variantAnalysisStorageLocation);
    });
 
    it("should return early if the variant analysis is not found", async () => {
@@ -637,8 +644,8 @@ describe("Variant Analysis Manager", () => {
      });
    });
 
-    afterEach(() => {
-      fs.rmSync(variantAnalysisStorageLocation, { recursive: true });
+    afterEach(async () => {
+      await remove(variantAnalysisStorageLocation);
    });
 
    describe("when the variant analysis does not have any repositories", () => {
@@ -813,8 +820,8 @@ describe("Variant Analysis Manager", () => {
        .mockResolvedValue(mockedObject<TextDocument>({}));
    });
 
-    afterEach(() => {
-      fs.rmSync(variantAnalysisStorageLocation, { recursive: true });
+    afterEach(async () => {
+      await remove(variantAnalysisStorageLocation);
    });
 
    it("opens the query text", async () => {
@@ -861,8 +868,8 @@ describe("Variant Analysis Manager", () => {
        .mockResolvedValue(mockedObject<TextDocument>({}));
    });
 
-    afterEach(() => {
-      fs.rmSync(variantAnalysisStorageLocation, { recursive: true });
+    afterEach(async () => {
+      await remove(variantAnalysisStorageLocation);
    });
 
    it("opens the query file", async () => {

View File

@@ -1,5 +1,5 @@
 import { extLogger } from "../../../../src/common/logging/vscode";
-import * as fs from "fs-extra";
+import { readFile, pathExists, remove, outputJson, readJson } from "fs-extra";
 import { join, resolve } from "path";
 import { Readable } from "stream";
 import * as fetchModule from "node-fetch";
@@ -54,8 +54,8 @@ describe(VariantAnalysisResultsManager.name, () => {
  });
 
  afterEach(async () => {
-    if (await fs.pathExists(variantAnalysisStoragePath)) {
-      await fs.remove(variantAnalysisStoragePath);
+    if (await pathExists(variantAnalysisStoragePath)) {
+      await remove(variantAnalysisStoragePath);
    }
  });
@@ -97,7 +97,7 @@ describe(VariantAnalysisResultsManager.name, () => {
        __dirname,
        "data/variant-analysis-results.zip",
      );
-      fileContents = fs.readFileSync(sourceFilePath);
+      fileContents = await readFile(sourceFilePath);
 
      getVariantAnalysisRepoResultStub = jest
        .spyOn(fetchModule, "default")
@@ -128,9 +128,9 @@ describe(VariantAnalysisResultsManager.name, () => {
        () => Promise.resolve(),
      );
 
-      expect(fs.existsSync(`${repoTaskStorageDirectory}/results.zip`)).toBe(
-        true,
-      );
+      expect(
+        await pathExists(`${repoTaskStorageDirectory}/results.zip`),
+      ).toBe(true);
    });
 
    it("should unzip the results in a `results/` folder", async () => {
@@ -142,7 +142,7 @@ describe(VariantAnalysisResultsManager.name, () => {
      );
 
      expect(
-        fs.existsSync(`${repoTaskStorageDirectory}/results/results.sarif`),
+        await pathExists(`${repoTaskStorageDirectory}/results/results.sarif`),
      ).toBe(true);
    });
@@ -237,8 +237,8 @@ describe(VariantAnalysisResultsManager.name, () => {
    });
 
    afterEach(async () => {
-      if (await fs.pathExists(variantAnalysisStoragePath)) {
-        await fs.remove(variantAnalysisStoragePath);
+      if (await pathExists(variantAnalysisStoragePath)) {
+        await remove(variantAnalysisStoragePath);
      }
    });
@@ -256,7 +256,7 @@ describe(VariantAnalysisResultsManager.name, () => {
    describe("when the repo task has been written to disk", () => {
      beforeEach(async () => {
-        await fs.outputJson(
+        await outputJson(
          join(repoTaskStorageDirectory, "repo_task.json"),
          dummyRepoTask,
        );
@@ -276,9 +276,9 @@ describe(VariantAnalysisResultsManager.name, () => {
      describe("when the SARIF results are downloaded", () => {
        beforeEach(async () => {
-          await fs.outputJson(
+          await outputJson(
            join(repoTaskStorageDirectory, "results/results.sarif"),
-            await fs.readJson(
+            await readJson(
              resolve(__dirname, "../../../data/sarif/validSarif.sarif"),
            ),
          );
@@ -313,7 +313,7 @@ describe(VariantAnalysisResultsManager.name, () => {
          onResultLoadedSpy.mockClear();
 
          // Delete the directory so it can't read from disk
-          await fs.remove(variantAnalysisStoragePath);
+          await remove(variantAnalysisStoragePath);
 
          await expect(
            variantAnalysisResultsManager.loadResults(
@@ -343,7 +343,7 @@ describe(VariantAnalysisResultsManager.name, () => {
        );
 
        // Delete the directory so it can't read from disk
-        await fs.remove(variantAnalysisStoragePath);
+        await remove(variantAnalysisStoragePath);
 
        await expect(
          variantAnalysisResultsManager.loadResults(
@@ -365,7 +365,7 @@ describe(VariantAnalysisResultsManager.name, () => {
        );
 
        // Delete the directory so it can't read from disk
-        await fs.remove(variantAnalysisStoragePath);
+        await remove(variantAnalysisStoragePath);
 
        await expect(
          variantAnalysisResultsManager.loadResults(

View File

@@ -21,6 +21,7 @@ export const dbLoc = join(
   realpathSync(join(__dirname, "../../../")),
   "build/tests/db.zip",
 );
+// eslint-disable-next-line import/no-mutable-exports
 export let storagePath: string;
 
 /**
/** /**

View File

@@ -1,5 +1,5 @@
 import * as tmp from "tmp";
-import * as fs from "fs-extra";
+import { ensureDir, ensureFile, pathExists, writeFile } from "fs-extra";
 import { join } from "path";
 
 import { ExtensionContext, Uri, workspace } from "vscode";
@@ -216,7 +216,7 @@ describe("local databases", () => {
    it("should remove a database item", async () => {
      const mockDbItem = createMockDB(dir);
-      await fs.ensureDir(mockDbItem.databaseUri.fsPath);
+      await ensureDir(mockDbItem.databaseUri.fsPath);
 
      // pretend that this item is the first workspace folder in the list
      jest
@@ -235,14 +235,14 @@ describe("local databases", () => {
      expect(workspace.updateWorkspaceFolders).toBeCalledWith(0, 1);
 
      // should also delete the db contents
-      await expect(fs.pathExists(mockDbItem.databaseUri.fsPath)).resolves.toBe(
+      await expect(pathExists(mockDbItem.databaseUri.fsPath)).resolves.toBe(
        false,
      );
    });
 
    it("should remove a database item outside of the extension controlled area", async () => {
      const mockDbItem = createMockDB(dir);
-      await fs.ensureDir(mockDbItem.databaseUri.fsPath);
+      await ensureDir(mockDbItem.databaseUri.fsPath);
 
      // pretend that this item is the first workspace folder in the list
      jest
@@ -263,7 +263,7 @@ describe("local databases", () => {
      expect(workspace.updateWorkspaceFolders).toBeCalledWith(0, 1);
 
      // should NOT delete the db contents
-      await expect(fs.pathExists(mockDbItem.databaseUri.fsPath)).resolves.toBe(
+      await expect(pathExists(mockDbItem.databaseUri.fsPath)).resolves.toBe(
        true,
      );
    });
@@ -432,11 +432,11 @@ describe("local databases", () => {
    beforeEach(async () => {
      directoryPath = join(dir.name, "dir");
-      await fs.ensureDir(directoryPath);
+      await ensureDir(directoryPath);
      projectPath = join(directoryPath, "dir.testproj");
-      await fs.writeFile(projectPath, "");
+      await writeFile(projectPath, "");
      qlFilePath = join(directoryPath, "test.ql");
-      await fs.writeFile(qlFilePath, "");
+      await writeFile(qlFilePath, "");
    });
 
    it("should return true for testproj database in test directory", async () => {
@@ -463,7 +463,7 @@ describe("local databases", () => {
    it("should return false for non-testproj database in test directory", async () => {
      const anotherProjectPath = join(directoryPath, "dir.proj");
-      await fs.writeFile(anotherProjectPath, "");
+      await writeFile(anotherProjectPath, "");
 
      const db = createMockDB(
        dir,
@@ -476,9 +476,9 @@ describe("local databases", () => {
    it("should return false for testproj database outside test directory", async () => {
      const anotherProjectDir = join(dir.name, "other");
-      await fs.ensureDir(anotherProjectDir);
+      await ensureDir(anotherProjectDir);
      const anotherProjectPath = join(anotherProjectDir, "other.testproj");
-      await fs.writeFile(anotherProjectPath, "");
+      await writeFile(anotherProjectPath, "");
 
      const db = createMockDB(
        dir,
@@ -524,7 +524,7 @@ describe("local databases", () => {
    it("should return false for non-testproj database for test file", async () => {
      const anotherProjectPath = join(directoryPath, "dir.proj");
-      await fs.writeFile(anotherProjectPath, "");
+      await writeFile(anotherProjectPath, "");
 
      const db = createMockDB(
        dir,
@@ -537,7 +537,7 @@ describe("local databases", () => {
    it("should return false for testproj database not matching test file", async () => {
      const otherTestFile = join(dir.name, "test.ql");
-      await fs.writeFile(otherTestFile, "");
+      await writeFile(otherTestFile, "");
 
      const db = createMockDB(
        dir,
@@ -553,24 +553,24 @@ describe("local databases", () => {
    ["src", "output/src_archive"].forEach((name) => {
      it(`should find source folder in ${name}`, async () => {
        const uri = Uri.file(join(dir.name, name));
-        fs.createFileSync(join(uri.fsPath, "hucairz.txt"));
+        await ensureFile(join(uri.fsPath, "hucairz.txt"));
 
        const srcUri = await findSourceArchive(dir.name);
        expect(srcUri!.fsPath).toBe(uri.fsPath);
      });
 
      it(`should find source archive in ${name}.zip`, async () => {
        const uri = Uri.file(join(dir.name, `${name}.zip`));
-        fs.createFileSync(uri.fsPath);
+        await ensureFile(uri.fsPath);
 
        const srcUri = await findSourceArchive(dir.name);
        expect(srcUri!.fsPath).toBe(uri.fsPath);
      });
 
      it(`should prioritize ${name}.zip over ${name}`, async () => {
        const uri = Uri.file(join(dir.name, `${name}.zip`));
-        fs.createFileSync(uri.fsPath);
+        await ensureFile(uri.fsPath);
 
        const uriFolder = Uri.file(join(dir.name, name));
-        fs.createFileSync(join(uriFolder.fsPath, "hucairz.txt"));
+        await ensureFile(join(uriFolder.fsPath, "hucairz.txt"));
 
        const srcUri = await findSourceArchive(dir.name);
        expect(srcUri!.fsPath).toBe(uri.fsPath);
@@ -579,9 +579,9 @@ describe("local databases", () => {
    it("should prioritize src over output/src_archive", async () => {
      const uriSrc = Uri.file(join(dir.name, "src.zip"));
-      fs.createFileSync(uriSrc.fsPath);
+      await ensureFile(uriSrc.fsPath);
      const uriSrcArchive = Uri.file(join(dir.name, "src.zip"));
-      fs.createFileSync(uriSrcArchive.fsPath);
+      await ensureFile(uriSrcArchive.fsPath);
 
      const resultUri = await findSourceArchive(dir.name);
      expect(resultUri!.fsPath).toBe(uriSrc.fsPath);
@@ -679,8 +679,8 @@ describe("local databases", () => {
    });
 
    describe("when the QL pack already exists", () => {
-      beforeEach(() => {
-        fs.mkdirSync(join(dir.name, `codeql-custom-queries-${language}`));
+      beforeEach(async () => {
+        await ensureDir(join(dir.name, `codeql-custom-queries-${language}`));
      });
 
      it("should exit early", async () => {

View File

@@ -177,6 +177,7 @@ const packageConfiguration: PackageConfiguration =
   return properties;
 })();
 
+// eslint-disable-next-line import/no-mutable-exports
 export let vscodeGetConfigurationMock: jest.SpiedFunction<
   typeof workspace.getConfiguration
 >;