Merge pull request #1780 from github/koesie10/jest-pure-tests
Convert pure tests to Jest
.vscode/extensions.json (3 changes)

@@ -5,7 +5,8 @@
   "recommendations": [
     "amodio.tsl-problem-matcher",
     "dbaeumer.vscode-eslint",
-    "esbenp.prettier-vscode"
+    "esbenp.prettier-vscode",
+    "Orta.vscode-jest",
   ],
   // List of extensions recommended by VS Code that should not be recommended for users of this workspace.
   "unwantedRecommendations": []

.vscode/launch.json (24 changes)

@@ -29,24 +29,16 @@
   "name": "Launch Unit Tests (vscode-codeql)",
   "type": "node",
   "request": "launch",
-  "program": "${workspaceFolder}/extensions/ql-vscode/node_modules/mocha/bin/_mocha",
+  "program": "${workspaceFolder}/extensions/ql-vscode/node_modules/jest/bin/jest.js",
   "showAsyncStacks": true,
   "cwd": "${workspaceFolder}/extensions/ql-vscode",
-  "runtimeArgs": [
-    "--inspect=9229"
-  ],
   "env": {
-    "LANG": "en-US"
+    "LANG": "en-US",
+    "TZ": "UTC"
   },
   "args": [
-    "--exit",
-    "-u",
-    "bdd",
-    "--colors",
-    "--diff",
-    "--config",
-    ".mocharc.json",
-    "test/pure-tests/**/*.ts"
+    "--projects",
+    "test"
   ],
   "stopOnEntry": false,
   "sourceMaps": true,
@@ -60,6 +52,10 @@
   "program": "${workspaceFolder}/extensions/ql-vscode/node_modules/jest/bin/jest.js",
   "showAsyncStacks": true,
   "cwd": "${workspaceFolder}/extensions/ql-vscode",
+  "args": [
+    "--projects",
+    "src/view"
+  ],
   "stopOnEntry": false,
   "sourceMaps": true,
   "console": "integratedTerminal",
@@ -117,7 +113,7 @@
   "--disable-extension",
   "github.copilot",
   "${workspaceRoot}/extensions/ql-vscode/src/vscode-tests/cli-integration/data",
-  // Uncomment the last line and modify the path to a checked out
+  // Uncomment the last line and modify the path to a checked out
   // instance of the codeql repository so the libraries are
   // available in the workspace for the tests.
   // "${workspaceRoot}/../codeql"

.vscode/settings.json (5 changes)

@@ -37,6 +37,11 @@
   "javascript.preferences.quoteStyle": "single",
   "editor.wordWrapColumn": 100,
+  "jest.rootPath": "./extensions/ql-vscode",
+  "jest.autoRun": "watch",
+  "jest.nodeEnv": {
+    "LANG": "en-US",
+    "TZ": "UTC"
+  },
   "[typescript]": {
     "editor.defaultFormatter": "esbenp.prettier-vscode",
     "editor.formatOnSave": true,

extensions/ql-vscode/.mocharc.json (deleted)

@@ -1,4 +0,0 @@
-{
-  "exit": true,
-  "require": ["test/mocha.setup.js"]
-}

extensions/ql-vscode/jest.config.js

@@ -4,5 +4,5 @@
  */

 module.exports = {
-  projects: ["<rootDir>/src/view"],
+  projects: ["<rootDir>/src/view", "<rootDir>/test"],
 };

extensions/ql-vscode/package.json

@@ -1270,7 +1270,7 @@
   "watch:webpack": "gulp watchView",
   "watch:files": "gulp watchTestData",
   "test": "npm-run-all -p test:*",
-  "test:unit": "mocha --config .mocharc.json 'test/pure-tests/**/*.ts'",
+  "test:unit": "jest --projects test",
   "test:view": "jest --projects src/view",
   "integration": "node ./out/vscode-tests/run-integration-tests.js no-workspace,minimal-workspace",
   "integration:no-workspace": "node ./out/vscode-tests/run-integration-tests.js no-workspace",

extensions/ql-vscode/src/databases/config/db-config-store.ts

@@ -2,7 +2,7 @@ import * as fs from "fs-extra";
 import * as path from "path";
 import { cloneDbConfig, DbConfig } from "./db-config";
 import * as chokidar from "chokidar";
-import { DisposableObject } from "../../pure/disposable-object";
+import { DisposableObject, DisposeHandler } from "../../pure/disposable-object";
 import { DbConfigValidator } from "./db-config-validator";
 import { ValueResult } from "../../common/value-result";
 import { App } from "../../common/app";
@@ -38,7 +38,8 @@ export class DbConfigStore extends DisposableObject {
     this.watchConfig();
   }

-  public dispose(): void {
+  public dispose(disposeHandler?: DisposeHandler): void {
+    super.dispose(disposeHandler);
     this.configWatcher?.unwatch(this.configPath);
   }

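Note on the change above: DbConfigStore previously shadowed dispose() without calling into the base class, so objects tracked by DisposableObject were never released. The following is a rough, hypothetical sketch of the DisposableObject/DisposeHandler contract the new override participates in; the real definitions live in src/pure/disposable-object.ts and may differ in detail.

interface Disposable {
  dispose(): void;
}

// A handler receives each tracked disposable and decides how to dispose it;
// when omitted, plain dispose() is used.
type DisposeHandler = (disposable: Disposable) => void;

class DisposableObject implements Disposable {
  private readonly disposables: Disposable[] = [];

  protected push<T extends Disposable>(obj: T): T {
    this.disposables.push(obj);
    return obj;
  }

  public dispose(disposeHandler?: DisposeHandler): void {
    // Pushed items are disposed in reverse order, matching the behavior
    // the DisposableObject tests later in this diff assert on.
    while (this.disposables.length > 0) {
      const obj = this.disposables.pop()!;
      if (disposeHandler) {
        disposeHandler(obj);
      } else {
        obj.dispose();
      }
    }
  }
}
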
extensions/ql-vscode/test/.eslintrc.js

@@ -1,8 +1,8 @@
 module.exports = {
   env: {
-    mocha: true
+    jest: true,
   },
   parserOptions: {
-    project: './test/tsconfig.json',
+    project: "./test/tsconfig.json",
   },
-}
+};

extensions/ql-vscode/test/jest.config.ts (new file, 201 lines)

@@ -0,0 +1,201 @@
+import type { Config } from "jest";
+
+/*
+ * For a detailed explanation regarding each configuration property and type check, visit:
+ * https://jestjs.io/docs/configuration
+ */
+
+const config: Config = {
+  // All imported modules in your tests should be mocked automatically
+  // automock: false,
+
+  // Stop running tests after `n` failures
+  // bail: 0,
+
+  // The directory where Jest should store its cached dependency information
+  // cacheDirectory: "/private/var/folders/6m/1394pht172qgd7dmw1fwjk100000gn/T/jest_dx",
+
+  // Automatically clear mock calls, instances, contexts and results before every test
+  // clearMocks: true,
+
+  // Indicates whether the coverage information should be collected while executing the test
+  // collectCoverage: false,
+
+  // An array of glob patterns indicating a set of files for which coverage information should be collected
+  // collectCoverageFrom: undefined,
+
+  // The directory where Jest should output its coverage files
+  // coverageDirectory: undefined,
+
+  // An array of regexp pattern strings used to skip coverage collection
+  // coveragePathIgnorePatterns: [
+  //   "/node_modules/"
+  // ],
+
+  // Indicates which provider should be used to instrument code for coverage
+  coverageProvider: "v8",
+
+  // A list of reporter names that Jest uses when writing coverage reports
+  // coverageReporters: [
+  //   "json",
+  //   "text",
+  //   "lcov",
+  //   "clover"
+  // ],
+
+  // An object that configures minimum threshold enforcement for coverage results
+  // coverageThreshold: undefined,
+
+  // A path to a custom dependency extractor
+  // dependencyExtractor: undefined,
+
+  // Make calling deprecated APIs throw helpful error messages
+  // errorOnDeprecated: false,
+
+  // The default configuration for fake timers
+  // fakeTimers: {
+  //   "enableGlobally": false
+  // },
+
+  // Force coverage collection from ignored files using an array of glob patterns
+  // forceCoverageMatch: [],
+
+  // A path to a module which exports an async function that is triggered once before all test suites
+  // globalSetup: undefined,
+
+  // A path to a module which exports an async function that is triggered once after all test suites
+  // globalTeardown: undefined,
+
+  // A set of global variables that need to be available in all test environments
+  // globals: {},
+
+  // Insert Jest's globals (expect, test, describe, beforeEach etc.) into the global environment. If you set this to false, you should import from @jest/globals.
+  // injectGlobals: false,
+
+  // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
+  // maxWorkers: 1,
+
+  // An array of directory names to be searched recursively up from the requiring module's location
+  // moduleDirectories: [
+  //   "node_modules"
+  // ],
+
+  // An array of file extensions your modules use
+  moduleFileExtensions: ["js", "mjs", "cjs", "jsx", "ts", "tsx", "json"],
+
+  // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
+  // modulePathIgnorePatterns: [],
+
+  // Activates notifications for test results
+  // notify: false,
+
+  // An enum that specifies notification mode. Requires { notify: true }
+  // notifyMode: "failure-change",
+
+  // A preset that is used as a base for Jest's configuration
+  preset: "ts-jest",
+
+  // Run tests from one or more projects
+  // projects: undefined,
+
+  // Use this configuration option to add custom reporters to Jest
+  // reporters: undefined,
+
+  // Automatically reset mock state before every test
+  // resetMocks: false,
+
+  // Reset the module registry before running each individual test
+  // resetModules: false,
+
+  // A path to a custom resolver
+  // resolver: undefined,
+
+  // Automatically restore mock state and implementation before every test
+  // restoreMocks: false,
+
+  // The root directory that Jest should scan for tests and modules within
+  // rootDir: undefined,
+
+  // A list of paths to directories that Jest should use to search for files in
+  // roots: [
+  //   "<rootDir>"
+  // ],
+
+  // Allows you to use a custom runner instead of Jest's default test runner
+  // runner: 'vscode',
+
+  // The paths to modules that run some code to configure or set up the testing environment before each test
+  // setupFiles: [],
+
+  // A list of paths to modules that run some code to configure or set up the testing framework before each test
+  setupFilesAfterEnv: ["<rootDir>/jest.setup.ts"],
+
+  // The number of seconds after which a test is considered as slow and reported as such in the results.
+  // slowTestThreshold: 5,
+
+  // A list of paths to snapshot serializer modules Jest should use for snapshot testing
+  // snapshotSerializers: [],
+
+  // The test environment that will be used for testing
+  // testEnvironment: 'jsdom',
+
+  // Options that will be passed to the testEnvironment
+  // testEnvironmentOptions: {},
+
+  // Adds a location field to test results
+  // testLocationInResults: false,
+
+  // The glob patterns Jest uses to detect test files
+  testMatch: ["**/*.test.[jt]s"],
+
+  // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
+  // testPathIgnorePatterns: [
+  //   "/node_modules/"
+  // ],
+
+  // The regexp pattern or array of patterns that Jest uses to detect test files
+  // testRegex: [],
+
+  // This option allows the use of a custom results processor
+  // testResultsProcessor: undefined,
+
+  // This option allows use of a custom test runner
+  // testRunner: "jest-circus/runner",
+
+  // A map from regular expressions to paths to transformers
+  transform: {
+    "^.+\\.tsx?$": [
+      "ts-jest",
+      {
+        tsconfig: "<rootDir>/tsconfig.json",
+      },
+    ],
+    node_modules: [
+      "babel-jest",
+      {
+        presets: ["@babel/preset-env"],
+        plugins: ["@babel/plugin-transform-modules-commonjs"],
+      },
+    ],
+  },
+
+  // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
+  // 'transformIgnorePatterns': [
+  //   // These use ES modules, so need to be transformed
+  //   'node_modules/(?!(?:@vscode/webview-ui-toolkit|@microsoft/.+|exenv-es6)/.*)'
+  // ],
+
+  // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
+  // unmockedModulePathPatterns: undefined,
+
+  // Indicates whether each individual test should be reported during the run
+  // verbose: undefined,
+
+  // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
+  // watchPathIgnorePatterns: [],
+
+  // Whether to use watchman for file crawling
+  // watchman: true,
+};
+
+export default config;

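Most of the new file above is the commented-out jest init template. Stripped to the options that are actually set, the effective configuration is roughly the following (a distilled reading, not part of the PR; the unusual node_modules transform entry is omitted here for clarity):

import type { Config } from "jest";

// Distilled version of test/jest.config.ts: only the live options.
const config: Config = {
  preset: "ts-jest",
  coverageProvider: "v8",
  moduleFileExtensions: ["js", "mjs", "cjs", "jsx", "ts", "tsx", "json"],
  setupFilesAfterEnv: ["<rootDir>/jest.setup.ts"],
  // Only files named *.test.ts / *.test.js are picked up as tests.
  testMatch: ["**/*.test.[jt]s"],
  transform: {
    // TypeScript sources are compiled on the fly by ts-jest.
    "^.+\\.tsx?$": ["ts-jest", { tsconfig: "<rootDir>/tsconfig.json" }],
  },
};

export default config;
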
extensions/ql-vscode/test/jest.setup.ts (new file, 2 lines)

@@ -0,0 +1,2 @@
+process.env.TZ = "UTC";
+process.env.LANG = "en-US";

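The setup file pins the time zone and locale because several pure tests assert on locale- and zone-dependent strings (see the formatDate expectations later in this diff). A small self-contained illustration of why this matters (not part of the PR):

// The same timestamp renders differently depending on the process time zone
// and locale, so date-formatting assertions are only stable if both are fixed.
const when = new Date(1663326904000);

// With TZ=UTC and an en-US locale this prints "Sep 16, 11:15 AM";
// under, say, TZ=America/New_York it would print "Sep 16, 7:15 AM".
console.log(
  when.toLocaleString("en-US", {
    month: "short",
    day: "numeric",
    hour: "numeric",
    minute: "2-digit",
  }),
);
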
extensions/ql-vscode/test/mocha.setup.js (deleted)

@@ -1,8 +0,0 @@
-const path = require('path');
-
-require('ts-node').register({
-  project: path.resolve(__dirname, 'tsconfig.json')
-})
-
-process.env.TZ = 'UTC';
-process.env.LANG = 'en-US';

@@ -1,4 +1,3 @@
-import { expect } from "chai";
 import * as path from "path";
 import * as fs from "fs-extra";

@@ -8,7 +7,7 @@ type CmdDecl = {
   title?: string;
 };

-describe("commands declared in package.json", function () {
+describe("commands declared in package.json", () => {
   const manifest = fs.readJsonSync(path.join(__dirname, "../../package.json"));
   const commands = manifest.contributes.commands;
   const menus = manifest.contributes.menus;
@@ -31,7 +30,7 @@ describe("commands declared in package.json", function () {
     const { command, title } = commandDecl;
     if (command.match(/^codeQL\./) || command.match(/^codeQLQueryResults\./)) {
       paletteCmds.add(command);
-      expect(title).not.to.be.undefined;
+      expect(title).toBeDefined();
       commandTitles[command] = title!;
     } else if (
       command.match(/^codeQLDatabases\./) ||
@@ -42,10 +41,10 @@ describe("commands declared in package.json", function () {
       command.match(/^codeQLTests\./)
     ) {
       scopedCmds.add(command);
-      expect(title).not.to.be.undefined;
+      expect(title).toBeDefined();
       commandTitles[command] = title!;
     } else {
-      expect.fail(`Unexpected command name ${command}`);
+      fail(`Unexpected command name ${command}`);
     }
   });

@@ -66,35 +65,27 @@ describe("commands declared in package.json", function () {
     disabledInPalette.add(commandDecl.command);
   });

-  it("should have commands appropriately prefixed", function () {
+  it("should have commands appropriately prefixed", () => {
     paletteCmds.forEach((command) => {
-      expect(
-        commandTitles[command],
-        `command ${command} should be prefixed with 'CodeQL: ', since it is accessible from the command palette`,
-      ).to.match(/^CodeQL: /);
+      // command ${command} should be prefixed with 'CodeQL: ', since it is accessible from the command palette
+      expect(commandTitles[command]).toMatch(/^CodeQL: /);
     });

     contribContextMenuCmds.forEach((command) => {
-      expect(
-        commandTitles[command],
-        `command ${command} should be prefixed with 'CodeQL: ', since it is accessible from a context menu in a non-extension-controlled context`,
-      ).to.match(/^CodeQL: /);
+      // command ${command} should be prefixed with 'CodeQL: ', since it is accessible from a context menu in a non-extension-controlled context
+      expect(commandTitles[command]).toMatch(/^CodeQL: /);
     });

     scopedCmds.forEach((command) => {
-      expect(
-        commandTitles[command],
-        `command ${command} should not be prefixed with 'CodeQL: ', since it is accessible from an extension-controlled context`,
-      ).not.to.match(/^CodeQL: /);
+      // command ${command} should not be prefixed with 'CodeQL: ', since it is accessible from an extension-controlled context
+      expect(commandTitles[command]).not.toMatch(/^CodeQL: /);
     });
   });

-  it("should have the right commands accessible from the command palette", function () {
+  it("should have the right commands accessible from the command palette", () => {
     paletteCmds.forEach((command) => {
-      expect(
-        disabledInPalette.has(command),
-        `command ${command} should be enabled in the command palette`,
-      ).to.be.false;
+      // command ${command} should be enabled in the command palette
+      expect(disabledInPalette.has(command)).toBe(false);
     });

     // Commands in contribContextMenuCmds may reasonbly be enabled or
@@ -103,10 +94,8 @@ describe("commands declared in package.json", function () {
     // query to run, but codeQL.setCurrentDatabase is not.

     scopedCmds.forEach((command) => {
-      expect(
-        disabledInPalette.has(command),
-        `command ${command} should be disabled in the command palette`,
-      ).to.be.true;
+      // command ${command} should be disabled in the command palette
+      expect(disabledInPalette.has(command)).toBe(true);
     });
   });
 });

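The assertion rewrites in this file repeat mechanically throughout the rest of the PR. As a reference, here is an illustrative (not exhaustive) sketch of the chai-to-jest translations being applied; note also that chai's second argument (the assertion message) has no jest equivalent, which is why the descriptive messages above were demoted to comments:

it("illustrates the chai-to-jest matcher translations used in this PR", () => {
  // chai: expect(x).to.deep.equal(y)  — structural equality
  expect({ a: 1 }).toEqual({ a: 1 });

  // chai: expect(x).to.equal(y) / to.eq(y)  — strict equality
  expect("x").toBe("x");

  // chai: expect(x).to.be.undefined / expect(x).not.to.be.undefined
  expect(undefined).toBeUndefined();
  expect(42).toBeDefined();

  // chai: expect(flag).to.be.true / .to.be.false
  expect(1 < 2).toBe(true);

  // chai: expect(list).to.be.empty / .to.have.length(n)
  expect([]).toHaveLength(0);

  // chai: expect(text).to.match(/re/)
  expect("CodeQL: Run Query").toMatch(/^CodeQL: /);
});
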
@@ -1,10 +1,9 @@
 import * as fs from "fs-extra";
 import * as path from "path";
 import { DbConfigStore } from "../../../../src/databases/config/db-config-store";
-import { expect } from "chai";
 import { createMockApp } from "../../../__mocks__/appMock";

-describe("db config store", async () => {
+describe("db config store", () => {
   const extensionPath = path.join(__dirname, "../../../..");
   const tempWorkspaceStoragePath = path.join(__dirname, "test-workspace");
   const testDataStoragePath = path.join(__dirname, "data");
@@ -31,15 +30,17 @@ describe("db config store", async () => {
     const configStore = new DbConfigStore(app);
     await configStore.initialize();

-    expect(await fs.pathExists(configPath)).to.be.true;
+    expect(await fs.pathExists(configPath)).toBe(true);

     const config = configStore.getConfig().value;
-    expect(config.databases.remote.repositoryLists).to.be.empty;
-    expect(config.databases.remote.owners).to.be.empty;
-    expect(config.databases.remote.repositories).to.be.empty;
-    expect(config.databases.local.lists).to.be.empty;
-    expect(config.databases.local.databases).to.be.empty;
-    expect(config.selected).to.be.undefined;
+    expect(config.databases.remote.repositoryLists).toHaveLength(0);
+    expect(config.databases.remote.owners).toHaveLength(0);
+    expect(config.databases.remote.repositories).toHaveLength(0);
+    expect(config.databases.local.lists).toHaveLength(0);
+    expect(config.databases.local.databases).toHaveLength(0);
+    expect(config.selected).toBeUndefined();
+
+    configStore.dispose();
   });

   it("should load an existing config", async () => {
@@ -51,20 +52,20 @@ describe("db config store", async () => {
     await configStore.initialize();

     const config = configStore.getConfig().value;
-    expect(config.databases.remote.repositoryLists).to.have.length(1);
-    expect(config.databases.remote.repositoryLists[0]).to.deep.equal({
+    expect(config.databases.remote.repositoryLists).toHaveLength(1);
+    expect(config.databases.remote.repositoryLists[0]).toEqual({
       name: "repoList1",
       repositories: ["foo/bar", "foo/baz"],
     });
-    expect(config.databases.remote.owners).to.be.empty;
-    expect(config.databases.remote.repositories).to.have.length(3);
-    expect(config.databases.remote.repositories).to.deep.equal([
+    expect(config.databases.remote.owners).toHaveLength(0);
+    expect(config.databases.remote.repositories).toHaveLength(3);
+    expect(config.databases.remote.repositories).toEqual([
       "owner/repo1",
       "owner/repo2",
       "owner/repo3",
     ]);
-    expect(config.databases.local.lists).to.have.length(2);
-    expect(config.databases.local.lists[0]).to.deep.equal({
+    expect(config.databases.local.lists).toHaveLength(2);
+    expect(config.databases.local.lists[0]).toEqual({
       name: "localList1",
       databases: [
         {
@@ -75,17 +76,19 @@ describe("db config store", async () => {
         },
       ],
     });
-    expect(config.databases.local.databases).to.have.length(1);
-    expect(config.databases.local.databases[0]).to.deep.equal({
+    expect(config.databases.local.databases).toHaveLength(1);
+    expect(config.databases.local.databases[0]).toEqual({
       name: "example-db",
       dateAdded: 1668096927267,
      language: "ruby",
       storagePath: "/path/to/database/",
     });
-    expect(config.selected).to.deep.equal({
+    expect(config.selected).toEqual({
       kind: "configDefined",
       value: "path.to.database",
     });
+
+    configStore.dispose();
   });

   it("should load an existing config without selected db", async () => {
@@ -104,7 +107,9 @@ describe("db config store", async () => {
     await configStore.initialize();

     const config = configStore.getConfig().value;
-    expect(config.selected).to.be.undefined;
+    expect(config.selected).toBeUndefined();
+
+    configStore.dispose();
   });

   it("should not allow modification of the config", async () => {
@@ -119,8 +124,8 @@ describe("db config store", async () => {
     config.databases.remote.repositoryLists = [];

     const reRetrievedConfig = configStore.getConfig().value;
-    expect(reRetrievedConfig.databases.remote.repositoryLists).to.have.length(
-      1,
-    );
+    expect(reRetrievedConfig.databases.remote.repositoryLists).toHaveLength(1);

     configStore.dispose();
   });
 });

@@ -1,10 +1,9 @@
-import { expect } from "chai";
 import * as path from "path";
 import { DbConfig } from "../../../../src/databases/config/db-config";
 import { DbConfigValidator } from "../../../../src/databases/config/db-config-validator";

-describe("db config validation", async () => {
-  const extensionPath = path.join(__dirname, "../../..");
+describe("db config validation", () => {
+  const extensionPath = path.join(__dirname, "../../../..");
   const configValidator = new DbConfigValidator(extensionPath);

   it("should return error when file is not valid", async () => {
@@ -27,15 +26,15 @@ describe("db config validation", async () => {

     const validationOutput = configValidator.validate(dbConfig);

-    expect(validationOutput).to.have.length(3);
+    expect(validationOutput).toHaveLength(3);

-    expect(validationOutput[0]).to.deep.equal(
+    expect(validationOutput[0]).toEqual(
       "/databases must have required property 'local'",
     );
-    expect(validationOutput[1]).to.deep.equal(
+    expect(validationOutput[1]).toEqual(
       "/databases/remote must have required property 'owners'",
     );
-    expect(validationOutput[2]).to.deep.equal(
+    expect(validationOutput[2]).toEqual(
       "/databases/remote must NOT have additional properties",
     );
   });

@@ -1,5 +1,3 @@
-import { expect } from "chai";
-
 import { DbConfig } from "../../../src/databases/config/db-config";
 import { DbItemKind } from "../../../src/databases/db-item";
 import {
@@ -26,22 +24,22 @@ describe("db tree creator", () => {

     const dbTreeRoot = createRemoteTree(dbConfig);

-    expect(dbTreeRoot).to.be.ok;
-    expect(dbTreeRoot.kind).to.equal(DbItemKind.RootRemote);
-    expect(dbTreeRoot.children.length).to.equal(3);
-    expect(dbTreeRoot.children[0]).to.deep.equal({
+    expect(dbTreeRoot).toBeTruthy();
+    expect(dbTreeRoot.kind).toBe(DbItemKind.RootRemote);
+    expect(dbTreeRoot.children.length).toBe(3);
+    expect(dbTreeRoot.children[0]).toEqual({
       kind: DbItemKind.RemoteSystemDefinedList,
       listName: "top_10",
       listDisplayName: "Top 10 repositories",
       listDescription: "Top 10 repositories of a language",
     });
-    expect(dbTreeRoot.children[1]).to.deep.equal({
+    expect(dbTreeRoot.children[1]).toEqual({
       kind: DbItemKind.RemoteSystemDefinedList,
       listName: "top_100",
       listDisplayName: "Top 100 repositories",
       listDescription: "Top 100 repositories of a language",
     });
-    expect(dbTreeRoot.children[2]).to.deep.equal({
+    expect(dbTreeRoot.children[2]).toEqual({
       kind: DbItemKind.RemoteSystemDefinedList,
       listName: "top_1000",
       listDisplayName: "Top 1000 repositories",
@@ -75,14 +73,14 @@ describe("db tree creator", () => {

     const dbTreeRoot = createRemoteTree(dbConfig);

-    expect(dbTreeRoot).to.be.ok;
-    expect(dbTreeRoot.kind).to.equal(DbItemKind.RootRemote);
+    expect(dbTreeRoot).toBeTruthy();
+    expect(dbTreeRoot.kind).toBe(DbItemKind.RootRemote);
     const repositoryListNodes = dbTreeRoot.children.filter(
       (child) => child.kind === DbItemKind.RemoteUserDefinedList,
     );

-    expect(repositoryListNodes.length).to.equal(2);
-    expect(repositoryListNodes[0]).to.deep.equal({
+    expect(repositoryListNodes.length).toBe(2);
+    expect(repositoryListNodes[0]).toEqual({
       kind: DbItemKind.RemoteUserDefinedList,
       listName: dbConfig.databases.remote.repositoryLists[0].name,
       repos: dbConfig.databases.remote.repositoryLists[0].repositories.map(
@@ -92,7 +90,7 @@ describe("db tree creator", () => {
         }),
       ),
     });
-    expect(repositoryListNodes[1]).to.deep.equal({
+    expect(repositoryListNodes[1]).toEqual({
       kind: DbItemKind.RemoteUserDefinedList,
       listName: dbConfig.databases.remote.repositoryLists[1].name,
       repos: dbConfig.databases.remote.repositoryLists[1].repositories.map(
@@ -121,18 +119,18 @@ describe("db tree creator", () => {

     const dbTreeRoot = createRemoteTree(dbConfig);

-    expect(dbTreeRoot).to.be.ok;
-    expect(dbTreeRoot.kind).to.equal(DbItemKind.RootRemote);
+    expect(dbTreeRoot).toBeTruthy();
+    expect(dbTreeRoot.kind).toBe(DbItemKind.RootRemote);
     const ownerNodes = dbTreeRoot.children.filter(
       (child) => child.kind === DbItemKind.RemoteOwner,
     );

-    expect(ownerNodes.length).to.equal(2);
-    expect(ownerNodes[0]).to.deep.equal({
+    expect(ownerNodes.length).toBe(2);
+    expect(ownerNodes[0]).toEqual({
       kind: DbItemKind.RemoteOwner,
       ownerName: dbConfig.databases.remote.owners[0],
     });
-    expect(ownerNodes[1]).to.deep.equal({
+    expect(ownerNodes[1]).toEqual({
       kind: DbItemKind.RemoteOwner,
       ownerName: dbConfig.databases.remote.owners[1],
     });
@@ -155,22 +153,22 @@ describe("db tree creator", () => {

     const dbTreeRoot = createRemoteTree(dbConfig);

-    expect(dbTreeRoot).to.be.ok;
-    expect(dbTreeRoot.kind).to.equal(DbItemKind.RootRemote);
+    expect(dbTreeRoot).toBeTruthy();
+    expect(dbTreeRoot.kind).toBe(DbItemKind.RootRemote);
     const repoNodes = dbTreeRoot.children.filter(
       (child) => child.kind === DbItemKind.RemoteRepo,
     );

-    expect(repoNodes.length).to.equal(3);
-    expect(repoNodes[0]).to.deep.equal({
+    expect(repoNodes.length).toBe(3);
+    expect(repoNodes[0]).toEqual({
       kind: DbItemKind.RemoteRepo,
       repoFullName: dbConfig.databases.remote.repositories[0],
     });
-    expect(repoNodes[1]).to.deep.equal({
+    expect(repoNodes[1]).toEqual({
       kind: DbItemKind.RemoteRepo,
       repoFullName: dbConfig.databases.remote.repositories[1],
     });
-    expect(repoNodes[2]).to.deep.equal({
+    expect(repoNodes[2]).toEqual({
       kind: DbItemKind.RemoteRepo,
       repoFullName: dbConfig.databases.remote.repositories[2],
     });
@@ -194,9 +192,9 @@ describe("db tree creator", () => {

     const dbTreeRoot = createLocalTree(dbConfig);

-    expect(dbTreeRoot).to.be.ok;
-    expect(dbTreeRoot.kind).to.equal(DbItemKind.RootLocal);
-    expect(dbTreeRoot.children.length).to.equal(0);
+    expect(dbTreeRoot).toBeTruthy();
+    expect(dbTreeRoot.kind).toBe(DbItemKind.RootLocal);
+    expect(dbTreeRoot.children.length).toBe(0);
   });

   it("should create local list nodes", () => {
@@ -245,14 +243,14 @@ describe("db tree creator", () => {

     const dbTreeRoot = createLocalTree(dbConfig);

-    expect(dbTreeRoot).to.be.ok;
-    expect(dbTreeRoot.kind).to.equal(DbItemKind.RootLocal);
+    expect(dbTreeRoot).toBeTruthy();
+    expect(dbTreeRoot.kind).toBe(DbItemKind.RootLocal);
     const localListNodes = dbTreeRoot.children.filter(
       (child) => child.kind === DbItemKind.LocalList,
     );

-    expect(localListNodes.length).to.equal(2);
-    expect(localListNodes[0]).to.deep.equal({
+    expect(localListNodes.length).toBe(2);
+    expect(localListNodes[0]).toEqual({
       kind: DbItemKind.LocalList,
       listName: dbConfig.databases.local.lists[0].name,
       databases: dbConfig.databases.local.lists[0].databases.map((db) => ({
@@ -263,7 +261,7 @@ describe("db tree creator", () => {
         storagePath: db.storagePath,
       })),
     });
-    expect(localListNodes[1]).to.deep.equal({
+    expect(localListNodes[1]).toEqual({
       kind: DbItemKind.LocalList,
       listName: dbConfig.databases.local.lists[1].name,
       databases: dbConfig.databases.local.lists[1].databases.map((db) => ({
@@ -306,21 +304,21 @@ describe("db tree creator", () => {

     const dbTreeRoot = createLocalTree(dbConfig);

-    expect(dbTreeRoot).to.be.ok;
-    expect(dbTreeRoot.kind).to.equal(DbItemKind.RootLocal);
+    expect(dbTreeRoot).toBeTruthy();
+    expect(dbTreeRoot.kind).toBe(DbItemKind.RootLocal);
     const localDatabaseNodes = dbTreeRoot.children.filter(
       (child) => child.kind === DbItemKind.LocalDatabase,
     );

-    expect(localDatabaseNodes.length).to.equal(2);
-    expect(localDatabaseNodes[0]).to.deep.equal({
+    expect(localDatabaseNodes.length).toBe(2);
+    expect(localDatabaseNodes[0]).toEqual({
       kind: DbItemKind.LocalDatabase,
       databaseName: dbConfig.databases.local.databases[0].name,
       dateAdded: dbConfig.databases.local.databases[0].dateAdded,
       language: dbConfig.databases.local.databases[0].language,
       storagePath: dbConfig.databases.local.databases[0].storagePath,
     });
-    expect(localDatabaseNodes[1]).to.deep.equal({
+    expect(localDatabaseNodes[1]).toEqual({
       kind: DbItemKind.LocalDatabase,
       databaseName: dbConfig.databases.local.databases[1].name,
       dateAdded: dbConfig.databases.local.databases[1].dateAdded,

@@ -1,11 +1,8 @@
-import { expect } from "chai";
-import "mocha";
-
 import { formatDate } from "../../src/pure/date";

 describe("Date", () => {
   it("should return a formatted date", () => {
-    expect(formatDate(new Date(1663326904000))).to.eq("Sep 16, 11:15 AM");
-    expect(formatDate(new Date(1631783704000))).to.eq("Sep 16, 2021, 9:15 AM");
+    expect(formatDate(new Date(1663326904000))).toBe("Sep 16, 11:15 AM");
+    expect(formatDate(new Date(1631783704000))).toBe("Sep 16, 2021, 9:15 AM");
   });
 });

@@ -1,36 +1,31 @@
-import "chai";
-import "chai/register-should";
-import "sinon-chai";
-import * as sinon from "sinon";
-import "mocha";
-
 import { DisposableObject } from "../../src/pure/disposable-object";
-import { expect } from "chai";

 describe("DisposableObject and DisposeHandler", () => {
-  let disposable1: { dispose: sinon.SinonSpy };
-  let disposable2: { dispose: sinon.SinonSpy };
-  let disposable3: { dispose: sinon.SinonSpy };
-  let disposable4: { dispose: sinon.SinonSpy };
+  const disposable1 = {
+    dispose: jest.fn(),
+  };
+  const disposable2 = {
+    dispose: jest.fn(),
+  };
+  const disposable3 = {
+    dispose: jest.fn(),
+  };
+  const disposable4 = {
+    dispose: jest.fn(),
+  };
   let disposableObject: any;
   let nestedDisposableObject: any;
-  const sandbox = sinon.createSandbox();

   beforeEach(() => {
-    sandbox.restore();
-    disposable1 = { dispose: sandbox.spy() };
-    disposable2 = { dispose: sandbox.spy() };
-    disposable3 = { dispose: sandbox.spy() };
-    disposable4 = { dispose: sandbox.spy() };
+    disposable1.dispose.mockClear();
+    disposable2.dispose.mockClear();
+    disposable3.dispose.mockClear();
+    disposable4.dispose.mockClear();

     disposableObject = new MyDisposableObject();
     nestedDisposableObject = new MyDisposableObject();
   });

-  afterEach(() => {
-    sandbox.restore();
-  });
-
   it("should dispose tracked and pushed objects", () => {
     disposableObject.push(disposable1);
     disposableObject.push(disposable2);
@@ -39,45 +34,47 @@ describe("DisposableObject and DisposeHandler", () => {

     disposableObject.dispose();

-    expect(disposable1.dispose).to.have.been.called;
-    expect(disposable2.dispose).to.have.been.called;
-    expect(disposable3.dispose).to.have.been.called;
+    expect(disposable1.dispose).toBeCalled();
+    expect(disposable2.dispose).toBeCalled();
+    expect(disposable3.dispose).toBeCalled();

     // pushed items must be called in reverse order
-    sinon.assert.callOrder(disposable2.dispose, disposable1.dispose);
+    expect(disposable2.dispose.mock.invocationCallOrder[0]).toBeLessThan(
+      disposable1.dispose.mock.invocationCallOrder[0],
+    );

     // now that disposableObject has been disposed, subsequent disposals are
     // no-ops
-    disposable1.dispose.resetHistory();
-    disposable2.dispose.resetHistory();
-    disposable3.dispose.resetHistory();
+    disposable1.dispose.mockClear();
+    disposable2.dispose.mockClear();
+    disposable3.dispose.mockClear();

     disposableObject.dispose();

-    expect(disposable1.dispose).not.to.have.been.called;
-    expect(disposable2.dispose).not.to.have.been.called;
-    expect(disposable3.dispose).not.to.have.been.called;
+    expect(disposable1.dispose).not.toBeCalled();
+    expect(disposable2.dispose).not.toBeCalled();
+    expect(disposable3.dispose).not.toBeCalled();
   });

   it("should dispose and stop tracking objects", () => {
     disposableObject.track(disposable1);
     disposableObject.disposeAndStopTracking(disposable1);

-    expect(disposable1.dispose).to.have.been.called;
-    disposable1.dispose.resetHistory();
+    expect(disposable1.dispose).toBeCalled();
+    disposable1.dispose.mockClear();

     disposableObject.dispose();
-    expect(disposable1.dispose).not.to.have.been.called;
+    expect(disposable1.dispose).not.toBeCalled();
   });

   it("should avoid disposing an object that is not tracked", () => {
     disposableObject.push(disposable1);
     disposableObject.disposeAndStopTracking(disposable1);

-    expect(disposable1.dispose).not.to.have.been.called;
+    expect(disposable1.dispose).not.toBeCalled();

     disposableObject.dispose();
-    expect(disposable1.dispose).to.have.been.called;
+    expect(disposable1.dispose).toBeCalled();
   });

   it("ahould use a dispose handler", () => {
@@ -94,24 +91,24 @@ describe("DisposableObject and DisposeHandler", () => {

     disposableObject.dispose(handler);

-    expect(disposable1.dispose).to.have.been.called;
-    expect(disposable2.dispose).not.to.have.been.called;
-    expect(disposable3.dispose).to.have.been.called;
-    expect(disposable4.dispose).not.to.have.been.called;
+    expect(disposable1.dispose).toBeCalled();
+    expect(disposable2.dispose).not.toBeCalled();
+    expect(disposable3.dispose).toBeCalled();
+    expect(disposable4.dispose).not.toBeCalled();

     // now that disposableObject has been disposed, subsequent disposals are
     // no-ops
-    disposable1.dispose.resetHistory();
-    disposable2.dispose.resetHistory();
-    disposable3.dispose.resetHistory();
-    disposable4.dispose.resetHistory();
+    disposable1.dispose.mockClear();
+    disposable2.dispose.mockClear();
+    disposable3.dispose.mockClear();
+    disposable4.dispose.mockClear();

     disposableObject.dispose();

-    expect(disposable1.dispose).not.to.have.been.called;
-    expect(disposable2.dispose).not.to.have.been.called;
-    expect(disposable3.dispose).not.to.have.been.called;
-    expect(disposable4.dispose).not.to.have.been.called;
+    expect(disposable1.dispose).not.toBeCalled();
+    expect(disposable2.dispose).not.toBeCalled();
+    expect(disposable3.dispose).not.toBeCalled();
+    expect(disposable4.dispose).not.toBeCalled();
   });

 class MyDisposableObject extends DisposableObject {

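For the spy conversion above, the rough sinon-to-jest correspondences are worth spelling out, since jest has no direct analogue of sinon.assert.callOrder. An illustrative sketch (not part of the PR):

it("shows the sinon-to-jest spy translations used above (sketch)", () => {
  // sinon: const spy = sandbox.spy()
  const spy = jest.fn();
  const other = jest.fn();

  spy("first");
  other("second");

  // sinon: expect(spy).to.have.been.called
  expect(spy).toBeCalled();

  // sinon: sinon.assert.callOrder(spy, other) — jest instead compares the
  // global invocation order indices recorded on each mock.
  expect(spy.mock.invocationCallOrder[0]).toBeLessThan(
    other.mock.invocationCallOrder[0],
  );

  // sinon: spy.resetHistory()
  spy.mockClear();
  expect(spy).not.toBeCalled();
});
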
@@ -1,45 +1,36 @@
-import * as chai from "chai";
-import "chai/register-should";
-import * as sinonChai from "sinon-chai";
-import "mocha";
 import * as path from "path";
-import * as chaiAsPromised from "chai-as-promised";

 import {
   gatherQlFiles,
   getDirectoryNamesInsidePath,
 } from "../../src/pure/files";

-chai.use(sinonChai);
-chai.use(chaiAsPromised);
-const expect = chai.expect;
-
 describe("files", () => {
   const dataDir = path.join(path.dirname(__dirname), "data");
   const data2Dir = path.join(path.dirname(__dirname), "data2");

-  describe("gatherQlFiles", async () => {
+  describe("gatherQlFiles", () => {
     it("should find one file", async () => {
       const singleFile = path.join(dataDir, "query.ql");
       const result = await gatherQlFiles([singleFile]);
-      expect(result).to.deep.equal([[singleFile], false]);
+      expect(result).toEqual([[singleFile], false]);
     });

     it("should find no files", async () => {
       const result = await gatherQlFiles([]);
-      expect(result).to.deep.equal([[], false]);
+      expect(result).toEqual([[], false]);
     });

     it("should find no files", async () => {
       const singleFile = path.join(dataDir, "library.qll");
       const result = await gatherQlFiles([singleFile]);
-      expect(result).to.deep.equal([[], false]);
+      expect(result).toEqual([[], false]);
     });

     it("should handle invalid file", async () => {
       const singleFile = path.join(dataDir, "xxx");
       const result = await gatherQlFiles([singleFile]);
-      expect(result).to.deep.equal([[], false]);
+      expect(result).toEqual([[], false]);
     });

     it("should find two files", async () => {
@@ -54,7 +45,7 @@ describe("files", () => {
         notFile,
         invalidFile,
       ]);
-      expect(result.sort()).to.deep.equal([[singleFile, otherFile], false]);
+      expect(result.sort()).toEqual([[singleFile, otherFile], false]);
     });

     it("should scan a directory", async () => {
@@ -63,7 +54,7 @@ describe("files", () => {
       const file3 = path.join(dataDir, "query.ql");

       const result = await gatherQlFiles([dataDir]);
-      expect(result.sort()).to.deep.equal([[file1, file2, file3], true]);
+      expect(result.sort()).toEqual([[file1, file2, file3], true]);
     });

     it("should scan a directory and some files", async () => {
@@ -72,7 +63,7 @@ describe("files", () => {
       const empty2File = path.join(data2Dir, "sub-folder", "empty2.ql");

       const result = await gatherQlFiles([singleFile, data2Dir]);
-      expect(result.sort()).to.deep.equal([
+      expect(result.sort()).toEqual([
         [singleFile, empty1File, empty2File],
         true,
       ]);
@@ -85,27 +76,27 @@ describe("files", () => {

       const result = await gatherQlFiles([file1, dataDir, file3]);
       result[0].sort();
-      expect(result.sort()).to.deep.equal([[file1, file2, file3], true]);
+      expect(result.sort()).toEqual([[file1, file2, file3], true]);
     });
   });

-  describe("getDirectoryNamesInsidePath", async () => {
+  describe("getDirectoryNamesInsidePath", () => {
     it("should fail if path does not exist", async () => {
-      await expect(
-        getDirectoryNamesInsidePath("xxx"),
-      ).to.eventually.be.rejectedWith("Path does not exist: xxx");
+      await expect(getDirectoryNamesInsidePath("xxx")).rejects.toThrow(
+        "Path does not exist: xxx",
+      );
     });

     it("should fail if path is not a directory", async () => {
       const filePath = path.join(data2Dir, "empty1.ql");
-      await expect(
-        getDirectoryNamesInsidePath(filePath),
-      ).to.eventually.be.rejectedWith(`Path is not a directory: ${filePath}`);
+      await expect(getDirectoryNamesInsidePath(filePath)).rejects.toThrow(
+        `Path is not a directory: ${filePath}`,
+      );
     });

     it("should find sub-folders", async () => {
       const result = await getDirectoryNamesInsidePath(data2Dir);
-      expect(result).to.deep.equal(["sub-folder"]);
+      expect(result).toEqual(["sub-folder"]);
     });
   });
 });

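This file also drops chai-as-promised: Jest's built-in rejects modifier covers the eventually.rejectedWith pattern. A minimal self-contained sketch (readConfig is a hypothetical function, for illustration only):

// Hypothetical failing async operation.
async function readConfig(p: string): Promise<string> {
  throw new Error(`Path does not exist: ${p}`);
}

it("asserts on rejected promises without chai-as-promised", async () => {
  // chai-as-promised:
  //   await expect(readConfig("xxx")).to.eventually.be.rejectedWith("Path does not exist: xxx");
  await expect(readConfig("xxx")).rejects.toThrow("Path does not exist: xxx");
});
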
@@ -1,13 +1,10 @@
 import { fail } from "assert";
-import { expect } from "chai";
-
 import { asyncFilter, getErrorMessage } from "../../src/pure/helpers-pure";

 describe("helpers-pure", () => {
   it("should filter asynchronously", async () => {
-    expect(
-      await asyncFilter([1, 2, 3], (x) => Promise.resolve(x > 2)),
-    ).to.deep.eq([3]);
+    expect(await asyncFilter([1, 2, 3], (x) => Promise.resolve(x > 2))).toEqual(
+      [3],
+    );
   });

   it("should throw on error when filtering", async () => {
@@ -18,7 +15,7 @@ describe("helpers-pure", () => {
       await asyncFilter([1, 2, 3], rejects);
       fail("Should have thrown");
     } catch (e) {
-      expect(getErrorMessage(e)).to.eq("opps");
+      expect(getErrorMessage(e)).toBe("opps");
     }
   });
 });

@@ -1,26 +1,24 @@
-import { expect } from "chai";
-import "mocha";
 import {
   tryGetRemoteLocation,
   tryGetResolvableLocation,
 } from "../../src/pure/bqrs-utils";

-describe("processing string locations", function () {
-  it("should detect Windows whole-file locations", function () {
+describe("processing string locations", () => {
+  it("should detect Windows whole-file locations", () => {
     const loc = "file://C:/path/to/file.ext:0:0:0:0";
     const wholeFileLoc = tryGetResolvableLocation(loc);
-    expect(wholeFileLoc).to.eql({ uri: "C:/path/to/file.ext" });
+    expect(wholeFileLoc).toEqual({ uri: "C:/path/to/file.ext" });
   });
-  it("should detect Unix whole-file locations", function () {
+  it("should detect Unix whole-file locations", () => {
     const loc = "file:///path/to/file.ext:0:0:0:0";
     const wholeFileLoc = tryGetResolvableLocation(loc);
-    expect(wholeFileLoc).to.eql({ uri: "/path/to/file.ext" });
+    expect(wholeFileLoc).toEqual({ uri: "/path/to/file.ext" });
   });

-  it("should detect Unix 5-part locations", function () {
+  it("should detect Unix 5-part locations", () => {
     const loc = "file:///path/to/file.ext:1:2:3:4";
     const wholeFileLoc = tryGetResolvableLocation(loc);
-    expect(wholeFileLoc).to.eql({
+    expect(wholeFileLoc).toEqual({
       uri: "/path/to/file.ext",
       startLine: 1,
       startColumn: 2,
@@ -28,16 +26,16 @@ describe("processing string locations", function () {
       endColumn: 4,
     });
   });
-  it("should ignore other string locations", function () {
+  it("should ignore other string locations", () => {
     for (const loc of ["file:///path/to/file.ext", "I am not a location"]) {
       const wholeFileLoc = tryGetResolvableLocation(loc);
-      expect(wholeFileLoc).to.be.undefined;
+      expect(wholeFileLoc).toBeUndefined();
     }
   });
 });

-describe("getting links to remote (GitHub) locations", function () {
-  it("should return undefined if resolvableLocation is undefined", function () {
+describe("getting links to remote (GitHub) locations", () => {
+  it("should return undefined if resolvableLocation is undefined", () => {
     const loc = "not a location";
     const fileLinkPrefix = "";
     const sourceLocationPrefix = "";
@@ -48,10 +46,10 @@ describe("getting links to remote (GitHub) locations", function () {
       sourceLocationPrefix,
     );

-    expect(link).to.be.undefined;
+    expect(link).toBeUndefined();
   });

-  it("should return undefined if resolvableLocation has the wrong format", function () {
+  it("should return undefined if resolvableLocation has the wrong format", () => {
     const loc = {
       uri: "file:/path/to/file.ext",
       startLine: 194,
@@ -68,10 +66,10 @@ describe("getting links to remote (GitHub) locations", function () {
       sourceLocationPrefix,
     );

-    expect(link).to.be.undefined;
+    expect(link).toBeUndefined();
   });

-  it("should return a remote file ref if the sourceLocationPrefix and resolvableLocation match up", function () {
+  it("should return a remote file ref if the sourceLocationPrefix and resolvableLocation match up", () => {
     const loc = {
       uri: "file:/home/foo/bar/path/to/file.ext",
       startLine: 194,
@@ -88,12 +86,12 @@ describe("getting links to remote (GitHub) locations", function () {
       sourceLocationPrefix,
     );

-    expect(link).to.eql(
+    expect(link).toEqual(
       "https://github.com/owner/repo/blob/sha1234/path/to/file.ext#L194-L237",
     );
   });

-  it("should return undefined if the sourceLocationPrefix is missing and resolvableLocation doesn't match the default format", function () {
+  it("should return undefined if the sourceLocationPrefix is missing and resolvableLocation doesn't match the default format", () => {
     const loc = {
       uri: "file:/home/foo/bar/path/to/file.ext",
       startLine: 194,
@@ -110,10 +108,10 @@ describe("getting links to remote (GitHub) locations", function () {
       sourceLocationPrefix,
     );

-    expect(link).to.eql(undefined);
+    expect(link).toBeUndefined();
   });

-  it("should return a remote file ref if the sourceLocationPrefix is missing, but the resolvableLocation matches the default format", function () {
+  it("should return a remote file ref if the sourceLocationPrefix is missing, but the resolvableLocation matches the default format", () => {
     const loc = {
       uri: "file:/home/runner/work/foo/bar/path/to/file.ext",
       startLine: 194,
@@ -130,7 +128,7 @@ describe("getting links to remote (GitHub) locations", function () {
       sourceLocationPrefix,
     );

-    expect(link).to.eql(
+    expect(link).toEqual(
       "https://github.com/owner/repo/blob/sha1234/path/to/file.ext#L194-L237",
     );
   });

@@ -1,5 +1,3 @@
-import { expect } from "chai";
-import "mocha";
 import {
   EvaluationLogProblemReporter,
   EvaluationLogScannerSet,
@@ -36,8 +34,8 @@ class TestProblemReporter implements EvaluationLogProblemReporter {
   }
 }

-describe("log scanners", function () {
-  it("should detect bad join orders", async function () {
+describe("log scanners", () => {
+  it("should detect bad join orders", async () => {
     const scanners = new EvaluationLogScannerSet();
     scanners.registerLogScannerProvider(new JoinOrderScannerProvider(() => 50));
     const summaryPath = path.join(
@@ -47,13 +45,13 @@
     const problemReporter = new TestProblemReporter();
     await scanners.scanLog(summaryPath, problemReporter);

-    expect(problemReporter.problems.length).to.equal(1);
-    expect(problemReporter.problems[0].predicateName).to.equal("#select#ff");
-    expect(problemReporter.problems[0].raHash).to.equal(
+    expect(problemReporter.problems.length).toBe(1);
+    expect(problemReporter.problems[0].predicateName).toBe("#select#ff");
+    expect(problemReporter.problems[0].raHash).toBe(
       "1bb43c97jpmuh8r2v0f9hktim63",
     );
-    expect(problemReporter.problems[0].iteration).to.equal(0);
-    expect(problemReporter.problems[0].message).to.equal(
+    expect(problemReporter.problems[0].iteration).toBe(0);
+    expect(problemReporter.problems[0].message).toBe(
       "Relation '#select#ff' has an inefficient join order. Its join order metric is 4961.83, which is larger than the threshold of 50.00.",
     );
   });

@@ -1,49 +1,47 @@
-import { expect } from "chai";
 import * as path from "path";
-import "mocha";

 import { parseViewerData } from "../../src/pure/log-summary-parser";

-describe("Evaluator log summary tests", async function () {
-  describe("for a valid summary text", async function () {
-    it("should return only valid EvalLogData objects", async function () {
+describe("Evaluator log summary tests", () => {
+  describe("for a valid summary text", () => {
+    it("should return only valid EvalLogData objects", async () => {
       const validSummaryPath = path.join(
         __dirname,
         "evaluator-log-summaries/valid-summary.jsonl",
       );
       const logDataItems = await parseViewerData(validSummaryPath);
-      expect(logDataItems).to.not.be.undefined;
-      expect(logDataItems.length).to.eq(3);
+      expect(logDataItems).toBeDefined();
+      expect(logDataItems.length).toBe(3);
       for (const item of logDataItems) {
-        expect(item.predicateName).to.not.be.empty;
-        expect(item.millis).to.be.a("number");
-        expect(item.resultSize).to.be.a("number");
-        expect(item.ra).to.not.be.undefined;
-        expect(item.ra).to.not.be.empty;
+        expect(item.predicateName).not.toHaveLength(0);
+        expect(item.millis).toEqual(expect.any(Number));
+        expect(item.resultSize).toEqual(expect.any(Number));
+        expect(item.ra).toBeDefined();
+        expect(Object.keys(item.ra)).not.toHaveLength(0);
         for (const [pipeline, steps] of Object.entries(item.ra)) {
-          expect(pipeline).to.not.be.empty;
-          expect(steps).to.not.be.undefined;
-          expect(steps.length).to.be.greaterThan(0);
+          expect(Object.keys(pipeline)).not.toHaveLength(0);
+          expect(steps).toBeDefined();
+          expect(steps.length).toBeGreaterThan(0);
         }
       }
     });

-    it("should not parse a summary header object", async function () {
+    it("should not parse a summary header object", async () => {
       const invalidHeaderPath = path.join(
         __dirname,
         "evaluator-log-summaries/invalid-header.jsonl",
       );
       const logDataItems = await parseViewerData(invalidHeaderPath);
-      expect(logDataItems.length).to.eq(0);
+      expect(logDataItems.length).toBe(0);
     });

-    it("should not parse a log event missing RA or millis fields", async function () {
+    it("should not parse a log event missing RA or millis fields", async () => {
       const invalidSummaryPath = path.join(
         __dirname,
         "evaluator-log-summaries/invalid-summary.jsonl",
       );
       const logDataItems = await parseViewerData(invalidSummaryPath);
-      expect(logDataItems.length).to.eq(0);
+      expect(logDataItems.length).toBe(0);
     });
   });
 });

@@ -1,26 +1,37 @@
-import "chai/register-should";
-import * as chai from "chai";
 import * as fs from "fs-extra";
 import * as path from "path";
 import * as tmp from "tmp";
-import "mocha";
-import * as sinonChai from "sinon-chai";
-import * as sinon from "sinon";
-import * as pq from "proxyquire";
 import { OutputChannelLogger } from "../../src/logging";

-const proxyquire = pq.noPreserveCache().noCallThru();
-chai.use(sinonChai);
-const expect = chai.expect;
+jest.setTimeout(999999);
+
+jest.mock(
+  "vscode",
+  () => {
+    const mockOutputChannel = {
+      append: jest.fn(),
+      appendLine: jest.fn(),
+      show: jest.fn(),
+      dispose: jest.fn(),
+    };
+
+    return {
+      window: {
+        createOutputChannel: () => mockOutputChannel,
+      },
+      mockOutputChannel,
+    };
+  },
+  {
+    virtual: true,
+  },
+);

 describe("OutputChannelLogger tests", function () {
-  this.timeout(999999);
   let OutputChannelLogger;
   const tempFolders: Record<string, tmp.DirResult> = {};
   let logger: any;
-  let mockOutputChannel: Record<string, sinon.SinonStub>;

   beforeEach(async () => {
     OutputChannelLogger = createModule().OutputChannelLogger;
     tempFolders.globalStoragePath = tmp.dirSync({
       prefix: "logging-tests-global",
     });
@@ -35,21 +46,22 @@
     tempFolders.storagePath.removeCallback();
   });

+  // eslint-disable-next-line @typescript-eslint/no-var-requires
+  const mockOutputChannel = require("vscode").mockOutputChannel;
+
   it("should log to the output channel", async () => {
     await logger.log("xxx");
-    expect(mockOutputChannel.appendLine).to.have.been.calledWith("xxx");
-    expect(mockOutputChannel.append).not.to.have.been.calledWith("xxx");
+    expect(mockOutputChannel.appendLine).toBeCalledWith("xxx");
+    expect(mockOutputChannel.append).not.toBeCalledWith("xxx");

     await logger.log("yyy", { trailingNewline: false });
-    expect(mockOutputChannel.appendLine).not.to.have.been.calledWith("yyy");
-    expect(mockOutputChannel.append).to.have.been.calledWith("yyy");
+    expect(mockOutputChannel.appendLine).not.toBeCalledWith("yyy");
+    expect(mockOutputChannel.append).toBeCalledWith("yyy");

     await logger.log("zzz", createLogOptions("hucairz"));

     // should have created 1 side log
-    expect(fs.readdirSync(tempFolders.storagePath.name)).to.deep.equal([
-      "hucairz",
-    ]);
+    expect(fs.readdirSync(tempFolders.storagePath.name)).toEqual(["hucairz"]);
   });

   it("should create a side log", async () => {
@@ -59,42 +71,20 @@
     await logger.log("aaa");

     // expect 2 side logs
-    expect(fs.readdirSync(tempFolders.storagePath.name).length).to.equal(2);
+    expect(fs.readdirSync(tempFolders.storagePath.name).length).toBe(2);

     // contents
     expect(
       fs.readFileSync(path.join(tempFolders.storagePath.name, "first"), "utf8"),
-    ).to.equal("xxx\nzzz");
+    ).toBe("xxx\nzzz");
     expect(
       fs.readFileSync(
         path.join(tempFolders.storagePath.name, "second"),
         "utf8",
       ),
-    ).to.equal("yyy\n");
+    ).toBe("yyy\n");
   });

-  function createModule(): any {
-    mockOutputChannel = {
-      append: sinon.stub(),
-      appendLine: sinon.stub(),
-      show: sinon.stub(),
-      dispose: sinon.stub(),
-    };
-
-    return proxyquire("../../src/logging", {
-      vscode: {
-        window: {
-          createOutputChannel: () => mockOutputChannel,
-        },
-        Disposable: function () {
-          /**/
-        },
-        "@noCallThru": true,
-        "@global": true,
-      },
-    });
-  }
-
   function createLogOptions(
     additionalLogLocation: string,
     trailingNewline?: boolean,

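The jest.mock(..., { virtual: true }) call above is what replaces proxyquire here: since the vscode module only exists inside the editor host, the factory fabricates it for unit tests, and exposing mockOutputChannel on the mocked module lets tests reach the spies. A minimal standalone sketch of the same trick ("some-host-api" is a hypothetical module name for illustration):

// Mock a module that doesn't exist on disk; virtual: true tells Jest
// not to try resolving it from node_modules.
jest.mock(
  "some-host-api",
  () => ({
    window: {
      showMessage: jest.fn(),
    },
  }),
  { virtual: true },
);

it("uses the virtual mock", () => {
  // eslint-disable-next-line @typescript-eslint/no-var-requires
  const host = require("some-host-api");
  host.window.showMessage("hello");
  expect(host.window.showMessage).toBeCalledWith("hello");
});
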
@@ -1,12 +1,9 @@
import { expect } from "chai";
import "mocha";

import { formatDecimal } from "../../src/pure/number";

describe("Number", () => {
  it("should return a formatted decimal", () => {
    expect(formatDecimal(9)).to.eq("9");
    expect(formatDecimal(10_000)).to.eq("10,000");
    expect(formatDecimal(100_000_000_000)).to.eq("100,000,000,000");
    expect(formatDecimal(9)).toBe("9");
    expect(formatDecimal(10_000)).toBe("10,000");
    expect(formatDecimal(100_000_000_000)).toBe("100,000,000,000");
  });
});
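This file shows the conversion at its most mechanical: the chai and mocha imports are deleted (Jest provides expect and the test globals itself) and each chai matcher is replaced by its Jest counterpart. The same mapping recurs in every file below; a reference sketch, with each pairing taken from hunks in this diff:

// Chai assertion                           -> Jest matcher used in this PR
// expect(x).to.eq(y) / .to.equal(y)        -> expect(x).toBe(y)             (strict equality)
// expect(x).to.eql(y) / .to.deep.equal(y)  -> expect(x).toEqual(y)          (structural equality)
// expect(x).not.to.be.undefined            -> expect(x).toBeDefined()
// expect(x).to.be.undefined                -> expect(x).toBeUndefined()
// expect(x).to.be.ok                       -> expect(x).toBeTruthy()
// expect(x).to.be.true                     -> expect(x).toBe(true)
// expect(x).to.be.an("ArrayBuffer")        -> expect(x).toBeInstanceOf(ArrayBuffer)
// expect(stub).to.have.been.calledWith(y)  -> expect(fn).toBeCalledWith(y)  (sinon stub -> jest.fn())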
@@ -1,5 +1,3 @@
import { expect } from "chai";

import * as Octokit from "@octokit/rest";
import { retry } from "@octokit/plugin-retry";

@@ -25,9 +23,9 @@ const mockCredentials = {
} as unknown as Credentials;

const mockServer = new MockGitHubApiServer();
before(() => mockServer.startServer());
beforeAll(() => mockServer.startServer());
afterEach(() => mockServer.unloadScenario());
after(() => mockServer.stopServer());
afterAll(() => mockServer.stopServer());

const controllerRepoId = variantAnalysisJson.response.body.controller_repo.id;
const variantAnalysisId = variantAnalysisJson.response.body.id;
@@ -42,8 +40,8 @@ describe("submitVariantAnalysis", () => {
      createMockSubmission(),
    );

    expect(result).not.to.be.undefined;
    expect(result.id).to.eq(variantAnalysisId);
    expect(result).toBeDefined();
    expect(result.id).toBe(variantAnalysisId);
  });
});

@@ -57,8 +55,8 @@ describe("getVariantAnalysis", () => {
      variantAnalysisId,
    );

    expect(result).not.to.be.undefined;
    expect(result.status).not.to.be.undefined;
    expect(result).toBeDefined();
    expect(result.status).toBeDefined();
  });
});

@@ -73,8 +71,8 @@ describe("getVariantAnalysisRepo", () => {
      repoTaskId,
    );

    expect(result).not.to.be.undefined;
    expect(result.repository.id).to.eq(repoTaskId);
    expect(result).toBeDefined();
    expect(result.repository.id).toBe(repoTaskId);
  });
});

@@ -87,9 +85,9 @@ describe("getVariantAnalysisRepoResult", () => {
      `https://objects-origin.githubusercontent.com/codeql-query-console/codeql-variant-analysis-repo-tasks/${variantAnalysisId}/${repoTaskId}/${faker.datatype.uuid()}`,
    );

    expect(result).not.to.be.undefined;
    expect(result).to.be.an("ArrayBuffer");
    expect(result.byteLength).to.eq(
    expect(result).toBeDefined();
    expect(result).toBeInstanceOf(ArrayBuffer);
    expect(result.byteLength).toBe(
      variantAnalysisRepoJson.response.body.artifact_size_in_bytes,
    );
  });
@@ -105,7 +103,7 @@ describe("getRepositoryFromNwo", () => {
      "mrva-demo-controller-repo",
    );

    expect(result).not.to.be.undefined;
    expect(result.id).to.eq(getRepoJson.response.body.id);
    expect(result).toBeDefined();
    expect(result.id).toBe(getRepoJson.response.body.id);
  });
});
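Besides the matcher swap, this file renames the suite-level lifecycle hooks: Mocha's before/after become Jest's beforeAll/afterAll, while beforeEach/afterEach keep the same names in both runners. The shape of the converted setup, as a short sketch:

// Mocha: before(fn) / after(fn)  ->  Jest: beforeAll(fn) / afterAll(fn)
beforeAll(() => mockServer.startServer()); // runs once, before every test in the file
afterEach(() => mockServer.unloadScenario()); // unchanged between the two runners
afterAll(() => mockServer.stopServer()); // runs once, after the last test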
@@ -1,4 +1,3 @@
import { expect } from "chai";
import * as path from "path";
import * as fs from "fs-extra";
import {
@@ -6,9 +5,9 @@ import {
  MarkdownFile,
} from "../../../../src/remote-queries/remote-queries-markdown-generation";

describe("markdown generation", async function () {
  describe("for path-problem query", async function () {
    it("should generate markdown file for each repo with results", async function () {
describe("markdown generation", () => {
  describe("for path-problem query", () => {
    it("should generate markdown file for each repo with results", async () => {
      const pathProblemQuery = JSON.parse(
        await fs.readFile(
          path.join(
@@ -42,8 +41,8 @@ describe("markdown generation", async function () {
    });
  });

  describe("for problem query", async function () {
    it("should generate markdown file for each repo with results", async function () {
  describe("for problem query", () => {
    it("should generate markdown file for each repo with results", async () => {
      const problemQuery = JSON.parse(
        await fs.readFile(
          path.join(
@@ -76,8 +75,8 @@ describe("markdown generation", async function () {
    });
  });

  describe("for non-alert query", async function () {
    it("should generate markdown file for each repo with results", async function () {
  describe("for non-alert query", () => {
    it("should generate markdown file for each repo with results", async () => {
      const query = JSON.parse(
        await fs.readFile(
          path.join(__dirname, "data/raw-results/query.json"),
@@ -118,16 +117,16 @@ async function checkGeneratedMarkdown(
  const expectedDir = path.join(__dirname, testDataBasePath);
  const expectedFiles = await fs.readdir(expectedDir);

  expect(actualFiles.length).to.equal(expectedFiles.length);
  expect(actualFiles.length).toBe(expectedFiles.length);

  for (const expectedFile of expectedFiles) {
    const actualFile = actualFiles.find(
      (f) => `${f.fileName}.md` === expectedFile,
    );
    expect(actualFile).to.not.be.undefined;
    expect(actualFile).toBeDefined();
    const expectedContent = await readTestOutputFile(
      path.join(testDataBasePath, expectedFile),
    );
    expect(actualFile!.content.join("\n")).to.equal(expectedContent);
    expect(actualFile!.content.join("\n")).toBe(expectedContent);
  }
}
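Note that the async function wrappers disappear from the describe callbacks here, not just the matchers. Jest runs describe callbacks synchronously while it collects the test tree, so only the it bodies should stay async. A minimal sketch of the corrected shape:

// Jest collects tests by running describe bodies synchronously,
// so describe callbacks become plain arrow functions...
describe("markdown generation", () => {
  // ...and any awaiting happens inside the individual test:
  it("should generate markdown file for each repo with results", async () => {
    // await fs.readFile(...), then assert
  });
});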
@@ -1,4 +1,3 @@
import { expect } from "chai";
import { faker } from "@faker-js/faker";
import { VariantAnalysisScannedRepository as ApiVariantAnalysisScannedRepository } from "../../../src/remote-queries/gh-api/variant-analysis";
import {
@@ -20,7 +19,7 @@ import { createMockApiResponse } from "../../../src/vscode-tests/factories/remot
import { createMockSubmission } from "../../../src/vscode-tests/factories/remote-queries/shared/variant-analysis-submission";
import { createMockVariantAnalysisRepoTask } from "../../../src/vscode-tests/factories/remote-queries/gh-api/variant-analysis-repo-task";

describe(processVariantAnalysis.name, function () {
describe(processVariantAnalysis.name, () => {
  const scannedRepos = createMockScannedRepos();
  const skippedRepos = createMockSkippedRepos();
  const mockApiResponse = createMockApiResponse(
@@ -40,7 +39,7 @@ describe(processVariantAnalysis.name, function () {
      over_limit_repos,
    } = skippedRepos;

    expect(result).to.eql({
    expect(result).toEqual({
      id: mockApiResponse.id,
      controllerRepo: {
        id: mockApiResponse.controller_repo.id,
@@ -179,7 +178,7 @@ describe(processVariantAnalysisRepositoryTask.name, () => {
  const mockApiResponse = createMockVariantAnalysisRepoTask();

  it("should return the correct result", () => {
    expect(processVariantAnalysisRepositoryTask(mockApiResponse)).to.deep.eq({
    expect(processVariantAnalysisRepositoryTask(mockApiResponse)).toEqual({
      repository: {
        id: mockApiResponse.repository.id,
        fullName: mockApiResponse.repository.full_name,
@@ -204,7 +203,7 @@ describe(processScannedRepository.name, () => {
  );

  it("should return the correct result", () => {
    expect(processScannedRepository(mockApiResponse)).to.deep.eq({
    expect(processScannedRepository(mockApiResponse)).toEqual({
      repository: {
        id: mockApiResponse.repository.id,
        fullName: mockApiResponse.repository.full_name,
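Both chai spellings of deep equality (.to.eql and .to.deep.eq) collapse into Jest's toEqual here. One behavioural nuance worth knowing when reviewing such conversions, shown as a sketch (the objects are illustrative, not taken from this PR):

// toEqual compares structurally, like chai's .eql:
expect({ id: 1, name: "a" }).toEqual({ id: 1, name: "a" }); // passes
// but toEqual ignores properties whose value is undefined;
// toStrictEqual would flag them:
expect({ id: 1, extra: undefined }).toEqual({ id: 1 }); // also passes
expect({ id: 1, extra: undefined }).not.toStrictEqual({ id: 1 }); // differs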
@@ -1,6 +1,3 @@
import "mocha";
import * as chaiAsPromised from "chai-as-promised";
import * as chai from "chai";
import * as sarif from "sarif";
import {
  extractAnalysisAlerts,
@@ -12,9 +9,6 @@ import {
  AnalysisMessageLocationToken,
} from "../../src/remote-queries/shared/analysis-result";

chai.use(chaiAsPromised);
const expect = chai.expect;

describe("SARIF processing", () => {
  describe("tryGetRule", () => {
    describe("Using the tool driver", () => {
@@ -30,7 +24,7 @@ describe("SARIF processing", () => {

        const rule = tryGetRule(sarifRun, result);

        expect(rule).to.be.undefined;
        expect(rule).toBeUndefined();
      });

      it("should return undefined if rule missing from tool driver", () => {
@@ -60,7 +54,7 @@ describe("SARIF processing", () => {

        const rule = tryGetRule(sarifRun, result);

        expect(rule).to.be.undefined;
        expect(rule).toBeUndefined();
      });

      it("should return rule if it has been set on the tool driver", () => {
@@ -87,8 +81,8 @@ describe("SARIF processing", () => {

        const rule = tryGetRule(sarifRun, result);

        expect(rule).to.be.ok;
        expect(rule!.id).to.equal(result!.rule!.id);
        expect(rule).toBeTruthy();
        expect(rule!.id).toBe(result!.rule!.id);
      });
    });

@@ -136,7 +130,7 @@ describe("SARIF processing", () => {

        const rule = tryGetRule(sarifRun, result);

        expect(rule).to.be.undefined;
        expect(rule).toBeUndefined();
      });

      it("should return undefined if tool component index not set", () => {
@@ -182,7 +176,7 @@ describe("SARIF processing", () => {

        const rule = tryGetRule(sarifRun, result);

        expect(rule).to.be.undefined;
        expect(rule).toBeUndefined();
      });

      it("should return undefined if tool extensions not set", () => {
@@ -205,7 +199,7 @@ describe("SARIF processing", () => {

        const rule = tryGetRule(sarifRun, result);

        expect(rule).to.be.undefined;
        expect(rule).toBeUndefined();
      });

      it("should return undefined if tool extensions do not contain index", () => {
@@ -241,7 +235,7 @@ describe("SARIF processing", () => {

        const rule = tryGetRule(sarifRun, result);

        expect(rule).to.be.undefined;
        expect(rule).toBeUndefined();
      });

      it("should return rule if all information is defined", () => {
@@ -288,8 +282,8 @@ describe("SARIF processing", () => {

        const rule = tryGetRule(sarifRun, result);

        expect(rule).to.be.ok;
        expect(rule!.id).to.equal("D");
        expect(rule).toBeTruthy();
        expect(rule!.id).toBe("D");
      });
    });
  });
@@ -308,7 +302,7 @@ describe("SARIF processing", () => {
      } as sarif.Run;

      const severity = tryGetSeverity(sarifRun, result, rule);
      expect(severity).to.be.undefined;
      expect(severity).toBeUndefined();
    });

    it("should return undefined if severity not set on rule", () => {
@@ -336,7 +330,7 @@ describe("SARIF processing", () => {
      } as sarif.Run;

      const severity = tryGetSeverity(sarifRun, result, rule);
      expect(severity).to.be.undefined;
      expect(severity).toBeUndefined();
    });

    const severityMap = {
@@ -371,7 +365,7 @@ describe("SARIF processing", () => {
      } as sarif.Run;

      const severity = tryGetSeverity(sarifRun, result, rule);
      expect(severity).to.equal(parsedSeverity);
      expect(severity).toBe(parsedSeverity);
    });
  });
});
@@ -385,8 +379,8 @@ describe("SARIF processing", () => {

    const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);

    expect(result).to.be.ok;
    expect(result.alerts.length).to.equal(0);
    expect(result).toBeTruthy();
    expect(result.alerts.length).toBe(0);
  });

  it("should not return any results for runs that have no results", () => {
@@ -403,8 +397,8 @@ describe("SARIF processing", () => {

    const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);

    expect(result).to.be.ok;
    expect(result.alerts.length).to.equal(0);
    expect(result).toBeTruthy();
    expect(result.alerts.length).toBe(0);
  });

  it("should return errors for results that have no message", () => {
@@ -413,8 +407,8 @@ describe("SARIF processing", () => {

    const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);

    expect(result).to.be.ok;
    expect(result.errors.length).to.equal(1);
    expect(result).toBeTruthy();
    expect(result.errors.length).toBe(1);
    expectResultParsingError(result.errors[0]);
  });

@@ -433,9 +427,9 @@ describe("SARIF processing", () => {

    const actualCodeSnippet = result.alerts[0].codeSnippet;

    expect(result).to.be.ok;
    expect(result).toBeTruthy();
    expectNoParsingError(result);
    expect(actualCodeSnippet).to.deep.equal(expectedCodeSnippet);
    expect(actualCodeSnippet).toEqual(expectedCodeSnippet);
  });

  it("should use highlightedRegion for result locations with no contextRegion", () => {
@@ -453,9 +447,9 @@ describe("SARIF processing", () => {

    const actualCodeSnippet = result.alerts[0].codeSnippet;

    expect(result).to.be.ok;
    expect(result).toBeTruthy();
    expectNoParsingError(result);
    expect(actualCodeSnippet).to.deep.equal(expectedCodeSnippet);
    expect(actualCodeSnippet).toEqual(expectedCodeSnippet);
  });

  it("should not return errors for result locations with no region", () => {
@@ -465,8 +459,8 @@ describe("SARIF processing", () => {

    const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);

    expect(result).to.be.ok;
    expect(result.alerts.length).to.equal(1);
    expect(result).toBeTruthy();
    expect(result.alerts.length).toBe(1);
    expectNoParsingError(result);
  });

@@ -477,8 +471,8 @@ describe("SARIF processing", () => {

    const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);

    expect(result).to.be.ok;
    expect(result.errors.length).to.equal(1);
    expect(result).toBeTruthy();
    expect(result.errors.length).toBe(1);
    expectResultParsingError(result.errors[0]);
  });

@@ -565,31 +559,31 @@ describe("SARIF processing", () => {
    } as sarif.Log;

    const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);
    expect(result).to.be.ok;
    expect(result.errors.length).to.equal(0);
    expect(result.alerts.length).to.equal(3);
    expect(result).toBeTruthy();
    expect(result.errors.length).toBe(0);
    expect(result.alerts.length).toBe(3);
    expect(
      result.alerts.find(
        (a) =>
          getMessageText(a.message) === "msg1" &&
          a.codeSnippet!.text === "foo",
      ),
    ).to.be.ok;
    ).toBeTruthy();
    expect(
      result.alerts.find(
        (a) =>
          getMessageText(a.message) === "msg1" &&
          a.codeSnippet!.text === "bar",
      ),
    ).to.be.ok;
    ).toBeTruthy();
    expect(
      result.alerts.find(
        (a) =>
          getMessageText(a.message) === "msg2" &&
          a.codeSnippet!.text === "baz",
      ),
    ).to.be.ok;
    expect(result.alerts.every((a) => a.severity === "Warning")).to.be.true;
    ).toBeTruthy();
    expect(result.alerts.every((a) => a.severity === "Warning")).toBe(true);
  });

  it("should deal with complex messages", () => {
@@ -615,20 +609,20 @@ describe("SARIF processing", () => {

    const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);

    expect(result).to.be.ok;
    expect(result.errors.length).to.equal(0);
    expect(result.alerts.length).to.equal(1);
    expect(result).toBeTruthy();
    expect(result.errors.length).toBe(0);
    expect(result.alerts.length).toBe(1);
    const message = result.alerts[0].message;
    expect(message.tokens.length).to.equal(3);
    expect(message.tokens[0].t).to.equal("text");
    expect(message.tokens[0].text).to.equal(
    expect(message.tokens.length).toBe(3);
    expect(message.tokens[0].t).toBe("text");
    expect(message.tokens[0].text).toBe(
      "This shell command depends on an uncontrolled ",
    );
    expect(message.tokens[1].t).to.equal("location");
    expect(message.tokens[1].text).to.equal("absolute path");
    expect(message.tokens[1].t).toBe("location");
    expect(message.tokens[1].text).toBe("absolute path");
    expect(
      (message.tokens[1] as AnalysisMessageLocationToken).location,
    ).to.deep.equal({
    ).toEqual({
      fileLink: {
        fileLinkPrefix: fakefileLinkPrefix,
        filePath: "npm-packages/meteor-installer/config.js",
@@ -640,18 +634,18 @@ describe("SARIF processing", () => {
        endColumn: 60,
      },
    });
    expect(message.tokens[2].t).to.equal("text");
    expect(message.tokens[2].text).to.equal(".");
    expect(message.tokens[2].t).toBe("text");
    expect(message.tokens[2].text).toBe(".");
  });
});

function expectResultParsingError(msg: string) {
  expect(msg.startsWith("Error when processing SARIF result")).to.be.true;
  expect(msg.startsWith("Error when processing SARIF result")).toBe(true);
}

function expectNoParsingError(result: { errors: string[] }) {
  const array = result.errors;
  expect(array.length, array.join()).to.equal(0);
  expect(array).toEqual([]);
}

function buildValidSarifLog(): sarif.Log {
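The expectNoParsingError rewrite above is a good example of handling chai's optional failure message, which Jest does not support: chai's expect accepts a second message argument (array.join() here), while Jest's expect takes exactly one value. Asserting against the empty array instead lets Jest's own failure diff display the unexpected errors, as a sketch:

// Chai: second argument is a custom failure message.
//   expect(array.length, array.join()).to.equal(0);
// Jest: no message parameter, so compare the whole array;
// on failure the diff itself shows which errors were present.
expect(result.errors).toEqual([]);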
@@ -1,5 +1,3 @@
import "mocha";
import { expect } from "chai";
import * as Sarif from "sarif";

import {
@@ -10,74 +8,72 @@ import {
} from "../../src/pure/sarif-utils";

describe("parsing sarif", () => {
  it("should be able to parse a simple message from the spec", async function () {
  it("should be able to parse a simple message from the spec", async () => {
    const message = "Tainted data was used. The data came from [here](3).";
    const results = parseSarifPlainTextMessage(message);
    expect(results).to.deep.equal([
    expect(results).toEqual([
      "Tainted data was used. The data came from ",
      { dest: 3, text: "here" },
      ".",
    ]);
  });

  it("should be able to parse a complex message from the spec", async function () {
  it("should be able to parse a complex message from the spec", async () => {
    const message = "Prohibited term used in [para\\[0\\]\\\\spans\\[2\\]](1).";
    const results = parseSarifPlainTextMessage(message);
    expect(results).to.deep.equal([
    expect(results).toEqual([
      "Prohibited term used in ",
      { dest: 1, text: "para[0]\\spans[2]" },
      ".",
    ]);
  });
  it("should be able to parse a broken complex message from the spec", async function () {
  it("should be able to parse a broken complex message from the spec", async () => {
    const message = "Prohibited term used in [para\\[0\\]\\\\spans\\[2\\](1).";
    const results = parseSarifPlainTextMessage(message);
    expect(results).to.deep.equal([
      "Prohibited term used in [para[0]\\spans[2](1).",
    ]);
    expect(results).toEqual(["Prohibited term used in [para[0]\\spans[2](1)."]);
  });
  it("should be able to parse a message with extra escaping the spec", async function () {
  it("should be able to parse a message with extra escaping the spec", async () => {
    const message = "Tainted data was used. The data came from \\[here](3).";
    const results = parseSarifPlainTextMessage(message);
    expect(results).to.deep.equal([
    expect(results).toEqual([
      "Tainted data was used. The data came from [here](3).",
    ]);
  });

  it("should unescape sarif text", () => {
    expect(unescapeSarifText("\\\\ \\\\ \\[ \\[ \\] \\]")).to.eq(
    expect(unescapeSarifText("\\\\ \\\\ \\[ \\[ \\] \\]")).toBe(
      "\\ \\ [ [ ] ]",
    );
    // Also show that unescaped special chars are unchanged...is this correct?
    expect(unescapeSarifText("\\ \\ [ [ ] ]")).to.eq("\\ \\ [ [ ] ]");
    expect(unescapeSarifText("\\ \\ [ [ ] ]")).toBe("\\ \\ [ [ ] ]");
  });

  it("should normalize source locations", () => {
    expect(getPathRelativeToSourceLocationPrefix("C:\\a\\b", "?x=test")).to.eq(
    expect(getPathRelativeToSourceLocationPrefix("C:\\a\\b", "?x=test")).toBe(
      "file:/C:/a/b/?x=test",
    );
    expect(
      getPathRelativeToSourceLocationPrefix("C:\\a\\b", "%3Fx%3Dtest"),
    ).to.eq("file:/C:/a/b/%3Fx%3Dtest");
    ).toBe("file:/C:/a/b/%3Fx%3Dtest");
    expect(
      getPathRelativeToSourceLocationPrefix("C:\\a =\\b c?", "?x=test"),
    ).to.eq("file:/C:/a%20%3D/b%20c%3F/?x=test");
    expect(getPathRelativeToSourceLocationPrefix("/a/b/c", "?x=test")).to.eq(
    ).toBe("file:/C:/a%20%3D/b%20c%3F/?x=test");
    expect(getPathRelativeToSourceLocationPrefix("/a/b/c", "?x=test")).toBe(
      "file:/a/b/c/?x=test",
    );
  });

  describe("parseSarifLocation", () => {
    it('should parse a sarif location with "no location"', () => {
      expect(parseSarifLocation({}, "")).to.deep.equal({
      expect(parseSarifLocation({}, "")).toEqual({
        hint: "no physical location",
      });
      expect(parseSarifLocation({ physicalLocation: {} }, "")).to.deep.equal({
      expect(parseSarifLocation({ physicalLocation: {} }, "")).toEqual({
        hint: "no artifact location",
      });
      expect(
        parseSarifLocation({ physicalLocation: { artifactLocation: {} } }, ""),
      ).to.deep.equal({
      ).toEqual({
        hint: "artifact location has no uri",
      });
    });
@@ -90,7 +86,7 @@ describe("parsing sarif", () => {
        },
      },
    };
    expect(parseSarifLocation(location, "prefix")).to.deep.equal({
    expect(parseSarifLocation(location, "prefix")).toEqual({
      uri: "file:/prefix/abc?x=test",
      userVisibleFile: "abc?x=test",
    });
@@ -104,7 +100,7 @@ describe("parsing sarif", () => {
        },
      },
    };
    expect(parseSarifLocation(location, "prefix")).to.deep.equal({
    expect(parseSarifLocation(location, "prefix")).toEqual({
      uri: "file:/abc%3Fx%3Dtest",
      userVisibleFile: "/abc?x=test",
    });
@@ -124,7 +120,7 @@ describe("parsing sarif", () => {
        },
      },
    };
    expect(parseSarifLocation(location, "prefix")).to.deep.equal({
    expect(parseSarifLocation(location, "prefix")).toEqual({
      uri: "file:abc%3Fx%3Dtest",
      userVisibleFile: "abc?x=test",
      startLine: 1,
@@ -1,89 +1,86 @@
import { expect } from "chai";
import "mocha";

import { humanizeRelativeTime, humanizeUnit } from "../../src/pure/time";

describe("Time", () => {
  it("should return a humanized unit", () => {
    expect(humanizeUnit(undefined)).to.eq("Less than a second");
    expect(humanizeUnit(0)).to.eq("Less than a second");
    expect(humanizeUnit(-1)).to.eq("Less than a second");
    expect(humanizeUnit(1000 - 1)).to.eq("Less than a second");
    expect(humanizeUnit(1000)).to.eq("1 second");
    expect(humanizeUnit(1000 * 2)).to.eq("2 seconds");
    expect(humanizeUnit(1000 * 60 - 1)).to.eq("59 seconds");
    expect(humanizeUnit(1000 * 60)).to.eq("1 minute");
    expect(humanizeUnit(1000 * 60 * 2 - 1)).to.eq("1 minute");
    expect(humanizeUnit(1000 * 60 * 2)).to.eq("2 minutes");
    expect(humanizeUnit(1000 * 60 * 60)).to.eq("1 hour");
    expect(humanizeUnit(1000 * 60 * 60 * 2)).to.eq("2 hours");
    expect(humanizeUnit(1000 * 60 * 60 * 24)).to.eq("1 day");
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 2)).to.eq("2 days");
    expect(humanizeUnit(undefined)).toBe("Less than a second");
    expect(humanizeUnit(0)).toBe("Less than a second");
    expect(humanizeUnit(-1)).toBe("Less than a second");
    expect(humanizeUnit(1000 - 1)).toBe("Less than a second");
    expect(humanizeUnit(1000)).toBe("1 second");
    expect(humanizeUnit(1000 * 2)).toBe("2 seconds");
    expect(humanizeUnit(1000 * 60 - 1)).toBe("59 seconds");
    expect(humanizeUnit(1000 * 60)).toBe("1 minute");
    expect(humanizeUnit(1000 * 60 * 2 - 1)).toBe("1 minute");
    expect(humanizeUnit(1000 * 60 * 2)).toBe("2 minutes");
    expect(humanizeUnit(1000 * 60 * 60)).toBe("1 hour");
    expect(humanizeUnit(1000 * 60 * 60 * 2)).toBe("2 hours");
    expect(humanizeUnit(1000 * 60 * 60 * 24)).toBe("1 day");
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 2)).toBe("2 days");

    // assume every month has 30 days
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 30)).to.eq("1 month");
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 30 * 2)).to.eq("2 months");
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 30 * 12)).to.eq("12 months");
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 30)).toBe("1 month");
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 30 * 2)).toBe("2 months");
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 30 * 12)).toBe("12 months");

    // assume every year has 365 days
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 365)).to.eq("1 year");
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 365 * 2)).to.eq("2 years");
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 365)).toBe("1 year");
    expect(humanizeUnit(1000 * 60 * 60 * 24 * 365 * 2)).toBe("2 years");
  });

  it("should return a humanized duration positive", () => {
    expect(humanizeRelativeTime(undefined)).to.eq("");
    expect(humanizeRelativeTime(0)).to.eq("this minute");
    expect(humanizeRelativeTime(1)).to.eq("this minute");
    expect(humanizeRelativeTime(1000 * 60 - 1)).to.eq("this minute");
    expect(humanizeRelativeTime(1000 * 60)).to.eq("in 1 minute");
    expect(humanizeRelativeTime(1000 * 60 * 2 - 1)).to.eq("in 1 minute");
    expect(humanizeRelativeTime(1000 * 60 * 2)).to.eq("in 2 minutes");
    expect(humanizeRelativeTime(1000 * 60 * 60)).to.eq("in 1 hour");
    expect(humanizeRelativeTime(1000 * 60 * 60 * 2)).to.eq("in 2 hours");
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24)).to.eq("tomorrow");
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 2)).to.eq("in 2 days");
    expect(humanizeRelativeTime(undefined)).toBe("");
    expect(humanizeRelativeTime(0)).toBe("this minute");
    expect(humanizeRelativeTime(1)).toBe("this minute");
    expect(humanizeRelativeTime(1000 * 60 - 1)).toBe("this minute");
    expect(humanizeRelativeTime(1000 * 60)).toBe("in 1 minute");
    expect(humanizeRelativeTime(1000 * 60 * 2 - 1)).toBe("in 1 minute");
    expect(humanizeRelativeTime(1000 * 60 * 2)).toBe("in 2 minutes");
    expect(humanizeRelativeTime(1000 * 60 * 60)).toBe("in 1 hour");
    expect(humanizeRelativeTime(1000 * 60 * 60 * 2)).toBe("in 2 hours");
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24)).toBe("tomorrow");
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 2)).toBe("in 2 days");

    // assume every month has 30 days
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30)).to.eq("next month");
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30 * 2)).to.eq(
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30)).toBe("next month");
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30 * 2)).toBe(
      "in 2 months",
    );
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30 * 12)).to.eq(
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30 * 12)).toBe(
      "in 12 months",
    );

    // assume every year has 365 days
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 365)).to.eq("next year");
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 365 * 2)).to.eq(
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 365)).toBe("next year");
    expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 365 * 2)).toBe(
      "in 2 years",
    );
  });

  it("should return a humanized duration negative", () => {
    expect(humanizeRelativeTime(-1)).to.eq("this minute");
    expect(humanizeRelativeTime(-1000 * 60)).to.eq("1 minute ago");
    expect(humanizeRelativeTime(-1000 * 60 - 1)).to.eq("1 minute ago");
    expect(humanizeRelativeTime(-1000 * 60 * 2)).to.eq("2 minutes ago");
    expect(humanizeRelativeTime(-1000 * 60 * 2 - 1)).to.eq("2 minutes ago");
    expect(humanizeRelativeTime(-1000 * 60 * 3)).to.eq("3 minutes ago");
    expect(humanizeRelativeTime(-1000 * 60 * 60)).to.eq("1 hour ago");
    expect(humanizeRelativeTime(-1000 * 60 * 60 - 1)).to.eq("1 hour ago");
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 2)).to.eq("2 hours ago");
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24)).to.eq("yesterday");
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 2)).to.eq("2 days ago");
    expect(humanizeRelativeTime(-1)).toBe("this minute");
    expect(humanizeRelativeTime(-1000 * 60)).toBe("1 minute ago");
    expect(humanizeRelativeTime(-1000 * 60 - 1)).toBe("1 minute ago");
    expect(humanizeRelativeTime(-1000 * 60 * 2)).toBe("2 minutes ago");
    expect(humanizeRelativeTime(-1000 * 60 * 2 - 1)).toBe("2 minutes ago");
    expect(humanizeRelativeTime(-1000 * 60 * 3)).toBe("3 minutes ago");
    expect(humanizeRelativeTime(-1000 * 60 * 60)).toBe("1 hour ago");
    expect(humanizeRelativeTime(-1000 * 60 * 60 - 1)).toBe("1 hour ago");
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 2)).toBe("2 hours ago");
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24)).toBe("yesterday");
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 2)).toBe("2 days ago");

    // assume every month has 30 days
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30)).to.eq("last month");
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30 * 2)).to.eq(
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30)).toBe("last month");
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30 * 2)).toBe(
      "2 months ago",
    );
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30 * 12)).to.eq(
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30 * 12)).toBe(
      "12 months ago",
    );

    // assume every year has 365 days
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 365)).to.eq("last year");
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 365 * 2)).to.eq(
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 365)).toBe("last year");
    expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 365 * 2)).toBe(
      "2 years ago",
    );
  });
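Not part of this PR, but worth noting while reviewing it: Jest's it.each is the idiomatic way to flatten assertion tables like the ones above into data-driven tests. A sketch using a few values from this hunk:

// Optional follow-up refactor, not applied in this PR:
it.each([
  [1000, "1 second"],
  [1000 * 60, "1 minute"],
  [1000 * 60 * 60 * 24, "1 day"],
])("humanizeUnit(%d) returns %s", (millis, expected) => {
  expect(humanizeUnit(millis)).toBe(expected);
});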
@@ -1,4 +1,3 @@
import { expect } from "chai";
import {
  VariantAnalysis,
  parseVariantAnalysisQueryLanguage,
@@ -13,17 +12,17 @@ import { createMockVariantAnalysis } from "../../src/vscode-tests/factories/remo

describe("parseVariantAnalysisQueryLanguage", () => {
  it("parses a valid language", () => {
    expect(parseVariantAnalysisQueryLanguage("javascript")).to.equal(
    expect(parseVariantAnalysisQueryLanguage("javascript")).toBe(
      VariantAnalysisQueryLanguage.Javascript,
    );
  });

  it("returns undefined for an valid language", () => {
    expect(parseVariantAnalysisQueryLanguage("rubbish")).to.not.exist;
    expect(parseVariantAnalysisQueryLanguage("rubbish")).toBeFalsy();
  });
});

describe("isVariantAnalysisComplete", async () => {
describe("isVariantAnalysisComplete", () => {
  let variantAnalysis: VariantAnalysis;
  const uncallableArtifactDownloadChecker = () => {
    throw new Error("Should not be called");
@@ -33,12 +32,12 @@ describe("isVariantAnalysisComplete", async () => {
    variantAnalysis = createMockVariantAnalysis({});
  });

  describe("when variant analysis status is InProgress", async () => {
  describe("when variant analysis status is InProgress", () => {
    beforeEach(() => {
      variantAnalysis.status = VariantAnalysisStatus.InProgress;
    });

    describe("when scanned repos is undefined", async () => {
    describe("when scanned repos is undefined", () => {
      it("should say the variant analysis is not complete", async () => {
        variantAnalysis.scannedRepos = undefined;
        expect(
@@ -46,24 +45,24 @@ describe("isVariantAnalysisComplete", async () => {
            variantAnalysis,
            uncallableArtifactDownloadChecker,
          ),
        ).to.equal(false);
        ).toBe(false);
      });
    });

    describe("when scanned repos is non-empty", async () => {
      describe("when not all results are downloaded", async () => {
    describe("when scanned repos is non-empty", () => {
      describe("when not all results are downloaded", () => {
        it("should say the variant analysis is not complete", async () => {
          expect(
            await isVariantAnalysisComplete(variantAnalysis, async () => false),
          ).to.equal(false);
          ).toBe(false);
        });
      });

      describe("when all results are downloaded", async () => {
      describe("when all results are downloaded", () => {
        it("should say the variant analysis is complete", async () => {
          expect(
            await isVariantAnalysisComplete(variantAnalysis, async () => true),
          ).to.equal(false);
          ).toBe(false);
        });
      });
    });
@@ -74,12 +73,12 @@ describe("isVariantAnalysisComplete", async () => {
    VariantAnalysisStatus.Failed,
    VariantAnalysisStatus.Canceled,
  ]) {
    describe(`when variant analysis status is ${variantAnalysisStatus}`, async () => {
    describe(`when variant analysis status is ${variantAnalysisStatus}`, () => {
      beforeEach(() => {
        variantAnalysis.status = variantAnalysisStatus;
      });

      describe("when scanned repos is undefined", async () => {
      describe("when scanned repos is undefined", () => {
        it("should say the variant analysis is complete", async () => {
          variantAnalysis.scannedRepos = undefined;
          expect(
@@ -87,11 +86,11 @@ describe("isVariantAnalysisComplete", async () => {
              variantAnalysis,
              uncallableArtifactDownloadChecker,
            ),
          ).to.equal(true);
          ).toBe(true);
        });
      });

      describe("when scanned repos is empty", async () => {
      describe("when scanned repos is empty", () => {
        it("should say the variant analysis is complete", async () => {
          variantAnalysis.scannedRepos = [];
          expect(
@@ -99,11 +98,11 @@ describe("isVariantAnalysisComplete", async () => {
              variantAnalysis,
              uncallableArtifactDownloadChecker,
            ),
          ).to.equal(true);
          ).toBe(true);
        });
      });

      describe("when a repo scan is still in progress", async () => {
      describe("when a repo scan is still in progress", () => {
        it("should say the variant analysis is not complete", async () => {
          variantAnalysis.scannedRepos = [
            createMockScannedRepo(
@@ -114,11 +113,11 @@ describe("isVariantAnalysisComplete", async () => {
          ];
          expect(
            await isVariantAnalysisComplete(variantAnalysis, async () => false),
          ).to.equal(false);
          ).toBe(false);
        });
      });

      describe("when not all results are downloaded", async () => {
      describe("when not all results are downloaded", () => {
        it("should say the variant analysis is not complete", async () => {
          variantAnalysis.scannedRepos = [
            createMockScannedRepo(
@@ -129,11 +128,11 @@ describe("isVariantAnalysisComplete", async () => {
          ];
          expect(
            await isVariantAnalysisComplete(variantAnalysis, async () => false),
          ).to.equal(false);
          ).toBe(false);
        });
      });

      describe("when all results are downloaded", async () => {
      describe("when all results are downloaded", () => {
        it("should say the variant analysis is complete", async () => {
          variantAnalysis.scannedRepos = [
            createMockScannedRepo(
@@ -144,7 +143,7 @@ describe("isVariantAnalysisComplete", async () => {
          ];
          expect(
            await isVariantAnalysisComplete(variantAnalysis, async () => true),
          ).to.equal(true);
          ).toBe(true);
        });
      });
    });
@@ -157,7 +156,7 @@ describe("getActionsWorkflowRunUrl", () => {

    const actionsWorkflowRunUrl = getActionsWorkflowRunUrl(variantAnalysis);

    expect(actionsWorkflowRunUrl).to.equal(
    expect(actionsWorkflowRunUrl).toBe(
      `https://github.com/${variantAnalysis.controllerRepo.fullName}/actions/runs/${variantAnalysis.actionsWorkflowRunId}`,
    );
  });
@@ -1,20 +1,18 @@
import { expect } from "chai";

import { pluralize } from "../../src/pure/word";

describe("word helpers", () => {
  describe("pluralize", () => {
    it("should return the plural form if the number is 0", () => {
      expect(pluralize(0, "thing", "things")).to.eq("0 things");
      expect(pluralize(0, "thing", "things")).toBe("0 things");
    });
    it("should return the singular form if the number is 1", () => {
      expect(pluralize(1, "thing", "things")).to.eq("1 thing");
      expect(pluralize(1, "thing", "things")).toBe("1 thing");
    });
    it("should return the plural form if the number is greater than 1", () => {
      expect(pluralize(7, "thing", "things")).to.eq("7 things");
      expect(pluralize(7, "thing", "things")).toBe("7 things");
    });
    it("should return the empty string if the number is undefined", () => {
      expect(pluralize(undefined, "thing", "things")).to.eq("");
      expect(pluralize(undefined, "thing", "things")).toBe("");
    });
  });
});