Merge pull request #1780 from github/koesie10/jest-pure-tests

Convert pure tests to Jest
This commit is contained in:
Koen Vlaswinkel
2022-11-22 16:13:05 +01:00
committed by GitHub
32 changed files with 635 additions and 507 deletions

View File

@@ -5,7 +5,8 @@
"recommendations": [ "recommendations": [
"amodio.tsl-problem-matcher", "amodio.tsl-problem-matcher",
"dbaeumer.vscode-eslint", "dbaeumer.vscode-eslint",
"esbenp.prettier-vscode" "esbenp.prettier-vscode",
"Orta.vscode-jest",
], ],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace. // List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": [] "unwantedRecommendations": []

24
.vscode/launch.json vendored
View File

@@ -29,24 +29,16 @@
"name": "Launch Unit Tests (vscode-codeql)", "name": "Launch Unit Tests (vscode-codeql)",
"type": "node", "type": "node",
"request": "launch", "request": "launch",
"program": "${workspaceFolder}/extensions/ql-vscode/node_modules/mocha/bin/_mocha", "program": "${workspaceFolder}/extensions/ql-vscode/node_modules/jest/bin/jest.js",
"showAsyncStacks": true, "showAsyncStacks": true,
"cwd": "${workspaceFolder}/extensions/ql-vscode", "cwd": "${workspaceFolder}/extensions/ql-vscode",
"runtimeArgs": [
"--inspect=9229"
],
"env": { "env": {
"LANG": "en-US" "LANG": "en-US",
"TZ": "UTC"
}, },
"args": [ "args": [
"--exit", "--projects",
"-u", "test"
"bdd",
"--colors",
"--diff",
"--config",
".mocharc.json",
"test/pure-tests/**/*.ts"
], ],
"stopOnEntry": false, "stopOnEntry": false,
"sourceMaps": true, "sourceMaps": true,
@@ -60,6 +52,10 @@
"program": "${workspaceFolder}/extensions/ql-vscode/node_modules/jest/bin/jest.js", "program": "${workspaceFolder}/extensions/ql-vscode/node_modules/jest/bin/jest.js",
"showAsyncStacks": true, "showAsyncStacks": true,
"cwd": "${workspaceFolder}/extensions/ql-vscode", "cwd": "${workspaceFolder}/extensions/ql-vscode",
"args": [
"--projects",
"src/view"
],
"stopOnEntry": false, "stopOnEntry": false,
"sourceMaps": true, "sourceMaps": true,
"console": "integratedTerminal", "console": "integratedTerminal",
@@ -117,7 +113,7 @@
"--disable-extension", "--disable-extension",
"github.copilot", "github.copilot",
"${workspaceRoot}/extensions/ql-vscode/src/vscode-tests/cli-integration/data", "${workspaceRoot}/extensions/ql-vscode/src/vscode-tests/cli-integration/data",
// Uncomment the last line and modify the path to a checked out // Uncomment the last line and modify the path to a checked out
// instance of the codeql repository so the libraries are // instance of the codeql repository so the libraries are
// available in the workspace for the tests. // available in the workspace for the tests.
// "${workspaceRoot}/../codeql" // "${workspaceRoot}/../codeql"

View File

@@ -37,6 +37,11 @@
"javascript.preferences.quoteStyle": "single", "javascript.preferences.quoteStyle": "single",
"editor.wordWrapColumn": 100, "editor.wordWrapColumn": 100,
"jest.rootPath": "./extensions/ql-vscode", "jest.rootPath": "./extensions/ql-vscode",
"jest.autoRun": "watch",
"jest.nodeEnv": {
"LANG": "en-US",
"TZ": "UTC"
},
"[typescript]": { "[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode", "editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true, "editor.formatOnSave": true,

View File

@@ -1,4 +0,0 @@
{
"exit": true,
"require": ["test/mocha.setup.js"]
}

View File

@@ -4,5 +4,5 @@
*/ */
module.exports = { module.exports = {
projects: ["<rootDir>/src/view"], projects: ["<rootDir>/src/view", "<rootDir>/test"],
}; };

View File

@@ -1270,7 +1270,7 @@
"watch:webpack": "gulp watchView", "watch:webpack": "gulp watchView",
"watch:files": "gulp watchTestData", "watch:files": "gulp watchTestData",
"test": "npm-run-all -p test:*", "test": "npm-run-all -p test:*",
"test:unit": "mocha --config .mocharc.json 'test/pure-tests/**/*.ts'", "test:unit": "jest --projects test",
"test:view": "jest --projects src/view", "test:view": "jest --projects src/view",
"integration": "node ./out/vscode-tests/run-integration-tests.js no-workspace,minimal-workspace", "integration": "node ./out/vscode-tests/run-integration-tests.js no-workspace,minimal-workspace",
"integration:no-workspace": "node ./out/vscode-tests/run-integration-tests.js no-workspace", "integration:no-workspace": "node ./out/vscode-tests/run-integration-tests.js no-workspace",

View File

@@ -2,7 +2,7 @@ import * as fs from "fs-extra";
import * as path from "path"; import * as path from "path";
import { cloneDbConfig, DbConfig } from "./db-config"; import { cloneDbConfig, DbConfig } from "./db-config";
import * as chokidar from "chokidar"; import * as chokidar from "chokidar";
import { DisposableObject } from "../../pure/disposable-object"; import { DisposableObject, DisposeHandler } from "../../pure/disposable-object";
import { DbConfigValidator } from "./db-config-validator"; import { DbConfigValidator } from "./db-config-validator";
import { ValueResult } from "../../common/value-result"; import { ValueResult } from "../../common/value-result";
import { App } from "../../common/app"; import { App } from "../../common/app";
@@ -38,7 +38,8 @@ export class DbConfigStore extends DisposableObject {
this.watchConfig(); this.watchConfig();
} }
public dispose(): void { public dispose(disposeHandler?: DisposeHandler): void {
super.dispose(disposeHandler);
this.configWatcher?.unwatch(this.configPath); this.configWatcher?.unwatch(this.configPath);
} }

View File

@@ -1,8 +1,8 @@
module.exports = { module.exports = {
env: { env: {
mocha: true jest: true,
}, },
parserOptions: { parserOptions: {
project: './test/tsconfig.json', project: "./test/tsconfig.json",
}, },
} };

View File

@@ -0,0 +1,201 @@
import type { Config } from "jest";
/*
* For a detailed explanation regarding each configuration property and type check, visit:
* https://jestjs.io/docs/configuration
*/
const config: Config = {
// All imported modules in your tests should be mocked automatically
// automock: false,
// Stop running tests after `n` failures
// bail: 0,
// The directory where Jest should store its cached dependency information
// cacheDirectory: "/private/var/folders/6m/1394pht172qgd7dmw1fwjk100000gn/T/jest_dx",
// Automatically clear mock calls, instances, contexts and results before every test
// clearMocks: true,
// Indicates whether the coverage information should be collected while executing the test
// collectCoverage: false,
// An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: undefined,
// The directory where Jest should output its coverage files
// coverageDirectory: undefined,
// An array of regexp pattern strings used to skip coverage collection
// coveragePathIgnorePatterns: [
// "/node_modules/"
// ],
// Indicates which provider should be used to instrument code for coverage
coverageProvider: "v8",
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],
// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: undefined,
// A path to a custom dependency extractor
// dependencyExtractor: undefined,
// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,
// The default configuration for fake timers
// fakeTimers: {
// "enableGlobally": false
// },
// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once before all test suites
// globalSetup: undefined,
// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: undefined,
// A set of global variables that need to be available in all test environments
// globals: {},
// Insert Jest's globals (expect, test, describe, beforeEach etc.) into the global environment. If you set this to false, you should import from @jest/globals.
// injectGlobals: false,
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
// maxWorkers: 1,
// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],
// An array of file extensions your modules use
moduleFileExtensions: ["js", "mjs", "cjs", "jsx", "ts", "tsx", "json"],
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],
// Activates notifications for test results
// notify: false,
// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "failure-change",
// A preset that is used as a base for Jest's configuration
preset: "ts-jest",
// Run tests from one or more projects
// projects: undefined,
// Use this configuration option to add custom reporters to Jest
// reporters: undefined,
// Automatically reset mock state before every test
// resetMocks: false,
// Reset the module registry before running each individual test
// resetModules: false,
// A path to a custom resolver
// resolver: undefined,
// Automatically restore mock state and implementation before every test
// restoreMocks: false,
// The root directory that Jest should scan for tests and modules within
// rootDir: undefined,
// A list of paths to directories that Jest should use to search for files in
// roots: [
// "<rootDir>"
// ],
// Allows you to use a custom runner instead of Jest's default test runner
// runner: 'vscode',
// The paths to modules that run some code to configure or set up the testing environment before each test
// setupFiles: [],
// A list of paths to modules that run some code to configure or set up the testing framework before each test
setupFilesAfterEnv: ["<rootDir>/jest.setup.ts"],
// The number of seconds after which a test is considered as slow and reported as such in the results.
// slowTestThreshold: 5,
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],
// The test environment that will be used for testing
// testEnvironment: 'jsdom',
// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
testMatch: ["**/*.test.[jt]s"],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
// testPathIgnorePatterns: [
// "/node_modules/"
// ],
// The regexp pattern or array of patterns that Jest uses to detect test files
// testRegex: [],
// This option allows the use of a custom results processor
// testResultsProcessor: undefined,
// This option allows use of a custom test runner
// testRunner: "jest-circus/runner",
// A map from regular expressions to paths to transformers
transform: {
"^.+\\.tsx?$": [
"ts-jest",
{
tsconfig: "<rootDir>/tsconfig.json",
},
],
node_modules: [
"babel-jest",
{
presets: ["@babel/preset-env"],
plugins: ["@babel/plugin-transform-modules-commonjs"],
},
],
},
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// 'transformIgnorePatterns': [
// // These use ES modules, so need to be transformed
// 'node_modules/(?!(?:@vscode/webview-ui-toolkit|@microsoft/.+|exenv-es6)/.*)'
// ],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
// Indicates whether each individual test should be reported during the run
// verbose: undefined,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
};
export default config;

View File

@@ -0,0 +1,2 @@
process.env.TZ = "UTC";
process.env.LANG = "en-US";

View File

@@ -1,8 +0,0 @@
const path = require('path');
require('ts-node').register({
project: path.resolve(__dirname, 'tsconfig.json')
})
process.env.TZ = 'UTC';
process.env.LANG = 'en-US';

View File

@@ -1,4 +1,3 @@
import { expect } from "chai";
import * as path from "path"; import * as path from "path";
import * as fs from "fs-extra"; import * as fs from "fs-extra";
@@ -8,7 +7,7 @@ type CmdDecl = {
title?: string; title?: string;
}; };
describe("commands declared in package.json", function () { describe("commands declared in package.json", () => {
const manifest = fs.readJsonSync(path.join(__dirname, "../../package.json")); const manifest = fs.readJsonSync(path.join(__dirname, "../../package.json"));
const commands = manifest.contributes.commands; const commands = manifest.contributes.commands;
const menus = manifest.contributes.menus; const menus = manifest.contributes.menus;
@@ -31,7 +30,7 @@ describe("commands declared in package.json", function () {
const { command, title } = commandDecl; const { command, title } = commandDecl;
if (command.match(/^codeQL\./) || command.match(/^codeQLQueryResults\./)) { if (command.match(/^codeQL\./) || command.match(/^codeQLQueryResults\./)) {
paletteCmds.add(command); paletteCmds.add(command);
expect(title).not.to.be.undefined; expect(title).toBeDefined();
commandTitles[command] = title!; commandTitles[command] = title!;
} else if ( } else if (
command.match(/^codeQLDatabases\./) || command.match(/^codeQLDatabases\./) ||
@@ -42,10 +41,10 @@ describe("commands declared in package.json", function () {
command.match(/^codeQLTests\./) command.match(/^codeQLTests\./)
) { ) {
scopedCmds.add(command); scopedCmds.add(command);
expect(title).not.to.be.undefined; expect(title).toBeDefined();
commandTitles[command] = title!; commandTitles[command] = title!;
} else { } else {
expect.fail(`Unexpected command name ${command}`); fail(`Unexpected command name ${command}`);
} }
}); });
@@ -66,35 +65,27 @@ describe("commands declared in package.json", function () {
disabledInPalette.add(commandDecl.command); disabledInPalette.add(commandDecl.command);
}); });
it("should have commands appropriately prefixed", function () { it("should have commands appropriately prefixed", () => {
paletteCmds.forEach((command) => { paletteCmds.forEach((command) => {
expect( // command ${command} should be prefixed with 'CodeQL: ', since it is accessible from the command palette
commandTitles[command], expect(commandTitles[command]).toMatch(/^CodeQL: /);
`command ${command} should be prefixed with 'CodeQL: ', since it is accessible from the command palette`,
).to.match(/^CodeQL: /);
}); });
contribContextMenuCmds.forEach((command) => { contribContextMenuCmds.forEach((command) => {
expect( // command ${command} should be prefixed with 'CodeQL: ', since it is accessible from a context menu in a non-extension-controlled context
commandTitles[command], expect(commandTitles[command]).toMatch(/^CodeQL: /);
`command ${command} should be prefixed with 'CodeQL: ', since it is accessible from a context menu in a non-extension-controlled context`,
).to.match(/^CodeQL: /);
}); });
scopedCmds.forEach((command) => { scopedCmds.forEach((command) => {
expect( // command ${command} should not be prefixed with 'CodeQL: ', since it is accessible from an extension-controlled context
commandTitles[command], expect(commandTitles[command]).not.toMatch(/^CodeQL: /);
`command ${command} should not be prefixed with 'CodeQL: ', since it is accessible from an extension-controlled context`,
).not.to.match(/^CodeQL: /);
}); });
}); });
it("should have the right commands accessible from the command palette", function () { it("should have the right commands accessible from the command palette", () => {
paletteCmds.forEach((command) => { paletteCmds.forEach((command) => {
expect( // command ${command} should be enabled in the command palette
disabledInPalette.has(command), expect(disabledInPalette.has(command)).toBe(false);
`command ${command} should be enabled in the command palette`,
).to.be.false;
}); });
// Commands in contribContextMenuCmds may reasonbly be enabled or // Commands in contribContextMenuCmds may reasonbly be enabled or
@@ -103,10 +94,8 @@ describe("commands declared in package.json", function () {
// query to run, but codeQL.setCurrentDatabase is not. // query to run, but codeQL.setCurrentDatabase is not.
scopedCmds.forEach((command) => { scopedCmds.forEach((command) => {
expect( // command ${command} should be disabled in the command palette
disabledInPalette.has(command), expect(disabledInPalette.has(command)).toBe(true);
`command ${command} should be disabled in the command palette`,
).to.be.true;
}); });
}); });
}); });

View File

@@ -1,10 +1,9 @@
import * as fs from "fs-extra"; import * as fs from "fs-extra";
import * as path from "path"; import * as path from "path";
import { DbConfigStore } from "../../../../src/databases/config/db-config-store"; import { DbConfigStore } from "../../../../src/databases/config/db-config-store";
import { expect } from "chai";
import { createMockApp } from "../../../__mocks__/appMock"; import { createMockApp } from "../../../__mocks__/appMock";
describe("db config store", async () => { describe("db config store", () => {
const extensionPath = path.join(__dirname, "../../../.."); const extensionPath = path.join(__dirname, "../../../..");
const tempWorkspaceStoragePath = path.join(__dirname, "test-workspace"); const tempWorkspaceStoragePath = path.join(__dirname, "test-workspace");
const testDataStoragePath = path.join(__dirname, "data"); const testDataStoragePath = path.join(__dirname, "data");
@@ -31,15 +30,17 @@ describe("db config store", async () => {
const configStore = new DbConfigStore(app); const configStore = new DbConfigStore(app);
await configStore.initialize(); await configStore.initialize();
expect(await fs.pathExists(configPath)).to.be.true; expect(await fs.pathExists(configPath)).toBe(true);
const config = configStore.getConfig().value; const config = configStore.getConfig().value;
expect(config.databases.remote.repositoryLists).to.be.empty; expect(config.databases.remote.repositoryLists).toHaveLength(0);
expect(config.databases.remote.owners).to.be.empty; expect(config.databases.remote.owners).toHaveLength(0);
expect(config.databases.remote.repositories).to.be.empty; expect(config.databases.remote.repositories).toHaveLength(0);
expect(config.databases.local.lists).to.be.empty; expect(config.databases.local.lists).toHaveLength(0);
expect(config.databases.local.databases).to.be.empty; expect(config.databases.local.databases).toHaveLength(0);
expect(config.selected).to.be.undefined; expect(config.selected).toBeUndefined();
configStore.dispose();
}); });
it("should load an existing config", async () => { it("should load an existing config", async () => {
@@ -51,20 +52,20 @@ describe("db config store", async () => {
await configStore.initialize(); await configStore.initialize();
const config = configStore.getConfig().value; const config = configStore.getConfig().value;
expect(config.databases.remote.repositoryLists).to.have.length(1); expect(config.databases.remote.repositoryLists).toHaveLength(1);
expect(config.databases.remote.repositoryLists[0]).to.deep.equal({ expect(config.databases.remote.repositoryLists[0]).toEqual({
name: "repoList1", name: "repoList1",
repositories: ["foo/bar", "foo/baz"], repositories: ["foo/bar", "foo/baz"],
}); });
expect(config.databases.remote.owners).to.be.empty; expect(config.databases.remote.owners).toHaveLength(0);
expect(config.databases.remote.repositories).to.have.length(3); expect(config.databases.remote.repositories).toHaveLength(3);
expect(config.databases.remote.repositories).to.deep.equal([ expect(config.databases.remote.repositories).toEqual([
"owner/repo1", "owner/repo1",
"owner/repo2", "owner/repo2",
"owner/repo3", "owner/repo3",
]); ]);
expect(config.databases.local.lists).to.have.length(2); expect(config.databases.local.lists).toHaveLength(2);
expect(config.databases.local.lists[0]).to.deep.equal({ expect(config.databases.local.lists[0]).toEqual({
name: "localList1", name: "localList1",
databases: [ databases: [
{ {
@@ -75,17 +76,19 @@ describe("db config store", async () => {
}, },
], ],
}); });
expect(config.databases.local.databases).to.have.length(1); expect(config.databases.local.databases).toHaveLength(1);
expect(config.databases.local.databases[0]).to.deep.equal({ expect(config.databases.local.databases[0]).toEqual({
name: "example-db", name: "example-db",
dateAdded: 1668096927267, dateAdded: 1668096927267,
language: "ruby", language: "ruby",
storagePath: "/path/to/database/", storagePath: "/path/to/database/",
}); });
expect(config.selected).to.deep.equal({ expect(config.selected).toEqual({
kind: "configDefined", kind: "configDefined",
value: "path.to.database", value: "path.to.database",
}); });
configStore.dispose();
}); });
it("should load an existing config without selected db", async () => { it("should load an existing config without selected db", async () => {
@@ -104,7 +107,9 @@ describe("db config store", async () => {
await configStore.initialize(); await configStore.initialize();
const config = configStore.getConfig().value; const config = configStore.getConfig().value;
expect(config.selected).to.be.undefined; expect(config.selected).toBeUndefined();
configStore.dispose();
}); });
it("should not allow modification of the config", async () => { it("should not allow modification of the config", async () => {
@@ -119,8 +124,8 @@ describe("db config store", async () => {
config.databases.remote.repositoryLists = []; config.databases.remote.repositoryLists = [];
const reRetrievedConfig = configStore.getConfig().value; const reRetrievedConfig = configStore.getConfig().value;
expect(reRetrievedConfig.databases.remote.repositoryLists).to.have.length( expect(reRetrievedConfig.databases.remote.repositoryLists).toHaveLength(1);
1,
); configStore.dispose();
}); });
}); });

View File

@@ -1,10 +1,9 @@
import { expect } from "chai";
import * as path from "path"; import * as path from "path";
import { DbConfig } from "../../../../src/databases/config/db-config"; import { DbConfig } from "../../../../src/databases/config/db-config";
import { DbConfigValidator } from "../../../../src/databases/config/db-config-validator"; import { DbConfigValidator } from "../../../../src/databases/config/db-config-validator";
describe("db config validation", async () => { describe("db config validation", () => {
const extensionPath = path.join(__dirname, "../../.."); const extensionPath = path.join(__dirname, "../../../..");
const configValidator = new DbConfigValidator(extensionPath); const configValidator = new DbConfigValidator(extensionPath);
it("should return error when file is not valid", async () => { it("should return error when file is not valid", async () => {
@@ -27,15 +26,15 @@ describe("db config validation", async () => {
const validationOutput = configValidator.validate(dbConfig); const validationOutput = configValidator.validate(dbConfig);
expect(validationOutput).to.have.length(3); expect(validationOutput).toHaveLength(3);
expect(validationOutput[0]).to.deep.equal( expect(validationOutput[0]).toEqual(
"/databases must have required property 'local'", "/databases must have required property 'local'",
); );
expect(validationOutput[1]).to.deep.equal( expect(validationOutput[1]).toEqual(
"/databases/remote must have required property 'owners'", "/databases/remote must have required property 'owners'",
); );
expect(validationOutput[2]).to.deep.equal( expect(validationOutput[2]).toEqual(
"/databases/remote must NOT have additional properties", "/databases/remote must NOT have additional properties",
); );
}); });

View File

@@ -1,5 +1,3 @@
import { expect } from "chai";
import { DbConfig } from "../../../src/databases/config/db-config"; import { DbConfig } from "../../../src/databases/config/db-config";
import { DbItemKind } from "../../../src/databases/db-item"; import { DbItemKind } from "../../../src/databases/db-item";
import { import {
@@ -26,22 +24,22 @@ describe("db tree creator", () => {
const dbTreeRoot = createRemoteTree(dbConfig); const dbTreeRoot = createRemoteTree(dbConfig);
expect(dbTreeRoot).to.be.ok; expect(dbTreeRoot).toBeTruthy();
expect(dbTreeRoot.kind).to.equal(DbItemKind.RootRemote); expect(dbTreeRoot.kind).toBe(DbItemKind.RootRemote);
expect(dbTreeRoot.children.length).to.equal(3); expect(dbTreeRoot.children.length).toBe(3);
expect(dbTreeRoot.children[0]).to.deep.equal({ expect(dbTreeRoot.children[0]).toEqual({
kind: DbItemKind.RemoteSystemDefinedList, kind: DbItemKind.RemoteSystemDefinedList,
listName: "top_10", listName: "top_10",
listDisplayName: "Top 10 repositories", listDisplayName: "Top 10 repositories",
listDescription: "Top 10 repositories of a language", listDescription: "Top 10 repositories of a language",
}); });
expect(dbTreeRoot.children[1]).to.deep.equal({ expect(dbTreeRoot.children[1]).toEqual({
kind: DbItemKind.RemoteSystemDefinedList, kind: DbItemKind.RemoteSystemDefinedList,
listName: "top_100", listName: "top_100",
listDisplayName: "Top 100 repositories", listDisplayName: "Top 100 repositories",
listDescription: "Top 100 repositories of a language", listDescription: "Top 100 repositories of a language",
}); });
expect(dbTreeRoot.children[2]).to.deep.equal({ expect(dbTreeRoot.children[2]).toEqual({
kind: DbItemKind.RemoteSystemDefinedList, kind: DbItemKind.RemoteSystemDefinedList,
listName: "top_1000", listName: "top_1000",
listDisplayName: "Top 1000 repositories", listDisplayName: "Top 1000 repositories",
@@ -75,14 +73,14 @@ describe("db tree creator", () => {
const dbTreeRoot = createRemoteTree(dbConfig); const dbTreeRoot = createRemoteTree(dbConfig);
expect(dbTreeRoot).to.be.ok; expect(dbTreeRoot).toBeTruthy();
expect(dbTreeRoot.kind).to.equal(DbItemKind.RootRemote); expect(dbTreeRoot.kind).toBe(DbItemKind.RootRemote);
const repositoryListNodes = dbTreeRoot.children.filter( const repositoryListNodes = dbTreeRoot.children.filter(
(child) => child.kind === DbItemKind.RemoteUserDefinedList, (child) => child.kind === DbItemKind.RemoteUserDefinedList,
); );
expect(repositoryListNodes.length).to.equal(2); expect(repositoryListNodes.length).toBe(2);
expect(repositoryListNodes[0]).to.deep.equal({ expect(repositoryListNodes[0]).toEqual({
kind: DbItemKind.RemoteUserDefinedList, kind: DbItemKind.RemoteUserDefinedList,
listName: dbConfig.databases.remote.repositoryLists[0].name, listName: dbConfig.databases.remote.repositoryLists[0].name,
repos: dbConfig.databases.remote.repositoryLists[0].repositories.map( repos: dbConfig.databases.remote.repositoryLists[0].repositories.map(
@@ -92,7 +90,7 @@ describe("db tree creator", () => {
}), }),
), ),
}); });
expect(repositoryListNodes[1]).to.deep.equal({ expect(repositoryListNodes[1]).toEqual({
kind: DbItemKind.RemoteUserDefinedList, kind: DbItemKind.RemoteUserDefinedList,
listName: dbConfig.databases.remote.repositoryLists[1].name, listName: dbConfig.databases.remote.repositoryLists[1].name,
repos: dbConfig.databases.remote.repositoryLists[1].repositories.map( repos: dbConfig.databases.remote.repositoryLists[1].repositories.map(
@@ -121,18 +119,18 @@ describe("db tree creator", () => {
const dbTreeRoot = createRemoteTree(dbConfig); const dbTreeRoot = createRemoteTree(dbConfig);
expect(dbTreeRoot).to.be.ok; expect(dbTreeRoot).toBeTruthy();
expect(dbTreeRoot.kind).to.equal(DbItemKind.RootRemote); expect(dbTreeRoot.kind).toBe(DbItemKind.RootRemote);
const ownerNodes = dbTreeRoot.children.filter( const ownerNodes = dbTreeRoot.children.filter(
(child) => child.kind === DbItemKind.RemoteOwner, (child) => child.kind === DbItemKind.RemoteOwner,
); );
expect(ownerNodes.length).to.equal(2); expect(ownerNodes.length).toBe(2);
expect(ownerNodes[0]).to.deep.equal({ expect(ownerNodes[0]).toEqual({
kind: DbItemKind.RemoteOwner, kind: DbItemKind.RemoteOwner,
ownerName: dbConfig.databases.remote.owners[0], ownerName: dbConfig.databases.remote.owners[0],
}); });
expect(ownerNodes[1]).to.deep.equal({ expect(ownerNodes[1]).toEqual({
kind: DbItemKind.RemoteOwner, kind: DbItemKind.RemoteOwner,
ownerName: dbConfig.databases.remote.owners[1], ownerName: dbConfig.databases.remote.owners[1],
}); });
@@ -155,22 +153,22 @@ describe("db tree creator", () => {
const dbTreeRoot = createRemoteTree(dbConfig); const dbTreeRoot = createRemoteTree(dbConfig);
expect(dbTreeRoot).to.be.ok; expect(dbTreeRoot).toBeTruthy();
expect(dbTreeRoot.kind).to.equal(DbItemKind.RootRemote); expect(dbTreeRoot.kind).toBe(DbItemKind.RootRemote);
const repoNodes = dbTreeRoot.children.filter( const repoNodes = dbTreeRoot.children.filter(
(child) => child.kind === DbItemKind.RemoteRepo, (child) => child.kind === DbItemKind.RemoteRepo,
); );
expect(repoNodes.length).to.equal(3); expect(repoNodes.length).toBe(3);
expect(repoNodes[0]).to.deep.equal({ expect(repoNodes[0]).toEqual({
kind: DbItemKind.RemoteRepo, kind: DbItemKind.RemoteRepo,
repoFullName: dbConfig.databases.remote.repositories[0], repoFullName: dbConfig.databases.remote.repositories[0],
}); });
expect(repoNodes[1]).to.deep.equal({ expect(repoNodes[1]).toEqual({
kind: DbItemKind.RemoteRepo, kind: DbItemKind.RemoteRepo,
repoFullName: dbConfig.databases.remote.repositories[1], repoFullName: dbConfig.databases.remote.repositories[1],
}); });
expect(repoNodes[2]).to.deep.equal({ expect(repoNodes[2]).toEqual({
kind: DbItemKind.RemoteRepo, kind: DbItemKind.RemoteRepo,
repoFullName: dbConfig.databases.remote.repositories[2], repoFullName: dbConfig.databases.remote.repositories[2],
}); });
@@ -194,9 +192,9 @@ describe("db tree creator", () => {
const dbTreeRoot = createLocalTree(dbConfig); const dbTreeRoot = createLocalTree(dbConfig);
expect(dbTreeRoot).to.be.ok; expect(dbTreeRoot).toBeTruthy();
expect(dbTreeRoot.kind).to.equal(DbItemKind.RootLocal); expect(dbTreeRoot.kind).toBe(DbItemKind.RootLocal);
expect(dbTreeRoot.children.length).to.equal(0); expect(dbTreeRoot.children.length).toBe(0);
}); });
it("should create local list nodes", () => { it("should create local list nodes", () => {
@@ -245,14 +243,14 @@ describe("db tree creator", () => {
const dbTreeRoot = createLocalTree(dbConfig); const dbTreeRoot = createLocalTree(dbConfig);
expect(dbTreeRoot).to.be.ok; expect(dbTreeRoot).toBeTruthy();
expect(dbTreeRoot.kind).to.equal(DbItemKind.RootLocal); expect(dbTreeRoot.kind).toBe(DbItemKind.RootLocal);
const localListNodes = dbTreeRoot.children.filter( const localListNodes = dbTreeRoot.children.filter(
(child) => child.kind === DbItemKind.LocalList, (child) => child.kind === DbItemKind.LocalList,
); );
expect(localListNodes.length).to.equal(2); expect(localListNodes.length).toBe(2);
expect(localListNodes[0]).to.deep.equal({ expect(localListNodes[0]).toEqual({
kind: DbItemKind.LocalList, kind: DbItemKind.LocalList,
listName: dbConfig.databases.local.lists[0].name, listName: dbConfig.databases.local.lists[0].name,
databases: dbConfig.databases.local.lists[0].databases.map((db) => ({ databases: dbConfig.databases.local.lists[0].databases.map((db) => ({
@@ -263,7 +261,7 @@ describe("db tree creator", () => {
storagePath: db.storagePath, storagePath: db.storagePath,
})), })),
}); });
expect(localListNodes[1]).to.deep.equal({ expect(localListNodes[1]).toEqual({
kind: DbItemKind.LocalList, kind: DbItemKind.LocalList,
listName: dbConfig.databases.local.lists[1].name, listName: dbConfig.databases.local.lists[1].name,
databases: dbConfig.databases.local.lists[1].databases.map((db) => ({ databases: dbConfig.databases.local.lists[1].databases.map((db) => ({
@@ -306,21 +304,21 @@ describe("db tree creator", () => {
const dbTreeRoot = createLocalTree(dbConfig); const dbTreeRoot = createLocalTree(dbConfig);
expect(dbTreeRoot).to.be.ok; expect(dbTreeRoot).toBeTruthy();
expect(dbTreeRoot.kind).to.equal(DbItemKind.RootLocal); expect(dbTreeRoot.kind).toBe(DbItemKind.RootLocal);
const localDatabaseNodes = dbTreeRoot.children.filter( const localDatabaseNodes = dbTreeRoot.children.filter(
(child) => child.kind === DbItemKind.LocalDatabase, (child) => child.kind === DbItemKind.LocalDatabase,
); );
expect(localDatabaseNodes.length).to.equal(2); expect(localDatabaseNodes.length).toBe(2);
expect(localDatabaseNodes[0]).to.deep.equal({ expect(localDatabaseNodes[0]).toEqual({
kind: DbItemKind.LocalDatabase, kind: DbItemKind.LocalDatabase,
databaseName: dbConfig.databases.local.databases[0].name, databaseName: dbConfig.databases.local.databases[0].name,
dateAdded: dbConfig.databases.local.databases[0].dateAdded, dateAdded: dbConfig.databases.local.databases[0].dateAdded,
language: dbConfig.databases.local.databases[0].language, language: dbConfig.databases.local.databases[0].language,
storagePath: dbConfig.databases.local.databases[0].storagePath, storagePath: dbConfig.databases.local.databases[0].storagePath,
}); });
expect(localDatabaseNodes[1]).to.deep.equal({ expect(localDatabaseNodes[1]).toEqual({
kind: DbItemKind.LocalDatabase, kind: DbItemKind.LocalDatabase,
databaseName: dbConfig.databases.local.databases[1].name, databaseName: dbConfig.databases.local.databases[1].name,
dateAdded: dbConfig.databases.local.databases[1].dateAdded, dateAdded: dbConfig.databases.local.databases[1].dateAdded,

View File

@@ -1,11 +1,8 @@
import { expect } from "chai";
import "mocha";
import { formatDate } from "../../src/pure/date"; import { formatDate } from "../../src/pure/date";
describe("Date", () => { describe("Date", () => {
it("should return a formatted date", () => { it("should return a formatted date", () => {
expect(formatDate(new Date(1663326904000))).to.eq("Sep 16, 11:15 AM"); expect(formatDate(new Date(1663326904000))).toBe("Sep 16, 11:15 AM");
expect(formatDate(new Date(1631783704000))).to.eq("Sep 16, 2021, 9:15 AM"); expect(formatDate(new Date(1631783704000))).toBe("Sep 16, 2021, 9:15 AM");
}); });
}); });

View File

@@ -1,36 +1,31 @@
import "chai";
import "chai/register-should";
import "sinon-chai";
import * as sinon from "sinon";
import "mocha";
import { DisposableObject } from "../../src/pure/disposable-object"; import { DisposableObject } from "../../src/pure/disposable-object";
import { expect } from "chai";
describe("DisposableObject and DisposeHandler", () => { describe("DisposableObject and DisposeHandler", () => {
let disposable1: { dispose: sinon.SinonSpy }; const disposable1 = {
let disposable2: { dispose: sinon.SinonSpy }; dispose: jest.fn(),
let disposable3: { dispose: sinon.SinonSpy }; };
let disposable4: { dispose: sinon.SinonSpy }; const disposable2 = {
dispose: jest.fn(),
};
const disposable3 = {
dispose: jest.fn(),
};
const disposable4 = {
dispose: jest.fn(),
};
let disposableObject: any; let disposableObject: any;
let nestedDisposableObject: any; let nestedDisposableObject: any;
const sandbox = sinon.createSandbox();
beforeEach(() => { beforeEach(() => {
sandbox.restore(); disposable1.dispose.mockClear();
disposable1 = { dispose: sandbox.spy() }; disposable2.dispose.mockClear();
disposable2 = { dispose: sandbox.spy() }; disposable3.dispose.mockClear();
disposable3 = { dispose: sandbox.spy() }; disposable4.dispose.mockClear();
disposable4 = { dispose: sandbox.spy() };
disposableObject = new MyDisposableObject(); disposableObject = new MyDisposableObject();
nestedDisposableObject = new MyDisposableObject(); nestedDisposableObject = new MyDisposableObject();
}); });
afterEach(() => {
sandbox.restore();
});
it("should dispose tracked and pushed objects", () => { it("should dispose tracked and pushed objects", () => {
disposableObject.push(disposable1); disposableObject.push(disposable1);
disposableObject.push(disposable2); disposableObject.push(disposable2);
@@ -39,45 +34,47 @@ describe("DisposableObject and DisposeHandler", () => {
disposableObject.dispose(); disposableObject.dispose();
expect(disposable1.dispose).to.have.been.called; expect(disposable1.dispose).toBeCalled();
expect(disposable2.dispose).to.have.been.called; expect(disposable2.dispose).toBeCalled();
expect(disposable3.dispose).to.have.been.called; expect(disposable3.dispose).toBeCalled();
// pushed items must be called in reverse order // pushed items must be called in reverse order
sinon.assert.callOrder(disposable2.dispose, disposable1.dispose); expect(disposable2.dispose.mock.invocationCallOrder[0]).toBeLessThan(
disposable1.dispose.mock.invocationCallOrder[0],
);
// now that disposableObject has been disposed, subsequent disposals are // now that disposableObject has been disposed, subsequent disposals are
// no-ops // no-ops
disposable1.dispose.resetHistory(); disposable1.dispose.mockClear();
disposable2.dispose.resetHistory(); disposable2.dispose.mockClear();
disposable3.dispose.resetHistory(); disposable3.dispose.mockClear();
disposableObject.dispose(); disposableObject.dispose();
expect(disposable1.dispose).not.to.have.been.called; expect(disposable1.dispose).not.toBeCalled();
expect(disposable2.dispose).not.to.have.been.called; expect(disposable2.dispose).not.toBeCalled();
expect(disposable3.dispose).not.to.have.been.called; expect(disposable3.dispose).not.toBeCalled();
}); });
it("should dispose and stop tracking objects", () => { it("should dispose and stop tracking objects", () => {
disposableObject.track(disposable1); disposableObject.track(disposable1);
disposableObject.disposeAndStopTracking(disposable1); disposableObject.disposeAndStopTracking(disposable1);
expect(disposable1.dispose).to.have.been.called; expect(disposable1.dispose).toBeCalled();
disposable1.dispose.resetHistory(); disposable1.dispose.mockClear();
disposableObject.dispose(); disposableObject.dispose();
expect(disposable1.dispose).not.to.have.been.called; expect(disposable1.dispose).not.toBeCalled();
}); });
it("should avoid disposing an object that is not tracked", () => { it("should avoid disposing an object that is not tracked", () => {
disposableObject.push(disposable1); disposableObject.push(disposable1);
disposableObject.disposeAndStopTracking(disposable1); disposableObject.disposeAndStopTracking(disposable1);
expect(disposable1.dispose).not.to.have.been.called; expect(disposable1.dispose).not.toBeCalled();
disposableObject.dispose(); disposableObject.dispose();
expect(disposable1.dispose).to.have.been.called; expect(disposable1.dispose).toBeCalled();
}); });
it("ahould use a dispose handler", () => { it("ahould use a dispose handler", () => {
@@ -94,24 +91,24 @@ describe("DisposableObject and DisposeHandler", () => {
disposableObject.dispose(handler); disposableObject.dispose(handler);
expect(disposable1.dispose).to.have.been.called; expect(disposable1.dispose).toBeCalled();
expect(disposable2.dispose).not.to.have.been.called; expect(disposable2.dispose).not.toBeCalled();
expect(disposable3.dispose).to.have.been.called; expect(disposable3.dispose).toBeCalled();
expect(disposable4.dispose).not.to.have.been.called; expect(disposable4.dispose).not.toBeCalled();
// now that disposableObject has been disposed, subsequent disposals are // now that disposableObject has been disposed, subsequent disposals are
// no-ops // no-ops
disposable1.dispose.resetHistory(); disposable1.dispose.mockClear();
disposable2.dispose.resetHistory(); disposable2.dispose.mockClear();
disposable3.dispose.resetHistory(); disposable3.dispose.mockClear();
disposable4.dispose.resetHistory(); disposable4.dispose.mockClear();
disposableObject.dispose(); disposableObject.dispose();
expect(disposable1.dispose).not.to.have.been.called; expect(disposable1.dispose).not.toBeCalled();
expect(disposable2.dispose).not.to.have.been.called; expect(disposable2.dispose).not.toBeCalled();
expect(disposable3.dispose).not.to.have.been.called; expect(disposable3.dispose).not.toBeCalled();
expect(disposable4.dispose).not.to.have.been.called; expect(disposable4.dispose).not.toBeCalled();
}); });
class MyDisposableObject extends DisposableObject { class MyDisposableObject extends DisposableObject {

View File

@@ -1,45 +1,36 @@
import * as chai from "chai";
import "chai/register-should";
import * as sinonChai from "sinon-chai";
import "mocha";
import * as path from "path"; import * as path from "path";
import * as chaiAsPromised from "chai-as-promised";
import { import {
gatherQlFiles, gatherQlFiles,
getDirectoryNamesInsidePath, getDirectoryNamesInsidePath,
} from "../../src/pure/files"; } from "../../src/pure/files";
chai.use(sinonChai);
chai.use(chaiAsPromised);
const expect = chai.expect;
describe("files", () => { describe("files", () => {
const dataDir = path.join(path.dirname(__dirname), "data"); const dataDir = path.join(path.dirname(__dirname), "data");
const data2Dir = path.join(path.dirname(__dirname), "data2"); const data2Dir = path.join(path.dirname(__dirname), "data2");
describe("gatherQlFiles", async () => { describe("gatherQlFiles", () => {
it("should find one file", async () => { it("should find one file", async () => {
const singleFile = path.join(dataDir, "query.ql"); const singleFile = path.join(dataDir, "query.ql");
const result = await gatherQlFiles([singleFile]); const result = await gatherQlFiles([singleFile]);
expect(result).to.deep.equal([[singleFile], false]); expect(result).toEqual([[singleFile], false]);
}); });
it("should find no files", async () => { it("should find no files", async () => {
const result = await gatherQlFiles([]); const result = await gatherQlFiles([]);
expect(result).to.deep.equal([[], false]); expect(result).toEqual([[], false]);
}); });
it("should find no files", async () => { it("should find no files", async () => {
const singleFile = path.join(dataDir, "library.qll"); const singleFile = path.join(dataDir, "library.qll");
const result = await gatherQlFiles([singleFile]); const result = await gatherQlFiles([singleFile]);
expect(result).to.deep.equal([[], false]); expect(result).toEqual([[], false]);
}); });
it("should handle invalid file", async () => { it("should handle invalid file", async () => {
const singleFile = path.join(dataDir, "xxx"); const singleFile = path.join(dataDir, "xxx");
const result = await gatherQlFiles([singleFile]); const result = await gatherQlFiles([singleFile]);
expect(result).to.deep.equal([[], false]); expect(result).toEqual([[], false]);
}); });
it("should find two files", async () => { it("should find two files", async () => {
@@ -54,7 +45,7 @@ describe("files", () => {
notFile, notFile,
invalidFile, invalidFile,
]); ]);
expect(result.sort()).to.deep.equal([[singleFile, otherFile], false]); expect(result.sort()).toEqual([[singleFile, otherFile], false]);
}); });
it("should scan a directory", async () => { it("should scan a directory", async () => {
@@ -63,7 +54,7 @@ describe("files", () => {
const file3 = path.join(dataDir, "query.ql"); const file3 = path.join(dataDir, "query.ql");
const result = await gatherQlFiles([dataDir]); const result = await gatherQlFiles([dataDir]);
expect(result.sort()).to.deep.equal([[file1, file2, file3], true]); expect(result.sort()).toEqual([[file1, file2, file3], true]);
}); });
it("should scan a directory and some files", async () => { it("should scan a directory and some files", async () => {
@@ -72,7 +63,7 @@ describe("files", () => {
const empty2File = path.join(data2Dir, "sub-folder", "empty2.ql"); const empty2File = path.join(data2Dir, "sub-folder", "empty2.ql");
const result = await gatherQlFiles([singleFile, data2Dir]); const result = await gatherQlFiles([singleFile, data2Dir]);
expect(result.sort()).to.deep.equal([ expect(result.sort()).toEqual([
[singleFile, empty1File, empty2File], [singleFile, empty1File, empty2File],
true, true,
]); ]);
@@ -85,27 +76,27 @@ describe("files", () => {
const result = await gatherQlFiles([file1, dataDir, file3]); const result = await gatherQlFiles([file1, dataDir, file3]);
result[0].sort(); result[0].sort();
expect(result.sort()).to.deep.equal([[file1, file2, file3], true]); expect(result.sort()).toEqual([[file1, file2, file3], true]);
}); });
}); });
describe("getDirectoryNamesInsidePath", async () => { describe("getDirectoryNamesInsidePath", () => {
it("should fail if path does not exist", async () => { it("should fail if path does not exist", async () => {
await expect( await expect(getDirectoryNamesInsidePath("xxx")).rejects.toThrow(
getDirectoryNamesInsidePath("xxx"), "Path does not exist: xxx",
).to.eventually.be.rejectedWith("Path does not exist: xxx"); );
}); });
it("should fail if path is not a directory", async () => { it("should fail if path is not a directory", async () => {
const filePath = path.join(data2Dir, "empty1.ql"); const filePath = path.join(data2Dir, "empty1.ql");
await expect( await expect(getDirectoryNamesInsidePath(filePath)).rejects.toThrow(
getDirectoryNamesInsidePath(filePath), `Path is not a directory: ${filePath}`,
).to.eventually.be.rejectedWith(`Path is not a directory: ${filePath}`); );
}); });
it("should find sub-folders", async () => { it("should find sub-folders", async () => {
const result = await getDirectoryNamesInsidePath(data2Dir); const result = await getDirectoryNamesInsidePath(data2Dir);
expect(result).to.deep.equal(["sub-folder"]); expect(result).toEqual(["sub-folder"]);
}); });
}); });
}); });

View File

@@ -1,13 +1,10 @@
import { fail } from "assert";
import { expect } from "chai";
import { asyncFilter, getErrorMessage } from "../../src/pure/helpers-pure"; import { asyncFilter, getErrorMessage } from "../../src/pure/helpers-pure";
describe("helpers-pure", () => { describe("helpers-pure", () => {
it("should filter asynchronously", async () => { it("should filter asynchronously", async () => {
expect( expect(await asyncFilter([1, 2, 3], (x) => Promise.resolve(x > 2))).toEqual(
await asyncFilter([1, 2, 3], (x) => Promise.resolve(x > 2)), [3],
).to.deep.eq([3]); );
}); });
it("should throw on error when filtering", async () => { it("should throw on error when filtering", async () => {
@@ -18,7 +15,7 @@ describe("helpers-pure", () => {
await asyncFilter([1, 2, 3], rejects); await asyncFilter([1, 2, 3], rejects);
fail("Should have thrown"); fail("Should have thrown");
} catch (e) { } catch (e) {
expect(getErrorMessage(e)).to.eq("opps"); expect(getErrorMessage(e)).toBe("opps");
} }
}); });
}); });

View File

@@ -1,26 +1,24 @@
import { expect } from "chai";
import "mocha";
import { import {
tryGetRemoteLocation, tryGetRemoteLocation,
tryGetResolvableLocation, tryGetResolvableLocation,
} from "../../src/pure/bqrs-utils"; } from "../../src/pure/bqrs-utils";
describe("processing string locations", function () { describe("processing string locations", () => {
it("should detect Windows whole-file locations", function () { it("should detect Windows whole-file locations", () => {
const loc = "file://C:/path/to/file.ext:0:0:0:0"; const loc = "file://C:/path/to/file.ext:0:0:0:0";
const wholeFileLoc = tryGetResolvableLocation(loc); const wholeFileLoc = tryGetResolvableLocation(loc);
expect(wholeFileLoc).to.eql({ uri: "C:/path/to/file.ext" }); expect(wholeFileLoc).toEqual({ uri: "C:/path/to/file.ext" });
}); });
it("should detect Unix whole-file locations", function () { it("should detect Unix whole-file locations", () => {
const loc = "file:///path/to/file.ext:0:0:0:0"; const loc = "file:///path/to/file.ext:0:0:0:0";
const wholeFileLoc = tryGetResolvableLocation(loc); const wholeFileLoc = tryGetResolvableLocation(loc);
expect(wholeFileLoc).to.eql({ uri: "/path/to/file.ext" }); expect(wholeFileLoc).toEqual({ uri: "/path/to/file.ext" });
}); });
it("should detect Unix 5-part locations", function () { it("should detect Unix 5-part locations", () => {
const loc = "file:///path/to/file.ext:1:2:3:4"; const loc = "file:///path/to/file.ext:1:2:3:4";
const wholeFileLoc = tryGetResolvableLocation(loc); const wholeFileLoc = tryGetResolvableLocation(loc);
expect(wholeFileLoc).to.eql({ expect(wholeFileLoc).toEqual({
uri: "/path/to/file.ext", uri: "/path/to/file.ext",
startLine: 1, startLine: 1,
startColumn: 2, startColumn: 2,
@@ -28,16 +26,16 @@ describe("processing string locations", function () {
endColumn: 4, endColumn: 4,
}); });
}); });
it("should ignore other string locations", function () { it("should ignore other string locations", () => {
for (const loc of ["file:///path/to/file.ext", "I am not a location"]) { for (const loc of ["file:///path/to/file.ext", "I am not a location"]) {
const wholeFileLoc = tryGetResolvableLocation(loc); const wholeFileLoc = tryGetResolvableLocation(loc);
expect(wholeFileLoc).to.be.undefined; expect(wholeFileLoc).toBeUndefined();
} }
}); });
}); });
describe("getting links to remote (GitHub) locations", function () { describe("getting links to remote (GitHub) locations", () => {
it("should return undefined if resolvableLocation is undefined", function () { it("should return undefined if resolvableLocation is undefined", () => {
const loc = "not a location"; const loc = "not a location";
const fileLinkPrefix = ""; const fileLinkPrefix = "";
const sourceLocationPrefix = ""; const sourceLocationPrefix = "";
@@ -48,10 +46,10 @@ describe("getting links to remote (GitHub) locations", function () {
sourceLocationPrefix, sourceLocationPrefix,
); );
expect(link).to.be.undefined; expect(link).toBeUndefined();
}); });
it("should return undefined if resolvableLocation has the wrong format", function () { it("should return undefined if resolvableLocation has the wrong format", () => {
const loc = { const loc = {
uri: "file:/path/to/file.ext", uri: "file:/path/to/file.ext",
startLine: 194, startLine: 194,
@@ -68,10 +66,10 @@ describe("getting links to remote (GitHub) locations", function () {
sourceLocationPrefix, sourceLocationPrefix,
); );
expect(link).to.be.undefined; expect(link).toBeUndefined();
}); });
it("should return a remote file ref if the sourceLocationPrefix and resolvableLocation match up", function () { it("should return a remote file ref if the sourceLocationPrefix and resolvableLocation match up", () => {
const loc = { const loc = {
uri: "file:/home/foo/bar/path/to/file.ext", uri: "file:/home/foo/bar/path/to/file.ext",
startLine: 194, startLine: 194,
@@ -88,12 +86,12 @@ describe("getting links to remote (GitHub) locations", function () {
sourceLocationPrefix, sourceLocationPrefix,
); );
expect(link).to.eql( expect(link).toEqual(
"https://github.com/owner/repo/blob/sha1234/path/to/file.ext#L194-L237", "https://github.com/owner/repo/blob/sha1234/path/to/file.ext#L194-L237",
); );
}); });
it("should return undefined if the sourceLocationPrefix is missing and resolvableLocation doesn't match the default format", function () { it("should return undefined if the sourceLocationPrefix is missing and resolvableLocation doesn't match the default format", () => {
const loc = { const loc = {
uri: "file:/home/foo/bar/path/to/file.ext", uri: "file:/home/foo/bar/path/to/file.ext",
startLine: 194, startLine: 194,
@@ -110,10 +108,10 @@ describe("getting links to remote (GitHub) locations", function () {
sourceLocationPrefix, sourceLocationPrefix,
); );
expect(link).to.eql(undefined); expect(link).toBeUndefined();
}); });
it("should return a remote file ref if the sourceLocationPrefix is missing, but the resolvableLocation matches the default format", function () { it("should return a remote file ref if the sourceLocationPrefix is missing, but the resolvableLocation matches the default format", () => {
const loc = { const loc = {
uri: "file:/home/runner/work/foo/bar/path/to/file.ext", uri: "file:/home/runner/work/foo/bar/path/to/file.ext",
startLine: 194, startLine: 194,
@@ -130,7 +128,7 @@ describe("getting links to remote (GitHub) locations", function () {
sourceLocationPrefix, sourceLocationPrefix,
); );
expect(link).to.eql( expect(link).toEqual(
"https://github.com/owner/repo/blob/sha1234/path/to/file.ext#L194-L237", "https://github.com/owner/repo/blob/sha1234/path/to/file.ext#L194-L237",
); );
}); });

View File

@@ -1,5 +1,3 @@
import { expect } from "chai";
import "mocha";
import { import {
EvaluationLogProblemReporter, EvaluationLogProblemReporter,
EvaluationLogScannerSet, EvaluationLogScannerSet,
@@ -36,8 +34,8 @@ class TestProblemReporter implements EvaluationLogProblemReporter {
} }
} }
describe("log scanners", function () { describe("log scanners", () => {
it("should detect bad join orders", async function () { it("should detect bad join orders", async () => {
const scanners = new EvaluationLogScannerSet(); const scanners = new EvaluationLogScannerSet();
scanners.registerLogScannerProvider(new JoinOrderScannerProvider(() => 50)); scanners.registerLogScannerProvider(new JoinOrderScannerProvider(() => 50));
const summaryPath = path.join( const summaryPath = path.join(
@@ -47,13 +45,13 @@ describe("log scanners", function () {
const problemReporter = new TestProblemReporter(); const problemReporter = new TestProblemReporter();
await scanners.scanLog(summaryPath, problemReporter); await scanners.scanLog(summaryPath, problemReporter);
expect(problemReporter.problems.length).to.equal(1); expect(problemReporter.problems.length).toBe(1);
expect(problemReporter.problems[0].predicateName).to.equal("#select#ff"); expect(problemReporter.problems[0].predicateName).toBe("#select#ff");
expect(problemReporter.problems[0].raHash).to.equal( expect(problemReporter.problems[0].raHash).toBe(
"1bb43c97jpmuh8r2v0f9hktim63", "1bb43c97jpmuh8r2v0f9hktim63",
); );
expect(problemReporter.problems[0].iteration).to.equal(0); expect(problemReporter.problems[0].iteration).toBe(0);
expect(problemReporter.problems[0].message).to.equal( expect(problemReporter.problems[0].message).toBe(
"Relation '#select#ff' has an inefficient join order. Its join order metric is 4961.83, which is larger than the threshold of 50.00.", "Relation '#select#ff' has an inefficient join order. Its join order metric is 4961.83, which is larger than the threshold of 50.00.",
); );
}); });

View File

@@ -1,49 +1,47 @@
import { expect } from "chai";
import * as path from "path"; import * as path from "path";
import "mocha";
import { parseViewerData } from "../../src/pure/log-summary-parser"; import { parseViewerData } from "../../src/pure/log-summary-parser";
describe("Evaluator log summary tests", async function () { describe("Evaluator log summary tests", () => {
describe("for a valid summary text", async function () { describe("for a valid summary text", () => {
it("should return only valid EvalLogData objects", async function () { it("should return only valid EvalLogData objects", async () => {
const validSummaryPath = path.join( const validSummaryPath = path.join(
__dirname, __dirname,
"evaluator-log-summaries/valid-summary.jsonl", "evaluator-log-summaries/valid-summary.jsonl",
); );
const logDataItems = await parseViewerData(validSummaryPath); const logDataItems = await parseViewerData(validSummaryPath);
expect(logDataItems).to.not.be.undefined; expect(logDataItems).toBeDefined();
expect(logDataItems.length).to.eq(3); expect(logDataItems.length).toBe(3);
for (const item of logDataItems) { for (const item of logDataItems) {
expect(item.predicateName).to.not.be.empty; expect(item.predicateName).not.toHaveLength(0);
expect(item.millis).to.be.a("number"); expect(item.millis).toEqual(expect.any(Number));
expect(item.resultSize).to.be.a("number"); expect(item.resultSize).toEqual(expect.any(Number));
expect(item.ra).to.not.be.undefined; expect(item.ra).toBeDefined();
expect(item.ra).to.not.be.empty; expect(Object.keys(item.ra)).not.toHaveLength(0);
for (const [pipeline, steps] of Object.entries(item.ra)) { for (const [pipeline, steps] of Object.entries(item.ra)) {
expect(pipeline).to.not.be.empty; expect(Object.keys(pipeline)).not.toHaveLength(0);
expect(steps).to.not.be.undefined; expect(steps).toBeDefined();
expect(steps.length).to.be.greaterThan(0); expect(steps.length).toBeGreaterThan(0);
} }
} }
}); });
it("should not parse a summary header object", async function () { it("should not parse a summary header object", async () => {
const invalidHeaderPath = path.join( const invalidHeaderPath = path.join(
__dirname, __dirname,
"evaluator-log-summaries/invalid-header.jsonl", "evaluator-log-summaries/invalid-header.jsonl",
); );
const logDataItems = await parseViewerData(invalidHeaderPath); const logDataItems = await parseViewerData(invalidHeaderPath);
expect(logDataItems.length).to.eq(0); expect(logDataItems.length).toBe(0);
}); });
it("should not parse a log event missing RA or millis fields", async function () { it("should not parse a log event missing RA or millis fields", async () => {
const invalidSummaryPath = path.join( const invalidSummaryPath = path.join(
__dirname, __dirname,
"evaluator-log-summaries/invalid-summary.jsonl", "evaluator-log-summaries/invalid-summary.jsonl",
); );
const logDataItems = await parseViewerData(invalidSummaryPath); const logDataItems = await parseViewerData(invalidSummaryPath);
expect(logDataItems.length).to.eq(0); expect(logDataItems.length).toBe(0);
}); });
}); });
}); });

View File

@@ -1,26 +1,37 @@
import "chai/register-should";
import * as chai from "chai";
import * as fs from "fs-extra"; import * as fs from "fs-extra";
import * as path from "path"; import * as path from "path";
import * as tmp from "tmp"; import * as tmp from "tmp";
import "mocha"; import { OutputChannelLogger } from "../../src/logging";
import * as sinonChai from "sinon-chai";
import * as sinon from "sinon";
import * as pq from "proxyquire";
const proxyquire = pq.noPreserveCache().noCallThru(); jest.setTimeout(999999);
chai.use(sinonChai);
const expect = chai.expect; jest.mock(
"vscode",
() => {
const mockOutputChannel = {
append: jest.fn(),
appendLine: jest.fn(),
show: jest.fn(),
dispose: jest.fn(),
};
return {
window: {
createOutputChannel: () => mockOutputChannel,
},
mockOutputChannel,
};
},
{
virtual: true,
},
);
describe("OutputChannelLogger tests", function () { describe("OutputChannelLogger tests", function () {
this.timeout(999999);
let OutputChannelLogger;
const tempFolders: Record<string, tmp.DirResult> = {}; const tempFolders: Record<string, tmp.DirResult> = {};
let logger: any; let logger: any;
let mockOutputChannel: Record<string, sinon.SinonStub>;
beforeEach(async () => { beforeEach(async () => {
OutputChannelLogger = createModule().OutputChannelLogger;
tempFolders.globalStoragePath = tmp.dirSync({ tempFolders.globalStoragePath = tmp.dirSync({
prefix: "logging-tests-global", prefix: "logging-tests-global",
}); });
@@ -35,21 +46,22 @@ describe("OutputChannelLogger tests", function () {
tempFolders.storagePath.removeCallback(); tempFolders.storagePath.removeCallback();
}); });
// eslint-disable-next-line @typescript-eslint/no-var-requires
const mockOutputChannel = require("vscode").mockOutputChannel;
it("should log to the output channel", async () => { it("should log to the output channel", async () => {
await logger.log("xxx"); await logger.log("xxx");
expect(mockOutputChannel.appendLine).to.have.been.calledWith("xxx"); expect(mockOutputChannel.appendLine).toBeCalledWith("xxx");
expect(mockOutputChannel.append).not.to.have.been.calledWith("xxx"); expect(mockOutputChannel.append).not.toBeCalledWith("xxx");
await logger.log("yyy", { trailingNewline: false }); await logger.log("yyy", { trailingNewline: false });
expect(mockOutputChannel.appendLine).not.to.have.been.calledWith("yyy"); expect(mockOutputChannel.appendLine).not.toBeCalledWith("yyy");
expect(mockOutputChannel.append).to.have.been.calledWith("yyy"); expect(mockOutputChannel.append).toBeCalledWith("yyy");
await logger.log("zzz", createLogOptions("hucairz")); await logger.log("zzz", createLogOptions("hucairz"));
// should have created 1 side log // should have created 1 side log
expect(fs.readdirSync(tempFolders.storagePath.name)).to.deep.equal([ expect(fs.readdirSync(tempFolders.storagePath.name)).toEqual(["hucairz"]);
"hucairz",
]);
}); });
it("should create a side log", async () => { it("should create a side log", async () => {
@@ -59,42 +71,20 @@ describe("OutputChannelLogger tests", function () {
await logger.log("aaa"); await logger.log("aaa");
// expect 2 side logs // expect 2 side logs
expect(fs.readdirSync(tempFolders.storagePath.name).length).to.equal(2); expect(fs.readdirSync(tempFolders.storagePath.name).length).toBe(2);
// contents // contents
expect( expect(
fs.readFileSync(path.join(tempFolders.storagePath.name, "first"), "utf8"), fs.readFileSync(path.join(tempFolders.storagePath.name, "first"), "utf8"),
).to.equal("xxx\nzzz"); ).toBe("xxx\nzzz");
expect( expect(
fs.readFileSync( fs.readFileSync(
path.join(tempFolders.storagePath.name, "second"), path.join(tempFolders.storagePath.name, "second"),
"utf8", "utf8",
), ),
).to.equal("yyy\n"); ).toBe("yyy\n");
}); });
function createModule(): any {
mockOutputChannel = {
append: sinon.stub(),
appendLine: sinon.stub(),
show: sinon.stub(),
dispose: sinon.stub(),
};
return proxyquire("../../src/logging", {
vscode: {
window: {
createOutputChannel: () => mockOutputChannel,
},
Disposable: function () {
/**/
},
"@noCallThru": true,
"@global": true,
},
});
}
function createLogOptions( function createLogOptions(
additionalLogLocation: string, additionalLogLocation: string,
trailingNewline?: boolean, trailingNewline?: boolean,

View File

@@ -1,12 +1,9 @@
import { expect } from "chai";
import "mocha";
import { formatDecimal } from "../../src/pure/number"; import { formatDecimal } from "../../src/pure/number";
describe("Number", () => { describe("Number", () => {
it("should return a formatted decimal", () => { it("should return a formatted decimal", () => {
expect(formatDecimal(9)).to.eq("9"); expect(formatDecimal(9)).toBe("9");
expect(formatDecimal(10_000)).to.eq("10,000"); expect(formatDecimal(10_000)).toBe("10,000");
expect(formatDecimal(100_000_000_000)).to.eq("100,000,000,000"); expect(formatDecimal(100_000_000_000)).toBe("100,000,000,000");
}); });
}); });

View File

@@ -1,5 +1,3 @@
import { expect } from "chai";
import * as Octokit from "@octokit/rest"; import * as Octokit from "@octokit/rest";
import { retry } from "@octokit/plugin-retry"; import { retry } from "@octokit/plugin-retry";
@@ -25,9 +23,9 @@ const mockCredentials = {
} as unknown as Credentials; } as unknown as Credentials;
const mockServer = new MockGitHubApiServer(); const mockServer = new MockGitHubApiServer();
before(() => mockServer.startServer()); beforeAll(() => mockServer.startServer());
afterEach(() => mockServer.unloadScenario()); afterEach(() => mockServer.unloadScenario());
after(() => mockServer.stopServer()); afterAll(() => mockServer.stopServer());
const controllerRepoId = variantAnalysisJson.response.body.controller_repo.id; const controllerRepoId = variantAnalysisJson.response.body.controller_repo.id;
const variantAnalysisId = variantAnalysisJson.response.body.id; const variantAnalysisId = variantAnalysisJson.response.body.id;
@@ -42,8 +40,8 @@ describe("submitVariantAnalysis", () => {
createMockSubmission(), createMockSubmission(),
); );
expect(result).not.to.be.undefined; expect(result).toBeDefined();
expect(result.id).to.eq(variantAnalysisId); expect(result.id).toBe(variantAnalysisId);
}); });
}); });
@@ -57,8 +55,8 @@ describe("getVariantAnalysis", () => {
variantAnalysisId, variantAnalysisId,
); );
expect(result).not.to.be.undefined; expect(result).toBeDefined();
expect(result.status).not.to.be.undefined; expect(result.status).toBeDefined();
}); });
}); });
@@ -73,8 +71,8 @@ describe("getVariantAnalysisRepo", () => {
repoTaskId, repoTaskId,
); );
expect(result).not.to.be.undefined; expect(result).toBeDefined();
expect(result.repository.id).to.eq(repoTaskId); expect(result.repository.id).toBe(repoTaskId);
}); });
}); });
@@ -87,9 +85,9 @@ describe("getVariantAnalysisRepoResult", () => {
`https://objects-origin.githubusercontent.com/codeql-query-console/codeql-variant-analysis-repo-tasks/${variantAnalysisId}/${repoTaskId}/${faker.datatype.uuid()}`, `https://objects-origin.githubusercontent.com/codeql-query-console/codeql-variant-analysis-repo-tasks/${variantAnalysisId}/${repoTaskId}/${faker.datatype.uuid()}`,
); );
expect(result).not.to.be.undefined; expect(result).toBeDefined();
expect(result).to.be.an("ArrayBuffer"); expect(result).toBeInstanceOf(ArrayBuffer);
expect(result.byteLength).to.eq( expect(result.byteLength).toBe(
variantAnalysisRepoJson.response.body.artifact_size_in_bytes, variantAnalysisRepoJson.response.body.artifact_size_in_bytes,
); );
}); });
@@ -105,7 +103,7 @@ describe("getRepositoryFromNwo", () => {
"mrva-demo-controller-repo", "mrva-demo-controller-repo",
); );
expect(result).not.to.be.undefined; expect(result).toBeDefined();
expect(result.id).to.eq(getRepoJson.response.body.id); expect(result.id).toBe(getRepoJson.response.body.id);
}); });
}); });

View File

@@ -1,4 +1,3 @@
import { expect } from "chai";
import * as path from "path"; import * as path from "path";
import * as fs from "fs-extra"; import * as fs from "fs-extra";
import { import {
@@ -6,9 +5,9 @@ import {
MarkdownFile, MarkdownFile,
} from "../../../../src/remote-queries/remote-queries-markdown-generation"; } from "../../../../src/remote-queries/remote-queries-markdown-generation";
describe("markdown generation", async function () { describe("markdown generation", () => {
describe("for path-problem query", async function () { describe("for path-problem query", () => {
it("should generate markdown file for each repo with results", async function () { it("should generate markdown file for each repo with results", async () => {
const pathProblemQuery = JSON.parse( const pathProblemQuery = JSON.parse(
await fs.readFile( await fs.readFile(
path.join( path.join(
@@ -42,8 +41,8 @@ describe("markdown generation", async function () {
}); });
}); });
describe("for problem query", async function () { describe("for problem query", () => {
it("should generate markdown file for each repo with results", async function () { it("should generate markdown file for each repo with results", async () => {
const problemQuery = JSON.parse( const problemQuery = JSON.parse(
await fs.readFile( await fs.readFile(
path.join( path.join(
@@ -76,8 +75,8 @@ describe("markdown generation", async function () {
}); });
}); });
describe("for non-alert query", async function () { describe("for non-alert query", () => {
it("should generate markdown file for each repo with results", async function () { it("should generate markdown file for each repo with results", async () => {
const query = JSON.parse( const query = JSON.parse(
await fs.readFile( await fs.readFile(
path.join(__dirname, "data/raw-results/query.json"), path.join(__dirname, "data/raw-results/query.json"),
@@ -118,16 +117,16 @@ async function checkGeneratedMarkdown(
const expectedDir = path.join(__dirname, testDataBasePath); const expectedDir = path.join(__dirname, testDataBasePath);
const expectedFiles = await fs.readdir(expectedDir); const expectedFiles = await fs.readdir(expectedDir);
expect(actualFiles.length).to.equal(expectedFiles.length); expect(actualFiles.length).toBe(expectedFiles.length);
for (const expectedFile of expectedFiles) { for (const expectedFile of expectedFiles) {
const actualFile = actualFiles.find( const actualFile = actualFiles.find(
(f) => `${f.fileName}.md` === expectedFile, (f) => `${f.fileName}.md` === expectedFile,
); );
expect(actualFile).to.not.be.undefined; expect(actualFile).toBeDefined();
const expectedContent = await readTestOutputFile( const expectedContent = await readTestOutputFile(
path.join(testDataBasePath, expectedFile), path.join(testDataBasePath, expectedFile),
); );
expect(actualFile!.content.join("\n")).to.equal(expectedContent); expect(actualFile!.content.join("\n")).toBe(expectedContent);
} }
} }

View File

@@ -1,4 +1,3 @@
import { expect } from "chai";
import { faker } from "@faker-js/faker"; import { faker } from "@faker-js/faker";
import { VariantAnalysisScannedRepository as ApiVariantAnalysisScannedRepository } from "../../../src/remote-queries/gh-api/variant-analysis"; import { VariantAnalysisScannedRepository as ApiVariantAnalysisScannedRepository } from "../../../src/remote-queries/gh-api/variant-analysis";
import { import {
@@ -20,7 +19,7 @@ import { createMockApiResponse } from "../../../src/vscode-tests/factories/remot
import { createMockSubmission } from "../../../src/vscode-tests/factories/remote-queries/shared/variant-analysis-submission"; import { createMockSubmission } from "../../../src/vscode-tests/factories/remote-queries/shared/variant-analysis-submission";
import { createMockVariantAnalysisRepoTask } from "../../../src/vscode-tests/factories/remote-queries/gh-api/variant-analysis-repo-task"; import { createMockVariantAnalysisRepoTask } from "../../../src/vscode-tests/factories/remote-queries/gh-api/variant-analysis-repo-task";
describe(processVariantAnalysis.name, function () { describe(processVariantAnalysis.name, () => {
const scannedRepos = createMockScannedRepos(); const scannedRepos = createMockScannedRepos();
const skippedRepos = createMockSkippedRepos(); const skippedRepos = createMockSkippedRepos();
const mockApiResponse = createMockApiResponse( const mockApiResponse = createMockApiResponse(
@@ -40,7 +39,7 @@ describe(processVariantAnalysis.name, function () {
over_limit_repos, over_limit_repos,
} = skippedRepos; } = skippedRepos;
expect(result).to.eql({ expect(result).toEqual({
id: mockApiResponse.id, id: mockApiResponse.id,
controllerRepo: { controllerRepo: {
id: mockApiResponse.controller_repo.id, id: mockApiResponse.controller_repo.id,
@@ -179,7 +178,7 @@ describe(processVariantAnalysisRepositoryTask.name, () => {
const mockApiResponse = createMockVariantAnalysisRepoTask(); const mockApiResponse = createMockVariantAnalysisRepoTask();
it("should return the correct result", () => { it("should return the correct result", () => {
expect(processVariantAnalysisRepositoryTask(mockApiResponse)).to.deep.eq({ expect(processVariantAnalysisRepositoryTask(mockApiResponse)).toEqual({
repository: { repository: {
id: mockApiResponse.repository.id, id: mockApiResponse.repository.id,
fullName: mockApiResponse.repository.full_name, fullName: mockApiResponse.repository.full_name,
@@ -204,7 +203,7 @@ describe(processScannedRepository.name, () => {
); );
it("should return the correct result", () => { it("should return the correct result", () => {
expect(processScannedRepository(mockApiResponse)).to.deep.eq({ expect(processScannedRepository(mockApiResponse)).toEqual({
repository: { repository: {
id: mockApiResponse.repository.id, id: mockApiResponse.repository.id,
fullName: mockApiResponse.repository.full_name, fullName: mockApiResponse.repository.full_name,

View File

@@ -1,6 +1,3 @@
import "mocha";
import * as chaiAsPromised from "chai-as-promised";
import * as chai from "chai";
import * as sarif from "sarif"; import * as sarif from "sarif";
import { import {
extractAnalysisAlerts, extractAnalysisAlerts,
@@ -12,9 +9,6 @@ import {
AnalysisMessageLocationToken, AnalysisMessageLocationToken,
} from "../../src/remote-queries/shared/analysis-result"; } from "../../src/remote-queries/shared/analysis-result";
chai.use(chaiAsPromised);
const expect = chai.expect;
describe("SARIF processing", () => { describe("SARIF processing", () => {
describe("tryGetRule", () => { describe("tryGetRule", () => {
describe("Using the tool driver", () => { describe("Using the tool driver", () => {
@@ -30,7 +24,7 @@ describe("SARIF processing", () => {
const rule = tryGetRule(sarifRun, result); const rule = tryGetRule(sarifRun, result);
expect(rule).to.be.undefined; expect(rule).toBeUndefined();
}); });
it("should return undefined if rule missing from tool driver", () => { it("should return undefined if rule missing from tool driver", () => {
@@ -60,7 +54,7 @@ describe("SARIF processing", () => {
const rule = tryGetRule(sarifRun, result); const rule = tryGetRule(sarifRun, result);
expect(rule).to.be.undefined; expect(rule).toBeUndefined();
}); });
it("should return rule if it has been set on the tool driver", () => { it("should return rule if it has been set on the tool driver", () => {
@@ -87,8 +81,8 @@ describe("SARIF processing", () => {
const rule = tryGetRule(sarifRun, result); const rule = tryGetRule(sarifRun, result);
expect(rule).to.be.ok; expect(rule).toBeTruthy();
expect(rule!.id).to.equal(result!.rule!.id); expect(rule!.id).toBe(result!.rule!.id);
}); });
}); });
@@ -136,7 +130,7 @@ describe("SARIF processing", () => {
const rule = tryGetRule(sarifRun, result); const rule = tryGetRule(sarifRun, result);
expect(rule).to.be.undefined; expect(rule).toBeUndefined();
}); });
it("should return undefined if tool component index not set", () => { it("should return undefined if tool component index not set", () => {
@@ -182,7 +176,7 @@ describe("SARIF processing", () => {
const rule = tryGetRule(sarifRun, result); const rule = tryGetRule(sarifRun, result);
expect(rule).to.be.undefined; expect(rule).toBeUndefined();
}); });
it("should return undefined if tool extensions not set", () => { it("should return undefined if tool extensions not set", () => {
@@ -205,7 +199,7 @@ describe("SARIF processing", () => {
const rule = tryGetRule(sarifRun, result); const rule = tryGetRule(sarifRun, result);
expect(rule).to.be.undefined; expect(rule).toBeUndefined();
}); });
it("should return undefined if tool extensions do not contain index", () => { it("should return undefined if tool extensions do not contain index", () => {
@@ -241,7 +235,7 @@ describe("SARIF processing", () => {
const rule = tryGetRule(sarifRun, result); const rule = tryGetRule(sarifRun, result);
expect(rule).to.be.undefined; expect(rule).toBeUndefined();
}); });
it("should return rule if all information is defined", () => { it("should return rule if all information is defined", () => {
@@ -288,8 +282,8 @@ describe("SARIF processing", () => {
const rule = tryGetRule(sarifRun, result); const rule = tryGetRule(sarifRun, result);
expect(rule).to.be.ok; expect(rule).toBeTruthy();
expect(rule!.id).to.equal("D"); expect(rule!.id).toBe("D");
}); });
}); });
}); });
@@ -308,7 +302,7 @@ describe("SARIF processing", () => {
} as sarif.Run; } as sarif.Run;
const severity = tryGetSeverity(sarifRun, result, rule); const severity = tryGetSeverity(sarifRun, result, rule);
expect(severity).to.be.undefined; expect(severity).toBeUndefined();
}); });
it("should return undefined if severity not set on rule", () => { it("should return undefined if severity not set on rule", () => {
@@ -336,7 +330,7 @@ describe("SARIF processing", () => {
} as sarif.Run; } as sarif.Run;
const severity = tryGetSeverity(sarifRun, result, rule); const severity = tryGetSeverity(sarifRun, result, rule);
expect(severity).to.be.undefined; expect(severity).toBeUndefined();
}); });
const severityMap = { const severityMap = {
@@ -371,7 +365,7 @@ describe("SARIF processing", () => {
} as sarif.Run; } as sarif.Run;
const severity = tryGetSeverity(sarifRun, result, rule); const severity = tryGetSeverity(sarifRun, result, rule);
expect(severity).to.equal(parsedSeverity); expect(severity).toBe(parsedSeverity);
}); });
}); });
}); });
@@ -385,8 +379,8 @@ describe("SARIF processing", () => {
const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix); const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);
expect(result).to.be.ok; expect(result).toBeTruthy();
expect(result.alerts.length).to.equal(0); expect(result.alerts.length).toBe(0);
}); });
it("should not return any results for runs that have no results", () => { it("should not return any results for runs that have no results", () => {
@@ -403,8 +397,8 @@ describe("SARIF processing", () => {
const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix); const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);
expect(result).to.be.ok; expect(result).toBeTruthy();
expect(result.alerts.length).to.equal(0); expect(result.alerts.length).toBe(0);
}); });
it("should return errors for results that have no message", () => { it("should return errors for results that have no message", () => {
@@ -413,8 +407,8 @@ describe("SARIF processing", () => {
const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix); const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);
expect(result).to.be.ok; expect(result).toBeTruthy();
expect(result.errors.length).to.equal(1); expect(result.errors.length).toBe(1);
expectResultParsingError(result.errors[0]); expectResultParsingError(result.errors[0]);
}); });
@@ -433,9 +427,9 @@ describe("SARIF processing", () => {
const actualCodeSnippet = result.alerts[0].codeSnippet; const actualCodeSnippet = result.alerts[0].codeSnippet;
expect(result).to.be.ok; expect(result).toBeTruthy();
expectNoParsingError(result); expectNoParsingError(result);
expect(actualCodeSnippet).to.deep.equal(expectedCodeSnippet); expect(actualCodeSnippet).toEqual(expectedCodeSnippet);
}); });
it("should use highlightedRegion for result locations with no contextRegion", () => { it("should use highlightedRegion for result locations with no contextRegion", () => {
@@ -453,9 +447,9 @@ describe("SARIF processing", () => {
const actualCodeSnippet = result.alerts[0].codeSnippet; const actualCodeSnippet = result.alerts[0].codeSnippet;
expect(result).to.be.ok; expect(result).toBeTruthy();
expectNoParsingError(result); expectNoParsingError(result);
expect(actualCodeSnippet).to.deep.equal(expectedCodeSnippet); expect(actualCodeSnippet).toEqual(expectedCodeSnippet);
}); });
it("should not return errors for result locations with no region", () => { it("should not return errors for result locations with no region", () => {
@@ -465,8 +459,8 @@ describe("SARIF processing", () => {
const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix); const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);
expect(result).to.be.ok; expect(result).toBeTruthy();
expect(result.alerts.length).to.equal(1); expect(result.alerts.length).toBe(1);
expectNoParsingError(result); expectNoParsingError(result);
}); });
@@ -477,8 +471,8 @@ describe("SARIF processing", () => {
const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix); const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);
expect(result).to.be.ok; expect(result).toBeTruthy();
expect(result.errors.length).to.equal(1); expect(result.errors.length).toBe(1);
expectResultParsingError(result.errors[0]); expectResultParsingError(result.errors[0]);
}); });
@@ -565,31 +559,31 @@ describe("SARIF processing", () => {
} as sarif.Log; } as sarif.Log;
const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix); const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);
expect(result).to.be.ok; expect(result).toBeTruthy();
expect(result.errors.length).to.equal(0); expect(result.errors.length).toBe(0);
expect(result.alerts.length).to.equal(3); expect(result.alerts.length).toBe(3);
expect( expect(
result.alerts.find( result.alerts.find(
(a) => (a) =>
getMessageText(a.message) === "msg1" && getMessageText(a.message) === "msg1" &&
a.codeSnippet!.text === "foo", a.codeSnippet!.text === "foo",
), ),
).to.be.ok; ).toBeTruthy();
expect( expect(
result.alerts.find( result.alerts.find(
(a) => (a) =>
getMessageText(a.message) === "msg1" && getMessageText(a.message) === "msg1" &&
a.codeSnippet!.text === "bar", a.codeSnippet!.text === "bar",
), ),
).to.be.ok; ).toBeTruthy();
expect( expect(
result.alerts.find( result.alerts.find(
(a) => (a) =>
getMessageText(a.message) === "msg2" && getMessageText(a.message) === "msg2" &&
a.codeSnippet!.text === "baz", a.codeSnippet!.text === "baz",
), ),
).to.be.ok; ).toBeTruthy();
expect(result.alerts.every((a) => a.severity === "Warning")).to.be.true; expect(result.alerts.every((a) => a.severity === "Warning")).toBe(true);
}); });
it("should deal with complex messages", () => { it("should deal with complex messages", () => {
@@ -615,20 +609,20 @@ describe("SARIF processing", () => {
const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix); const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);
expect(result).to.be.ok; expect(result).toBeTruthy();
expect(result.errors.length).to.equal(0); expect(result.errors.length).toBe(0);
expect(result.alerts.length).to.equal(1); expect(result.alerts.length).toBe(1);
const message = result.alerts[0].message; const message = result.alerts[0].message;
expect(message.tokens.length).to.equal(3); expect(message.tokens.length).toBe(3);
expect(message.tokens[0].t).to.equal("text"); expect(message.tokens[0].t).toBe("text");
expect(message.tokens[0].text).to.equal( expect(message.tokens[0].text).toBe(
"This shell command depends on an uncontrolled ", "This shell command depends on an uncontrolled ",
); );
expect(message.tokens[1].t).to.equal("location"); expect(message.tokens[1].t).toBe("location");
expect(message.tokens[1].text).to.equal("absolute path"); expect(message.tokens[1].text).toBe("absolute path");
expect( expect(
(message.tokens[1] as AnalysisMessageLocationToken).location, (message.tokens[1] as AnalysisMessageLocationToken).location,
).to.deep.equal({ ).toEqual({
fileLink: { fileLink: {
fileLinkPrefix: fakefileLinkPrefix, fileLinkPrefix: fakefileLinkPrefix,
filePath: "npm-packages/meteor-installer/config.js", filePath: "npm-packages/meteor-installer/config.js",
@@ -640,18 +634,18 @@ describe("SARIF processing", () => {
endColumn: 60, endColumn: 60,
}, },
}); });
expect(message.tokens[2].t).to.equal("text"); expect(message.tokens[2].t).toBe("text");
expect(message.tokens[2].text).to.equal("."); expect(message.tokens[2].text).toBe(".");
}); });
}); });
function expectResultParsingError(msg: string) { function expectResultParsingError(msg: string) {
expect(msg.startsWith("Error when processing SARIF result")).to.be.true; expect(msg.startsWith("Error when processing SARIF result")).toBe(true);
} }
function expectNoParsingError(result: { errors: string[] }) { function expectNoParsingError(result: { errors: string[] }) {
const array = result.errors; const array = result.errors;
expect(array.length, array.join()).to.equal(0); expect(array).toEqual([]);
} }
function buildValidSarifLog(): sarif.Log { function buildValidSarifLog(): sarif.Log {

View File

@@ -1,5 +1,3 @@
import "mocha";
import { expect } from "chai";
import * as Sarif from "sarif"; import * as Sarif from "sarif";
import { import {
@@ -10,74 +8,72 @@ import {
} from "../../src/pure/sarif-utils"; } from "../../src/pure/sarif-utils";
describe("parsing sarif", () => { describe("parsing sarif", () => {
it("should be able to parse a simple message from the spec", async function () { it("should be able to parse a simple message from the spec", async () => {
const message = "Tainted data was used. The data came from [here](3)."; const message = "Tainted data was used. The data came from [here](3).";
const results = parseSarifPlainTextMessage(message); const results = parseSarifPlainTextMessage(message);
expect(results).to.deep.equal([ expect(results).toEqual([
"Tainted data was used. The data came from ", "Tainted data was used. The data came from ",
{ dest: 3, text: "here" }, { dest: 3, text: "here" },
".", ".",
]); ]);
}); });
it("should be able to parse a complex message from the spec", async function () { it("should be able to parse a complex message from the spec", async () => {
const message = "Prohibited term used in [para\\[0\\]\\\\spans\\[2\\]](1)."; const message = "Prohibited term used in [para\\[0\\]\\\\spans\\[2\\]](1).";
const results = parseSarifPlainTextMessage(message); const results = parseSarifPlainTextMessage(message);
expect(results).to.deep.equal([ expect(results).toEqual([
"Prohibited term used in ", "Prohibited term used in ",
{ dest: 1, text: "para[0]\\spans[2]" }, { dest: 1, text: "para[0]\\spans[2]" },
".", ".",
]); ]);
}); });
it("should be able to parse a broken complex message from the spec", async function () { it("should be able to parse a broken complex message from the spec", async () => {
const message = "Prohibited term used in [para\\[0\\]\\\\spans\\[2\\](1)."; const message = "Prohibited term used in [para\\[0\\]\\\\spans\\[2\\](1).";
const results = parseSarifPlainTextMessage(message); const results = parseSarifPlainTextMessage(message);
expect(results).to.deep.equal([ expect(results).toEqual(["Prohibited term used in [para[0]\\spans[2](1)."]);
"Prohibited term used in [para[0]\\spans[2](1).",
]);
}); });
it("should be able to parse a message with extra escaping the spec", async function () { it("should be able to parse a message with extra escaping the spec", async () => {
const message = "Tainted data was used. The data came from \\[here](3)."; const message = "Tainted data was used. The data came from \\[here](3).";
const results = parseSarifPlainTextMessage(message); const results = parseSarifPlainTextMessage(message);
expect(results).to.deep.equal([ expect(results).toEqual([
"Tainted data was used. The data came from [here](3).", "Tainted data was used. The data came from [here](3).",
]); ]);
}); });
it("should unescape sarif text", () => { it("should unescape sarif text", () => {
expect(unescapeSarifText("\\\\ \\\\ \\[ \\[ \\] \\]")).to.eq( expect(unescapeSarifText("\\\\ \\\\ \\[ \\[ \\] \\]")).toBe(
"\\ \\ [ [ ] ]", "\\ \\ [ [ ] ]",
); );
// Also show that unescaped special chars are unchanged...is this correct? // Also show that unescaped special chars are unchanged...is this correct?
expect(unescapeSarifText("\\ \\ [ [ ] ]")).to.eq("\\ \\ [ [ ] ]"); expect(unescapeSarifText("\\ \\ [ [ ] ]")).toBe("\\ \\ [ [ ] ]");
}); });
it("should normalize source locations", () => { it("should normalize source locations", () => {
expect(getPathRelativeToSourceLocationPrefix("C:\\a\\b", "?x=test")).to.eq( expect(getPathRelativeToSourceLocationPrefix("C:\\a\\b", "?x=test")).toBe(
"file:/C:/a/b/?x=test", "file:/C:/a/b/?x=test",
); );
expect( expect(
getPathRelativeToSourceLocationPrefix("C:\\a\\b", "%3Fx%3Dtest"), getPathRelativeToSourceLocationPrefix("C:\\a\\b", "%3Fx%3Dtest"),
).to.eq("file:/C:/a/b/%3Fx%3Dtest"); ).toBe("file:/C:/a/b/%3Fx%3Dtest");
expect( expect(
getPathRelativeToSourceLocationPrefix("C:\\a =\\b c?", "?x=test"), getPathRelativeToSourceLocationPrefix("C:\\a =\\b c?", "?x=test"),
).to.eq("file:/C:/a%20%3D/b%20c%3F/?x=test"); ).toBe("file:/C:/a%20%3D/b%20c%3F/?x=test");
expect(getPathRelativeToSourceLocationPrefix("/a/b/c", "?x=test")).to.eq( expect(getPathRelativeToSourceLocationPrefix("/a/b/c", "?x=test")).toBe(
"file:/a/b/c/?x=test", "file:/a/b/c/?x=test",
); );
}); });
describe("parseSarifLocation", () => { describe("parseSarifLocation", () => {
it('should parse a sarif location with "no location"', () => { it('should parse a sarif location with "no location"', () => {
expect(parseSarifLocation({}, "")).to.deep.equal({ expect(parseSarifLocation({}, "")).toEqual({
hint: "no physical location", hint: "no physical location",
}); });
expect(parseSarifLocation({ physicalLocation: {} }, "")).to.deep.equal({ expect(parseSarifLocation({ physicalLocation: {} }, "")).toEqual({
hint: "no artifact location", hint: "no artifact location",
}); });
expect( expect(
parseSarifLocation({ physicalLocation: { artifactLocation: {} } }, ""), parseSarifLocation({ physicalLocation: { artifactLocation: {} } }, ""),
).to.deep.equal({ ).toEqual({
hint: "artifact location has no uri", hint: "artifact location has no uri",
}); });
}); });
@@ -90,7 +86,7 @@ describe("parsing sarif", () => {
}, },
}, },
}; };
expect(parseSarifLocation(location, "prefix")).to.deep.equal({ expect(parseSarifLocation(location, "prefix")).toEqual({
uri: "file:/prefix/abc?x=test", uri: "file:/prefix/abc?x=test",
userVisibleFile: "abc?x=test", userVisibleFile: "abc?x=test",
}); });
@@ -104,7 +100,7 @@ describe("parsing sarif", () => {
}, },
}, },
}; };
expect(parseSarifLocation(location, "prefix")).to.deep.equal({ expect(parseSarifLocation(location, "prefix")).toEqual({
uri: "file:/abc%3Fx%3Dtest", uri: "file:/abc%3Fx%3Dtest",
userVisibleFile: "/abc?x=test", userVisibleFile: "/abc?x=test",
}); });
@@ -124,7 +120,7 @@ describe("parsing sarif", () => {
}, },
}, },
}; };
expect(parseSarifLocation(location, "prefix")).to.deep.equal({ expect(parseSarifLocation(location, "prefix")).toEqual({
uri: "file:abc%3Fx%3Dtest", uri: "file:abc%3Fx%3Dtest",
userVisibleFile: "abc?x=test", userVisibleFile: "abc?x=test",
startLine: 1, startLine: 1,

View File

@@ -1,89 +1,86 @@
import { expect } from "chai";
import "mocha";
import { humanizeRelativeTime, humanizeUnit } from "../../src/pure/time"; import { humanizeRelativeTime, humanizeUnit } from "../../src/pure/time";
describe("Time", () => { describe("Time", () => {
it("should return a humanized unit", () => { it("should return a humanized unit", () => {
expect(humanizeUnit(undefined)).to.eq("Less than a second"); expect(humanizeUnit(undefined)).toBe("Less than a second");
expect(humanizeUnit(0)).to.eq("Less than a second"); expect(humanizeUnit(0)).toBe("Less than a second");
expect(humanizeUnit(-1)).to.eq("Less than a second"); expect(humanizeUnit(-1)).toBe("Less than a second");
expect(humanizeUnit(1000 - 1)).to.eq("Less than a second"); expect(humanizeUnit(1000 - 1)).toBe("Less than a second");
expect(humanizeUnit(1000)).to.eq("1 second"); expect(humanizeUnit(1000)).toBe("1 second");
expect(humanizeUnit(1000 * 2)).to.eq("2 seconds"); expect(humanizeUnit(1000 * 2)).toBe("2 seconds");
expect(humanizeUnit(1000 * 60 - 1)).to.eq("59 seconds"); expect(humanizeUnit(1000 * 60 - 1)).toBe("59 seconds");
expect(humanizeUnit(1000 * 60)).to.eq("1 minute"); expect(humanizeUnit(1000 * 60)).toBe("1 minute");
expect(humanizeUnit(1000 * 60 * 2 - 1)).to.eq("1 minute"); expect(humanizeUnit(1000 * 60 * 2 - 1)).toBe("1 minute");
expect(humanizeUnit(1000 * 60 * 2)).to.eq("2 minutes"); expect(humanizeUnit(1000 * 60 * 2)).toBe("2 minutes");
expect(humanizeUnit(1000 * 60 * 60)).to.eq("1 hour"); expect(humanizeUnit(1000 * 60 * 60)).toBe("1 hour");
expect(humanizeUnit(1000 * 60 * 60 * 2)).to.eq("2 hours"); expect(humanizeUnit(1000 * 60 * 60 * 2)).toBe("2 hours");
expect(humanizeUnit(1000 * 60 * 60 * 24)).to.eq("1 day"); expect(humanizeUnit(1000 * 60 * 60 * 24)).toBe("1 day");
expect(humanizeUnit(1000 * 60 * 60 * 24 * 2)).to.eq("2 days"); expect(humanizeUnit(1000 * 60 * 60 * 24 * 2)).toBe("2 days");
// assume every month has 30 days // assume every month has 30 days
expect(humanizeUnit(1000 * 60 * 60 * 24 * 30)).to.eq("1 month"); expect(humanizeUnit(1000 * 60 * 60 * 24 * 30)).toBe("1 month");
expect(humanizeUnit(1000 * 60 * 60 * 24 * 30 * 2)).to.eq("2 months"); expect(humanizeUnit(1000 * 60 * 60 * 24 * 30 * 2)).toBe("2 months");
expect(humanizeUnit(1000 * 60 * 60 * 24 * 30 * 12)).to.eq("12 months"); expect(humanizeUnit(1000 * 60 * 60 * 24 * 30 * 12)).toBe("12 months");
// assume every year has 365 days // assume every year has 365 days
expect(humanizeUnit(1000 * 60 * 60 * 24 * 365)).to.eq("1 year"); expect(humanizeUnit(1000 * 60 * 60 * 24 * 365)).toBe("1 year");
expect(humanizeUnit(1000 * 60 * 60 * 24 * 365 * 2)).to.eq("2 years"); expect(humanizeUnit(1000 * 60 * 60 * 24 * 365 * 2)).toBe("2 years");
}); });
it("should return a humanized duration positive", () => { it("should return a humanized duration positive", () => {
expect(humanizeRelativeTime(undefined)).to.eq(""); expect(humanizeRelativeTime(undefined)).toBe("");
expect(humanizeRelativeTime(0)).to.eq("this minute"); expect(humanizeRelativeTime(0)).toBe("this minute");
expect(humanizeRelativeTime(1)).to.eq("this minute"); expect(humanizeRelativeTime(1)).toBe("this minute");
expect(humanizeRelativeTime(1000 * 60 - 1)).to.eq("this minute"); expect(humanizeRelativeTime(1000 * 60 - 1)).toBe("this minute");
expect(humanizeRelativeTime(1000 * 60)).to.eq("in 1 minute"); expect(humanizeRelativeTime(1000 * 60)).toBe("in 1 minute");
expect(humanizeRelativeTime(1000 * 60 * 2 - 1)).to.eq("in 1 minute"); expect(humanizeRelativeTime(1000 * 60 * 2 - 1)).toBe("in 1 minute");
expect(humanizeRelativeTime(1000 * 60 * 2)).to.eq("in 2 minutes"); expect(humanizeRelativeTime(1000 * 60 * 2)).toBe("in 2 minutes");
expect(humanizeRelativeTime(1000 * 60 * 60)).to.eq("in 1 hour"); expect(humanizeRelativeTime(1000 * 60 * 60)).toBe("in 1 hour");
expect(humanizeRelativeTime(1000 * 60 * 60 * 2)).to.eq("in 2 hours"); expect(humanizeRelativeTime(1000 * 60 * 60 * 2)).toBe("in 2 hours");
expect(humanizeRelativeTime(1000 * 60 * 60 * 24)).to.eq("tomorrow"); expect(humanizeRelativeTime(1000 * 60 * 60 * 24)).toBe("tomorrow");
expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 2)).to.eq("in 2 days"); expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 2)).toBe("in 2 days");
// assume every month has 30 days // assume every month has 30 days
expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30)).to.eq("next month"); expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30)).toBe("next month");
expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30 * 2)).to.eq( expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30 * 2)).toBe(
"in 2 months", "in 2 months",
); );
expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30 * 12)).to.eq( expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 30 * 12)).toBe(
"in 12 months", "in 12 months",
); );
// assume every year has 365 days // assume every year has 365 days
expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 365)).to.eq("next year"); expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 365)).toBe("next year");
expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 365 * 2)).to.eq( expect(humanizeRelativeTime(1000 * 60 * 60 * 24 * 365 * 2)).toBe(
"in 2 years", "in 2 years",
); );
}); });
it("should return a humanized duration negative", () => { it("should return a humanized duration negative", () => {
expect(humanizeRelativeTime(-1)).to.eq("this minute"); expect(humanizeRelativeTime(-1)).toBe("this minute");
expect(humanizeRelativeTime(-1000 * 60)).to.eq("1 minute ago"); expect(humanizeRelativeTime(-1000 * 60)).toBe("1 minute ago");
expect(humanizeRelativeTime(-1000 * 60 - 1)).to.eq("1 minute ago"); expect(humanizeRelativeTime(-1000 * 60 - 1)).toBe("1 minute ago");
expect(humanizeRelativeTime(-1000 * 60 * 2)).to.eq("2 minutes ago"); expect(humanizeRelativeTime(-1000 * 60 * 2)).toBe("2 minutes ago");
expect(humanizeRelativeTime(-1000 * 60 * 2 - 1)).to.eq("2 minutes ago"); expect(humanizeRelativeTime(-1000 * 60 * 2 - 1)).toBe("2 minutes ago");
expect(humanizeRelativeTime(-1000 * 60 * 3)).to.eq("3 minutes ago"); expect(humanizeRelativeTime(-1000 * 60 * 3)).toBe("3 minutes ago");
expect(humanizeRelativeTime(-1000 * 60 * 60)).to.eq("1 hour ago"); expect(humanizeRelativeTime(-1000 * 60 * 60)).toBe("1 hour ago");
expect(humanizeRelativeTime(-1000 * 60 * 60 - 1)).to.eq("1 hour ago"); expect(humanizeRelativeTime(-1000 * 60 * 60 - 1)).toBe("1 hour ago");
expect(humanizeRelativeTime(-1000 * 60 * 60 * 2)).to.eq("2 hours ago"); expect(humanizeRelativeTime(-1000 * 60 * 60 * 2)).toBe("2 hours ago");
expect(humanizeRelativeTime(-1000 * 60 * 60 * 24)).to.eq("yesterday"); expect(humanizeRelativeTime(-1000 * 60 * 60 * 24)).toBe("yesterday");
expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 2)).to.eq("2 days ago"); expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 2)).toBe("2 days ago");
// assume every month has 30 days // assume every month has 30 days
expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30)).to.eq("last month"); expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30)).toBe("last month");
expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30 * 2)).to.eq( expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30 * 2)).toBe(
"2 months ago", "2 months ago",
); );
expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30 * 12)).to.eq( expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 30 * 12)).toBe(
"12 months ago", "12 months ago",
); );
// assume every year has 365 days // assume every year has 365 days
expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 365)).to.eq("last year"); expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 365)).toBe("last year");
expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 365 * 2)).to.eq( expect(humanizeRelativeTime(-1000 * 60 * 60 * 24 * 365 * 2)).toBe(
"2 years ago", "2 years ago",
); );
}); });

View File

@@ -1,4 +1,3 @@
import { expect } from "chai";
import { import {
VariantAnalysis, VariantAnalysis,
parseVariantAnalysisQueryLanguage, parseVariantAnalysisQueryLanguage,
@@ -13,17 +12,17 @@ import { createMockVariantAnalysis } from "../../src/vscode-tests/factories/remo
describe("parseVariantAnalysisQueryLanguage", () => { describe("parseVariantAnalysisQueryLanguage", () => {
it("parses a valid language", () => { it("parses a valid language", () => {
expect(parseVariantAnalysisQueryLanguage("javascript")).to.equal( expect(parseVariantAnalysisQueryLanguage("javascript")).toBe(
VariantAnalysisQueryLanguage.Javascript, VariantAnalysisQueryLanguage.Javascript,
); );
}); });
it("returns undefined for an invalid language", () => { it("returns undefined for an invalid language", () => {
expect(parseVariantAnalysisQueryLanguage("rubbish")).to.not.exist; expect(parseVariantAnalysisQueryLanguage("rubbish")).toBeFalsy();
}); });
}); });
describe("isVariantAnalysisComplete", async () => { describe("isVariantAnalysisComplete", () => {
let variantAnalysis: VariantAnalysis; let variantAnalysis: VariantAnalysis;
const uncallableArtifactDownloadChecker = () => { const uncallableArtifactDownloadChecker = () => {
throw new Error("Should not be called"); throw new Error("Should not be called");
@@ -33,12 +32,12 @@ describe("isVariantAnalysisComplete", async () => {
variantAnalysis = createMockVariantAnalysis({}); variantAnalysis = createMockVariantAnalysis({});
}); });
describe("when variant analysis status is InProgress", async () => { describe("when variant analysis status is InProgress", () => {
beforeEach(() => { beforeEach(() => {
variantAnalysis.status = VariantAnalysisStatus.InProgress; variantAnalysis.status = VariantAnalysisStatus.InProgress;
}); });
describe("when scanned repos is undefined", async () => { describe("when scanned repos is undefined", () => {
it("should say the variant analysis is not complete", async () => { it("should say the variant analysis is not complete", async () => {
variantAnalysis.scannedRepos = undefined; variantAnalysis.scannedRepos = undefined;
expect( expect(
@@ -46,24 +45,24 @@ describe("isVariantAnalysisComplete", async () => {
variantAnalysis, variantAnalysis,
uncallableArtifactDownloadChecker, uncallableArtifactDownloadChecker,
), ),
).to.equal(false); ).toBe(false);
}); });
}); });
describe("when scanned repos is non-empty", async () => { describe("when scanned repos is non-empty", () => {
describe("when not all results are downloaded", async () => { describe("when not all results are downloaded", () => {
it("should say the variant analysis is not complete", async () => { it("should say the variant analysis is not complete", async () => {
expect( expect(
await isVariantAnalysisComplete(variantAnalysis, async () => false), await isVariantAnalysisComplete(variantAnalysis, async () => false),
).to.equal(false); ).toBe(false);
}); });
}); });
describe("when all results are downloaded", async () => { describe("when all results are downloaded", () => {
it("should say the variant analysis is not complete", async () => { it("should say the variant analysis is not complete", async () => {
expect( expect(
await isVariantAnalysisComplete(variantAnalysis, async () => true), await isVariantAnalysisComplete(variantAnalysis, async () => true),
).to.equal(false); ).toBe(false);
}); });
}); });
}); });
@@ -74,12 +73,12 @@ describe("isVariantAnalysisComplete", async () => {
VariantAnalysisStatus.Failed, VariantAnalysisStatus.Failed,
VariantAnalysisStatus.Canceled, VariantAnalysisStatus.Canceled,
]) { ]) {
describe(`when variant analysis status is ${variantAnalysisStatus}`, async () => { describe(`when variant analysis status is ${variantAnalysisStatus}`, () => {
beforeEach(() => { beforeEach(() => {
variantAnalysis.status = variantAnalysisStatus; variantAnalysis.status = variantAnalysisStatus;
}); });
describe("when scanned repos is undefined", async () => { describe("when scanned repos is undefined", () => {
it("should say the variant analysis is complete", async () => { it("should say the variant analysis is complete", async () => {
variantAnalysis.scannedRepos = undefined; variantAnalysis.scannedRepos = undefined;
expect( expect(
@@ -87,11 +86,11 @@ describe("isVariantAnalysisComplete", async () => {
variantAnalysis, variantAnalysis,
uncallableArtifactDownloadChecker, uncallableArtifactDownloadChecker,
), ),
).to.equal(true); ).toBe(true);
}); });
}); });
describe("when scanned repos is empty", async () => { describe("when scanned repos is empty", () => {
it("should say the variant analysis is complete", async () => { it("should say the variant analysis is complete", async () => {
variantAnalysis.scannedRepos = []; variantAnalysis.scannedRepos = [];
expect( expect(
@@ -99,11 +98,11 @@ describe("isVariantAnalysisComplete", async () => {
variantAnalysis, variantAnalysis,
uncallableArtifactDownloadChecker, uncallableArtifactDownloadChecker,
), ),
).to.equal(true); ).toBe(true);
}); });
}); });
describe("when a repo scan is still in progress", async () => { describe("when a repo scan is still in progress", () => {
it("should say the variant analysis is not complete", async () => { it("should say the variant analysis is not complete", async () => {
variantAnalysis.scannedRepos = [ variantAnalysis.scannedRepos = [
createMockScannedRepo( createMockScannedRepo(
@@ -114,11 +113,11 @@ describe("isVariantAnalysisComplete", async () => {
]; ];
expect( expect(
await isVariantAnalysisComplete(variantAnalysis, async () => false), await isVariantAnalysisComplete(variantAnalysis, async () => false),
).to.equal(false); ).toBe(false);
}); });
}); });
describe("when not all results are downloaded", async () => { describe("when not all results are downloaded", () => {
it("should say the variant analysis is not complete", async () => { it("should say the variant analysis is not complete", async () => {
variantAnalysis.scannedRepos = [ variantAnalysis.scannedRepos = [
createMockScannedRepo( createMockScannedRepo(
@@ -129,11 +128,11 @@ describe("isVariantAnalysisComplete", async () => {
]; ];
expect( expect(
await isVariantAnalysisComplete(variantAnalysis, async () => false), await isVariantAnalysisComplete(variantAnalysis, async () => false),
).to.equal(false); ).toBe(false);
}); });
}); });
describe("when all results are downloaded", async () => { describe("when all results are downloaded", () => {
it("should say the variant analysis is complete", async () => { it("should say the variant analysis is complete", async () => {
variantAnalysis.scannedRepos = [ variantAnalysis.scannedRepos = [
createMockScannedRepo( createMockScannedRepo(
@@ -144,7 +143,7 @@ describe("isVariantAnalysisComplete", async () => {
]; ];
expect( expect(
await isVariantAnalysisComplete(variantAnalysis, async () => true), await isVariantAnalysisComplete(variantAnalysis, async () => true),
).to.equal(true); ).toBe(true);
}); });
}); });
}); });
@@ -157,7 +156,7 @@ describe("getActionsWorkflowRunUrl", () => {
const actionsWorkflowRunUrl = getActionsWorkflowRunUrl(variantAnalysis); const actionsWorkflowRunUrl = getActionsWorkflowRunUrl(variantAnalysis);
expect(actionsWorkflowRunUrl).to.equal( expect(actionsWorkflowRunUrl).toBe(
`https://github.com/${variantAnalysis.controllerRepo.fullName}/actions/runs/${variantAnalysis.actionsWorkflowRunId}`, `https://github.com/${variantAnalysis.controllerRepo.fullName}/actions/runs/${variantAnalysis.actionsWorkflowRunId}`,
); );
}); });

View File

@@ -1,20 +1,18 @@
import { expect } from "chai";
import { pluralize } from "../../src/pure/word"; import { pluralize } from "../../src/pure/word";
describe("word helpers", () => { describe("word helpers", () => {
describe("pluralize", () => { describe("pluralize", () => {
it("should return the plural form if the number is 0", () => { it("should return the plural form if the number is 0", () => {
expect(pluralize(0, "thing", "things")).to.eq("0 things"); expect(pluralize(0, "thing", "things")).toBe("0 things");
}); });
it("should return the singular form if the number is 1", () => { it("should return the singular form if the number is 1", () => {
expect(pluralize(1, "thing", "things")).to.eq("1 thing"); expect(pluralize(1, "thing", "things")).toBe("1 thing");
}); });
it("should return the plural form if the number is greater than 1", () => { it("should return the plural form if the number is greater than 1", () => {
expect(pluralize(7, "thing", "things")).to.eq("7 things"); expect(pluralize(7, "thing", "things")).toBe("7 things");
}); });
it("should return the empty string if the number is undefined", () => { it("should return the empty string if the number is undefined", () => {
expect(pluralize(undefined, "thing", "things")).to.eq(""); expect(pluralize(undefined, "thing", "things")).toBe("");
}); });
}); });
}); });