Merge branch 'main' into aeisenberg/persist-dbs
.github/workflows/main.yml (vendored, 10 changed lines)
@@ -103,6 +103,11 @@ jobs:
run: |
npm run lint

- name: Lint scenarios
working-directory: extensions/ql-vscode
run: |
npm run lint:scenarios

- name: Run unit tests (Linux)
working-directory: extensions/ql-vscode
if: matrix.os == 'ubuntu-latest'
@@ -121,7 +126,7 @@ jobs:
env:
VSCODE_CODEQL_GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
run: |
sudo apt-get install xvfb
unset DBUS_SESSION_BUS_ADDRESS
/usr/bin/xvfb-run npm run integration

- name: Run integration tests (Windows)
@@ -139,7 +144,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
version: ['v2.7.6', 'v2.8.5', 'v2.9.4', 'v2.10.5', 'v2.11.1', 'nightly']
version: ['v2.7.6', 'v2.8.5', 'v2.9.4', 'v2.10.5', 'v2.11.2', 'nightly']
env:
CLI_VERSION: ${{ matrix.version }}
NIGHTLY_URL: ${{ needs.find-nightly.outputs.url }}
@@ -188,6 +193,7 @@ jobs:
working-directory: extensions/ql-vscode
if: matrix.os == 'ubuntu-latest'
run: |
unset DBUS_SESSION_BUS_ADDRESS
/usr/bin/xvfb-run npm run cli-integration

- name: Run CLI tests (Windows)
@@ -8,7 +8,8 @@ const config: StorybookConfig = {
addons: [
'@storybook/addon-links',
'@storybook/addon-essentials',
'@storybook/addon-interactions'
'@storybook/addon-interactions',
'./vscode-theme-addon/preset.ts',
],
framework: '@storybook/react',
core: {
@@ -4,8 +4,6 @@ import { action } from '@storybook/addon-actions';
// Allow all stories/components to use Codicons
import '@vscode/codicons/dist/codicon.css';

import '../src/stories/vscode-theme-dark.css';

// https://storybook.js.org/docs/react/configure/overview#configure-story-rendering
export const parameters = {
// All props starting with `on` will automatically receive an action as a prop
@@ -22,13 +20,8 @@ export const parameters = {
theme: themes.dark,
},
backgrounds: {
default: 'dark',
values: [
{
name: 'dark',
value: '#1e1e1e',
},
],
// The background is injected by our theme CSS files
disable: true,
}
};
extensions/ql-vscode/.storybook/tsconfig.json (new file, 19 lines)
@@ -0,0 +1,19 @@
{
  "compilerOptions": {
    "module": "esnext",
    "moduleResolution": "node",
    "target": "es6",
    "outDir": "out",
    "lib": ["ES2021", "dom"],
    "jsx": "react",
    "sourceMap": true,
    "rootDir": "..",
    "strict": true,
    "noUnusedLocals": true,
    "noImplicitReturns": true,
    "noFallthroughCasesInSwitch": true,
    "experimentalDecorators": true,
    "skipLibCheck": true
  },
  "exclude": ["node_modules"]
}
@@ -0,0 +1,49 @@
import * as React from 'react';
import { FunctionComponent, useCallback } from 'react';

import { useGlobals } from '@storybook/api';
import { IconButton, Icons, WithTooltip, TooltipLinkList, Link, WithHideFn } from '@storybook/components';

import { themeNames, VSCodeTheme } from './theme';

export const ThemeSelector: FunctionComponent = () => {
  const [{ vscodeTheme }, updateGlobals] = useGlobals();

  const changeTheme = useCallback((theme: VSCodeTheme) => {
    updateGlobals({
      vscodeTheme: theme,
    });
  }, [updateGlobals]);

  const createLinks = useCallback((onHide: () => void): Link[] => Object.values(VSCodeTheme).map((theme) => ({
    id: theme,
    onClick() {
      changeTheme(theme);
      onHide();
    },
    title: themeNames[theme],
    value: theme,
    active: vscodeTheme === theme,
  })), [vscodeTheme, changeTheme]);

  return (
    <WithTooltip
      placement="top"
      trigger="click"
      closeOnClick
      tooltip={({ onHide }: WithHideFn) => (
        <TooltipLinkList
          links={createLinks(onHide)}
        />
      )}
    >
      <IconButton
        key="theme"
        title="Change the theme of the preview"
        active={vscodeTheme !== VSCodeTheme.Dark}
      >
        <Icons icon="dashboard" />
      </IconButton>
    </WithTooltip>
  );
};
@@ -0,0 +1,14 @@
import * as React from 'react';
import { addons, types } from '@storybook/addons';
import { ThemeSelector } from './ThemeSelector';

const ADDON_ID = 'vscode-theme-addon';

addons.register(ADDON_ID, () => {
  addons.add(ADDON_ID, {
    title: 'VSCode Themes',
    type: types.TOOL,
    match: ({ viewMode }) => !!(viewMode && viewMode.match(/^(story|docs)$/)),
    render: () => <ThemeSelector />,
  });
});
@@ -0,0 +1,7 @@
export function config(entry = []) {
  return [...entry, require.resolve("./preview.ts")];
}

export function managerEntries(entry = []) {
  return [...entry, require.resolve("./manager.tsx")];
}
@@ -0,0 +1,8 @@
import { withTheme } from './withTheme';
import { VSCodeTheme } from './theme';

export const decorators = [withTheme];

export const globals = {
  vscodeTheme: VSCodeTheme.Dark,
};
@@ -0,0 +1,9 @@
export enum VSCodeTheme {
  Dark = 'dark',
  Light = 'light',
}

export const themeNames: { [key in VSCodeTheme]: string } = {
  [VSCodeTheme.Dark]: 'Dark+',
  [VSCodeTheme.Light]: 'Light+',
}
@@ -0,0 +1,36 @@
import { useEffect, useGlobals } from '@storybook/addons';
import type { AnyFramework, PartialStoryFn as StoryFunction, StoryContext } from '@storybook/csf';

import { VSCodeTheme } from './theme';

const themeFiles: { [key in VSCodeTheme]: string } = {
  [VSCodeTheme.Dark]: require('!file-loader?modules!../../src/stories/vscode-theme-dark.css').default,
  [VSCodeTheme.Light]: require('!file-loader?modules!../../src/stories/vscode-theme-light.css').default,
}

export const withTheme = (
  StoryFn: StoryFunction<AnyFramework>,
  context: StoryContext<AnyFramework>
) => {
  const [{ vscodeTheme }] = useGlobals();

  useEffect(() => {
    const styleSelectorId =
      context.viewMode === 'docs'
        ? `addon-vscode-theme-docs-${context.id}`
        : `addon-vscode-theme-theme`;

    const theme = Object.values(VSCodeTheme).includes(vscodeTheme) ? vscodeTheme as VSCodeTheme : VSCodeTheme.Dark;

    document.getElementById(styleSelectorId)?.remove();

    const link = document.createElement('link');
    link.id = styleSelectorId;
    link.href = themeFiles[theme];
    link.rel = 'stylesheet';

    document.head.appendChild(link);
  }, [vscodeTheme]);

  return StoryFn();
};
@@ -3,6 +3,7 @@
## [UNRELEASED]

- Fix a bug where databases may be lost if VS Code is restarted while the extension is being started up. [#1638](https://github.com/github/vscode-codeql/pull/1638)
- Add commands for navigating up, down, left, or right in the result viewer. Previously there were only commands for moving up and down the currently-selected path. We suggest binding keyboard shortcuts to these commands, for navigating the result viewer using the keyboard. [#1568](https://github.com/github/vscode-codeql/pull/1568)

## 1.7.2 - 14 October 2022
@@ -99,6 +99,10 @@ When the results are ready, they're displayed in the CodeQL Query Results view.

If there are any problems running a query, a notification is displayed in the bottom right corner of the application. In addition to the error message, the notification includes details of how to fix the problem.

### Keyboard navigation

If you wish to navigate the query results from your keyboard, you can bind shortcuts to the **CodeQL: Navigate Up/Down/Left/Right in Result Viewer** commands.
## What next?

For more information about the CodeQL extension, [see the documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/). Otherwise, you could:
extensions/ql-vscode/package-lock.json (generated, 1488 changes; file diff suppressed because it is too large)
@@ -602,12 +602,20 @@
|
||||
"title": "Copy Repository List"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryResults.nextPathStep",
|
||||
"title": "CodeQL: Show Next Step on Path"
|
||||
"command": "codeQLQueryResults.down",
|
||||
"title": "CodeQL: Navigate Down in Local Result Viewer"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryResults.previousPathStep",
|
||||
"title": "CodeQL: Show Previous Step on Path"
|
||||
"command": "codeQLQueryResults.up",
|
||||
"title": "CodeQL: Navigate Up in Local Result Viewer"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryResults.right",
|
||||
"title": "CodeQL: Navigate Right in Local Result Viewer"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryResults.left",
|
||||
"title": "CodeQL: Navigate Left in Local Result Viewer"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.restartQueryServer",
|
||||
@@ -645,6 +653,26 @@
|
||||
"command": "codeQL.gotoQL",
|
||||
"title": "CodeQL: Go to QL Code",
|
||||
"enablement": "codeql.hasQLSource"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.mockGitHubApiServer.startRecording",
|
||||
"title": "CodeQL: Mock GitHub API Server: Start Scenario Recording"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.mockGitHubApiServer.saveScenario",
|
||||
"title": "CodeQL: Mock GitHub API Server: Save Scenario"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.mockGitHubApiServer.cancelRecording",
|
||||
"title": "CodeQL: Mock GitHub API Server: Cancel Scenario Recording"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.mockGitHubApiServer.loadScenario",
|
||||
"title": "CodeQL: Mock GitHub API Server: Load Scenario"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.mockGitHubApiServer.unloadScenario",
|
||||
"title": "CodeQL: Mock GitHub API Server: Unload Scenario"
|
||||
}
|
||||
],
|
||||
"menus": {
|
||||
@@ -1104,6 +1132,26 @@
|
||||
{
|
||||
"command": "codeQLTests.showOutputDifferences",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.mockGitHubApiServer.startRecording",
|
||||
"when": "config.codeQL.mockGitHubApiServer.enabled && !codeQL.mockGitHubApiServer.recording"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.mockGitHubApiServer.saveScenario",
|
||||
"when": "config.codeQL.mockGitHubApiServer.enabled && codeQL.mockGitHubApiServer.recording"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.mockGitHubApiServer.cancelRecording",
|
||||
"when": "config.codeQL.mockGitHubApiServer.enabled && codeQL.mockGitHubApiServer.recording"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.mockGitHubApiServer.loadScenario",
|
||||
"when": "config.codeQL.mockGitHubApiServer.enabled && !codeQL.mockGitHubApiServer.recording"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.mockGitHubApiServer.unloadScenario",
|
||||
"when": "config.codeQL.mockGitHubApiServer.enabled && codeQL.mockGitHubApiServer.scenarioLoaded"
|
||||
}
|
||||
],
|
||||
"editor/context": [
|
||||
@@ -1210,7 +1258,8 @@
|
||||
"lint": "eslint src test --ext .ts,.tsx --max-warnings=0",
|
||||
"format-staged": "lint-staged",
|
||||
"storybook": "start-storybook -p 6006",
|
||||
"build-storybook": "build-storybook"
|
||||
"build-storybook": "build-storybook",
|
||||
"lint:scenarios": "ts-node scripts/lint-scenarios.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@octokit/plugin-retry": "^3.0.9",
|
||||
@@ -1305,6 +1354,7 @@
|
||||
"@types/xml2js": "~0.4.4",
|
||||
"@typescript-eslint/eslint-plugin": "^4.26.0",
|
||||
"@typescript-eslint/parser": "^4.26.0",
|
||||
"ajv": "^8.11.0",
|
||||
"ansi-colors": "^4.1.1",
|
||||
"applicationinsights": "^2.3.5",
|
||||
"babel-loader": "^8.2.5",
|
||||
@@ -1323,7 +1373,7 @@
|
||||
"gulp-replace": "^1.1.3",
|
||||
"gulp-sourcemaps": "^3.0.0",
|
||||
"gulp-typescript": "^5.0.1",
|
||||
"husky": "~8.0.1",
|
||||
"husky": "~4.3.8",
|
||||
"jest": "^29.0.3",
|
||||
"jest-environment-jsdom": "^29.0.3",
|
||||
"lint-staged": "~10.2.2",
|
||||
@@ -1338,6 +1388,7 @@
|
||||
"sinon-chai": "~3.5.0",
|
||||
"through2": "^4.0.2",
|
||||
"ts-jest": "^29.0.1",
|
||||
"ts-json-schema-generator": "^1.1.2",
|
||||
"ts-loader": "^8.1.0",
|
||||
"ts-node": "^10.7.0",
|
||||
"ts-protoc-gen": "^0.9.0",
|
||||
|
||||
extensions/ql-vscode/scripts/lint-scenarios.ts (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
|
||||
import Ajv from 'ajv';
|
||||
import * as tsj from 'ts-json-schema-generator';
|
||||
|
||||
const extensionDirectory = path.resolve(__dirname, '..');
|
||||
const rootDirectory = path.resolve(extensionDirectory, '../..');
|
||||
const scenariosDirectory = path.resolve(extensionDirectory, 'src/mocks/scenarios');
|
||||
|
||||
const debug = process.env.RUNNER_DEBUG || process.argv.includes('--debug');
|
||||
|
||||
async function lintScenarios() {
|
||||
const schema = tsj.createGenerator({
|
||||
path: path.resolve(extensionDirectory, 'src/mocks/gh-api-request.ts'),
|
||||
tsconfig: path.resolve(extensionDirectory, 'tsconfig.json'),
|
||||
type: 'GitHubApiRequest',
|
||||
skipTypeCheck: true,
|
||||
topRef: true,
|
||||
additionalProperties: true,
|
||||
}).createSchema('GitHubApiRequest');
|
||||
|
||||
const ajv = new Ajv();
|
||||
|
||||
if (!ajv.validateSchema(schema)) {
|
||||
throw new Error('Invalid schema: ' + ajv.errorsText());
|
||||
}
|
||||
|
||||
const validate = await ajv.compile(schema);
|
||||
|
||||
let invalidFiles = 0;
|
||||
|
||||
if (!(await fs.pathExists(scenariosDirectory))) {
|
||||
console.error('Scenarios directory does not exist: ' + scenariosDirectory);
|
||||
// Do not exit with a non-zero status code, as this is not a fatal error.
|
||||
return;
|
||||
}
|
||||
|
||||
for await (const file of getFiles(scenariosDirectory)) {
|
||||
if (!file.endsWith('.json')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const contents = await fs.readFile(file, 'utf8');
|
||||
const data = JSON.parse(contents);
|
||||
|
||||
if (!validate(data)) {
|
||||
validate.errors?.forEach(error => {
|
||||
// https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message
|
||||
console.log(`::error file=${path.relative(rootDirectory, file)}::${error.instancePath}: ${error.message}`);
|
||||
});
|
||||
invalidFiles++;
|
||||
} else if (debug) {
|
||||
console.log(`File '${path.relative(rootDirectory, file)}' is valid`);
|
||||
}
|
||||
}
|
||||
|
||||
if (invalidFiles > 0) {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// https://stackoverflow.com/a/45130990
|
||||
async function* getFiles(dir: string): AsyncGenerator<string> {
|
||||
const dirents = await fs.readdir(dir, { withFileTypes: true });
|
||||
for (const dirent of dirents) {
|
||||
const res = path.resolve(dir, dirent.name);
|
||||
if (dirent.isDirectory()) {
|
||||
yield* getFiles(res);
|
||||
} else {
|
||||
yield res;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
lintScenarios().catch(e => {
|
||||
console.error(e);
|
||||
process.exit(2);
|
||||
});
|
||||
@@ -944,16 +944,14 @@ export class CodeQLCliServer implements Disposable {
|
||||
return this.runJsonCodeQlCliCommand(['pack', 'install'], args, 'Installing pack dependencies');
|
||||
}
|
||||
|
||||
async packBundle(dir: string, workspaceFolders: string[], outputPath: string, precompile = true): Promise<void> {
|
||||
async packBundle(dir: string, workspaceFolders: string[], outputPath: string, moreOptions: string[]): Promise<void> {
|
||||
const args = [
|
||||
'-o',
|
||||
outputPath,
|
||||
dir,
|
||||
...moreOptions,
|
||||
...this.getAdditionalPacksArg(workspaceFolders)
|
||||
];
|
||||
if (!precompile && await this.cliConstraints.supportsNoPrecompile()) {
|
||||
args.push('--no-precompile');
|
||||
}
|
||||
|
||||
return this.runJsonCodeQlCliCommand(['pack', 'bundle'], args, 'Bundling pack');
|
||||
}
|
||||
@@ -1288,6 +1286,13 @@ export class CliVersionConstraint {
|
||||
*/
|
||||
public static CLI_VERSION_REMOTE_QUERIES = new SemVer('2.6.3');
|
||||
|
||||
/**
|
||||
* CLI version where building QLX packs for remote queries is supported.
|
||||
* (The options were _accepted_ by a few earlier versions, but only from
|
||||
* 2.11.3 will it actually use the existing compilation cache correctly).
|
||||
*/
|
||||
public static CLI_VERSION_QLX_REMOTE = new SemVer('2.11.3');
|
||||
|
||||
/**
|
||||
* CLI version where the `resolve ml-models` subcommand was introduced.
|
||||
*/
|
||||
@@ -1333,7 +1338,7 @@ export class CliVersionConstraint {
|
||||
/**
|
||||
* CLI version that supports the new query server.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_NEW_QUERY_SERVER = new SemVer('2.11.0');
|
||||
public static CLI_VERSION_WITH_NEW_QUERY_SERVER = new SemVer('2.11.1');
|
||||
|
||||
constructor(private readonly cli: CodeQLCliServer) {
|
||||
/**/
|
||||
@@ -1383,6 +1388,10 @@ export class CliVersionConstraint {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_REMOTE_QUERIES);
|
||||
}
|
||||
|
||||
async supportsQlxRemote() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_QLX_REMOTE);
|
||||
}
|
||||
|
||||
async supportsResolveMlModels() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_ML_MODELS);
|
||||
}
|
||||
@@ -1414,8 +1423,11 @@ export class CliVersionConstraint {
|
||||
async supportsNewQueryServer() {
|
||||
// TODO while under development, users _must_ opt-in to the new query server
|
||||
// by setting the `codeql.canaryQueryServer` setting to `true`.
|
||||
// Ignore the version check for now.
|
||||
return allowCanaryQueryServer();
|
||||
// return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NEW_QUERY_SERVER);
|
||||
return allowCanaryQueryServer() &&
|
||||
this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NEW_QUERY_SERVER);
|
||||
}
|
||||
|
||||
async supportsNewQueryServerForTests() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NEW_QUERY_SERVER);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,13 +10,21 @@ export const ALL_SETTINGS: Setting[] = [];
|
||||
export class Setting {
|
||||
name: string;
|
||||
parent?: Setting;
|
||||
private _hasChildren = false;
|
||||
|
||||
constructor(name: string, parent?: Setting) {
|
||||
this.name = name;
|
||||
this.parent = parent;
|
||||
if (parent !== undefined) {
|
||||
parent._hasChildren = true;
|
||||
}
|
||||
ALL_SETTINGS.push(this);
|
||||
}
|
||||
|
||||
get hasChildren() {
|
||||
return this._hasChildren;
|
||||
}
|
||||
|
||||
get qualifiedName(): string {
|
||||
if (this.parent === undefined) {
|
||||
return this.name;
|
||||
@@ -430,13 +438,24 @@ export function isVariantAnalysisLiveResultsEnabled(): boolean {
|
||||
return !!LIVE_RESULTS.getValue<boolean>();
|
||||
}
|
||||
|
||||
// Settings for mocking the GitHub API.
|
||||
const MOCK_GH_API_SERVER = new Setting('mockGitHubApiServer', ROOT_SETTING);
|
||||
|
||||
/**
|
||||
* A flag indicating whether to enable a mock GitHub API server.
|
||||
*/
|
||||
const MOCK_GH_API_SERVER = new Setting('mockGitHubApiServer', REMOTE_QUERIES_SETTING);
|
||||
const MOCK_GH_API_SERVER_ENABLED = new Setting('enabled', MOCK_GH_API_SERVER);
|
||||
|
||||
/**
 * A path to a directory containing test scenarios. If this setting is not set,
 * the mock server will use a default location for test scenarios in dev mode, and
 * will show a menu to select a directory in production mode.
 */
|
||||
const MOCK_GH_API_SERVER_SCENARIOS_PATH = new Setting('scenariosPath', MOCK_GH_API_SERVER);
|
||||
|
||||
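Illustrative sketch (not part of the diff): assuming the root Setting is named 'codeQL', which matches the config.codeQL.mockGitHubApiServer.* when-clauses in package.json above, the nested settings resolve to these qualified names.

    // Illustrative only; 'codeQL' as the root setting name is an assumption based on
    // the package.json when-clauses above.
    MOCK_GH_API_SERVER_ENABLED.qualifiedName;        // 'codeQL.mockGitHubApiServer.enabled'
    MOCK_GH_API_SERVER_SCENARIOS_PATH.qualifiedName; // 'codeQL.mockGitHubApiServer.scenariosPath'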
export interface MockGitHubApiConfig {
|
||||
mockServerEnabled: boolean;
|
||||
mockScenariosPath: string;
|
||||
onDidChangeConfiguration: Event<void>;
|
||||
}
|
||||
|
||||
@@ -446,6 +465,14 @@ export class MockGitHubApiConfigListener extends ConfigListener implements MockG
|
||||
}
|
||||
|
||||
public get mockServerEnabled(): boolean {
|
||||
return !!MOCK_GH_API_SERVER.getValue<boolean>();
|
||||
return !!MOCK_GH_API_SERVER_ENABLED.getValue<boolean>();
|
||||
}
|
||||
|
||||
public get mockScenariosPath(): string {
|
||||
return MOCK_GH_API_SERVER_SCENARIOS_PATH.getValue<string>();
|
||||
}
|
||||
}
|
||||
|
||||
export function getMockGitHubApiServerScenariosPath(): string | undefined {
|
||||
return MOCK_GH_API_SERVER_SCENARIOS_PATH.getValue<string>();
|
||||
}
|
||||
|
||||
@@ -116,6 +116,7 @@ import {
|
||||
} from './remote-queries/gh-api/variant-analysis';
|
||||
import { VariantAnalysisManager } from './remote-queries/variant-analysis-manager';
|
||||
import { createVariantAnalysisContentProvider } from './remote-queries/variant-analysis-content-provider';
|
||||
import { MockGitHubApiServer } from './mocks/mock-gh-api-server';
|
||||
|
||||
/**
|
||||
* extension.ts
|
||||
@@ -1190,6 +1191,39 @@ async function activateWithInstalledDistribution(
|
||||
)
|
||||
);
|
||||
|
||||
const mockServer = new MockGitHubApiServer(ctx);
|
||||
ctx.subscriptions.push(mockServer);
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
'codeQL.mockGitHubApiServer.startRecording',
|
||||
async () => await mockServer.startRecording(),
|
||||
)
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
'codeQL.mockGitHubApiServer.saveScenario',
|
||||
async () => await mockServer.saveScenario(),
|
||||
)
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
'codeQL.mockGitHubApiServer.cancelRecording',
|
||||
async () => await mockServer.cancelRecording(),
|
||||
)
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
'codeQL.mockGitHubApiServer.loadScenario',
|
||||
async () => await mockServer.loadScenario(),
|
||||
)
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
'codeQL.mockGitHubApiServer.unloadScenario',
|
||||
async () => await mockServer.unloadScenario(),
|
||||
)
|
||||
);
|
||||
|
||||
await commands.executeCommand('codeQLDatabases.removeOrphanedDatabases');
|
||||
|
||||
void logger.log('Successfully finished extension initialization.');
|
||||
|
||||
@@ -22,7 +22,11 @@ fs.ensureDirSync(upgradesTmpDir);

export const tmpDirDisposal = {
dispose: () => {
tmpDir.removeCallback();
try {
tmpDir.removeCallback();
} catch (e) {
void logger.log(`Failed to remove temporary directory ${tmpDir.name}: ${e}`);
}
}
};

@@ -584,11 +588,3 @@ export async function* walkDirectory(dir: string): AsyncIterableIterator<string>
}
}
}

/**
 * Pluralizes a word.
 * Example: Returns "N repository" if N is one, "N repositories" otherwise.
 */
export function pluralize(numItems: number | undefined, singular: string, plural: string): string {
return numItems ? `${numItems} ${numItems === 1 ? singular : plural}` : '';
}
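Illustrative sketch (not part of the diff) of how the pluralize helper, moved from helpers.ts to pure/word.ts, behaves:

    pluralize(1, 'repository', 'repositories');   // '1 repository'
    pluralize(5, 'result', 'results');            // '5 results'
    pluralize(undefined, 'result', 'results');    // ''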
@@ -2,11 +2,11 @@ import { env } from 'vscode';
|
||||
import * as path from 'path';
|
||||
import { QueryHistoryConfig } from './config';
|
||||
import { LocalQueryInfo } from './query-results';
|
||||
import { getRawQueryName, QueryHistoryInfo } from './query-history-info';
|
||||
import { buildRepoLabel, getRawQueryName, QueryHistoryInfo } from './query-history-info';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
import { pluralize } from './helpers';
|
||||
import { VariantAnalysisHistoryItem } from './remote-queries/variant-analysis-history-item';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { pluralize } from './pure/word';
|
||||
|
||||
interface InterpolateReplacements {
|
||||
t: string; // Start time
|
||||
@@ -79,23 +79,12 @@ export class HistoryItemLabelProvider {
|
||||
};
|
||||
}
|
||||
|
||||
// Return the number of repositories queried if available. Otherwise, use the controller repository name.
|
||||
private buildRepoLabel(item: RemoteQueryHistoryItem): string {
|
||||
const repositoryCount = item.remoteQuery.repositoryCount;
|
||||
|
||||
if (repositoryCount) {
|
||||
return pluralize(repositoryCount, 'repository', 'repositories');
|
||||
}
|
||||
|
||||
return `${item.remoteQuery.controllerRepository.owner}/${item.remoteQuery.controllerRepository.name}`;
|
||||
}
|
||||
|
||||
private getRemoteInterpolateReplacements(item: RemoteQueryHistoryItem): InterpolateReplacements {
|
||||
const resultCount = item.resultCount ? `(${pluralize(item.resultCount, 'result', 'results')})` : '';
|
||||
return {
|
||||
t: new Date(item.remoteQuery.executionStartTime).toLocaleString(env.language),
|
||||
q: `${item.remoteQuery.queryName} (${item.remoteQuery.language})`,
|
||||
d: this.buildRepoLabel(item),
|
||||
d: buildRepoLabel(item),
|
||||
r: resultCount,
|
||||
s: item.status,
|
||||
f: path.basename(item.remoteQuery.queryFilePath),
|
||||
@@ -108,7 +97,7 @@ export class HistoryItemLabelProvider {
|
||||
return {
|
||||
t: new Date(item.variantAnalysis.executionStartTime).toLocaleString(env.language),
|
||||
q: `${item.variantAnalysis.query.name} (${item.variantAnalysis.query.language})`,
|
||||
d: 'TODO',
|
||||
d: buildRepoLabel(item),
|
||||
r: resultCount,
|
||||
s: item.status,
|
||||
f: path.basename(item.variantAnalysis.query.filePath),
|
||||
|
||||
@@ -27,6 +27,7 @@ import {
|
||||
ALERTS_TABLE_NAME,
|
||||
GRAPH_TABLE_NAME,
|
||||
RawResultsSortState,
|
||||
NavigationDirection,
|
||||
} from './pure/interface-types';
|
||||
import { Logger } from './logging';
|
||||
import { commandRunner } from './commandRunner';
|
||||
@@ -141,19 +142,24 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
|
||||
this.handleSelectionChange.bind(this)
|
||||
)
|
||||
);
|
||||
void logger.log('Registering path-step navigation commands.');
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryResults.nextPathStep',
|
||||
this.navigatePathStep.bind(this, 1)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryResults.previousPathStep',
|
||||
this.navigatePathStep.bind(this, -1)
|
||||
)
|
||||
);
|
||||
const navigationCommands = {
|
||||
'codeQLQueryResults.up': NavigationDirection.up,
|
||||
'codeQLQueryResults.down': NavigationDirection.down,
|
||||
'codeQLQueryResults.left': NavigationDirection.left,
|
||||
'codeQLQueryResults.right': NavigationDirection.right,
|
||||
// For backwards compatibility with keybindings set using an earlier version of the extension.
|
||||
'codeQLQueryResults.nextPathStep': NavigationDirection.down,
|
||||
'codeQLQueryResults.previousPathStep': NavigationDirection.up,
|
||||
};
|
||||
void logger.log('Registering result view navigation commands.');
|
||||
for (const [commandId, direction] of Object.entries(navigationCommands)) {
|
||||
this.push(
|
||||
commandRunner(
|
||||
commandId,
|
||||
this.navigateResultView.bind(this, direction)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
this.push(
|
||||
this.databaseManager.onDidChangeDatabaseItem(({ kind }) => {
|
||||
@@ -169,8 +175,13 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
|
||||
);
|
||||
}
|
||||
|
||||
async navigatePathStep(direction: number): Promise<void> {
|
||||
await this.postMessage({ t: 'navigatePath', direction });
|
||||
async navigateResultView(direction: NavigationDirection): Promise<void> {
|
||||
if (!this.panel?.visible) {
|
||||
return;
|
||||
}
|
||||
// Reveal the panel now as the subsequent call to 'Window.showTextEditor' in 'showLocation' may destroy the webview otherwise.
|
||||
this.panel.reveal();
|
||||
await this.postMessage({ t: 'navigate', direction });
|
||||
}
|
||||
|
||||
protected getPanelConfig(): WebviewPanelConfig {
|
||||
|
||||
@@ -12,13 +12,17 @@ export enum RequestKind {
|
||||
GetVariantAnalysisRepoResult = 'getVariantAnalysisRepoResult',
|
||||
}
|
||||
|
||||
export interface BasicErorResponse {
|
||||
message: string;
|
||||
}
|
||||
|
||||
export interface GetRepoRequest {
|
||||
request: {
|
||||
kind: RequestKind.GetRepo
|
||||
},
|
||||
response: {
|
||||
status: number,
|
||||
body: Repository
|
||||
body: Repository | BasicErorResponse | undefined
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,7 +32,7 @@ export interface SubmitVariantAnalysisRequest {
|
||||
},
|
||||
response: {
|
||||
status: number,
|
||||
body: VariantAnalysis
|
||||
body?: VariantAnalysis | BasicErorResponse
|
||||
}
|
||||
}
|
||||
|
||||
@@ -38,7 +42,7 @@ export interface GetVariantAnalysisRequest {
|
||||
},
|
||||
response: {
|
||||
status: number,
|
||||
body: VariantAnalysis
|
||||
body?: VariantAnalysis | BasicErorResponse
|
||||
}
|
||||
}
|
||||
|
||||
@@ -49,7 +53,7 @@ export interface GetVariantAnalysisRepoRequest {
|
||||
},
|
||||
response: {
|
||||
status: number,
|
||||
body: VariantAnalysisRepoTask
|
||||
body?: VariantAnalysisRepoTask | BasicErorResponse
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,7 +64,8 @@ export interface GetVariantAnalysisRepoResultRequest {
|
||||
},
|
||||
response: {
|
||||
status: number,
|
||||
body: ArrayBuffer
|
||||
body?: Buffer | string,
|
||||
contentType: string,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -70,3 +75,28 @@ export type GitHubApiRequest =
|
||||
| GetVariantAnalysisRequest
|
||||
| GetVariantAnalysisRepoRequest
|
||||
| GetVariantAnalysisRepoResultRequest;
|
||||
|
||||
export const isGetRepoRequest = (
|
||||
request: GitHubApiRequest
|
||||
): request is GetRepoRequest =>
|
||||
request.request.kind === RequestKind.GetRepo;
|
||||
|
||||
export const isSubmitVariantAnalysisRequest = (
|
||||
request: GitHubApiRequest
|
||||
): request is SubmitVariantAnalysisRequest =>
|
||||
request.request.kind === RequestKind.SubmitVariantAnalysis;
|
||||
|
||||
export const isGetVariantAnalysisRequest = (
|
||||
request: GitHubApiRequest
|
||||
): request is GetVariantAnalysisRequest =>
|
||||
request.request.kind === RequestKind.GetVariantAnalysis;
|
||||
|
||||
export const isGetVariantAnalysisRepoRequest = (
|
||||
request: GitHubApiRequest
|
||||
): request is GetVariantAnalysisRepoRequest =>
|
||||
request.request.kind === RequestKind.GetVariantAnalysisRepo;
|
||||
|
||||
export const isGetVariantAnalysisRepoResultRequest = (
|
||||
request: GitHubApiRequest
|
||||
): request is GetVariantAnalysisRepoResultRequest =>
|
||||
request.request.kind === RequestKind.GetVariantAnalysisRepoResult;
|
||||
|
||||
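Illustrative sketch (not part of the diff): a hypothetical helper using the type guards above to narrow the GitHubApiRequest union.

    // Illustrative only; describeRequest is not part of the extension code.
    function describeRequest(request: GitHubApiRequest): string {
      if (isGetRepoRequest(request)) {
        // Narrowed to GetRepoRequest: body is Repository | BasicErorResponse | undefined.
        return `get-repo response with status ${request.response.status}`;
      }
      if (isGetVariantAnalysisRepoResultRequest(request)) {
        // Narrowed to GetVariantAnalysisRepoResultRequest: contentType is available here.
        return `repo-result response with content type ${request.response.contentType}`;
      }
      return 'other GitHub API request';
    }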
@@ -1,49 +1,224 @@
|
||||
import { MockGitHubApiConfigListener } from '../config';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import { commands, env, ExtensionContext, ExtensionMode, QuickPickItem, Uri, window } from 'vscode';
|
||||
import { setupServer, SetupServerApi } from 'msw/node';
|
||||
|
||||
import { getMockGitHubApiServerScenariosPath, MockGitHubApiConfigListener } from '../config';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
|
||||
import { Recorder } from './recorder';
|
||||
import { createRequestHandlers } from './request-handlers';
|
||||
import { getDirectoryNamesInsidePath } from '../pure/files';
|
||||
|
||||
/**
|
||||
* Enables mocking of the GitHub API server via HTTP interception, using msw.
|
||||
*/
|
||||
export class MockGitHubApiServer {
|
||||
export class MockGitHubApiServer extends DisposableObject {
|
||||
private isListening: boolean;
|
||||
private config: MockGitHubApiConfigListener;
|
||||
|
||||
constructor() {
|
||||
private readonly server: SetupServerApi;
|
||||
private readonly recorder: Recorder;
|
||||
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
) {
|
||||
super();
|
||||
this.isListening = false;
|
||||
this.config = new MockGitHubApiConfigListener();
|
||||
|
||||
this.server = setupServer();
|
||||
this.recorder = this.push(new Recorder(this.server));
|
||||
|
||||
this.setupConfigListener();
|
||||
}
|
||||
|
||||
public startServer(): void {
|
||||
this.isListening = true;
|
||||
if (this.isListening) {
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO: Enable HTTP interception.
|
||||
this.server.listen();
|
||||
this.isListening = true;
|
||||
}
|
||||
|
||||
public stopServer(): void {
|
||||
this.server.close();
|
||||
this.isListening = false;
|
||||
|
||||
// TODO: Disable HTTP interception.
|
||||
}
|
||||
|
||||
public loadScenario(): void {
|
||||
// TODO: Implement logic to load a scenario from a directory.
|
||||
public async loadScenario(): Promise<void> {
|
||||
const scenariosPath = await this.getScenariosPath();
|
||||
if (!scenariosPath) {
|
||||
return;
|
||||
}
|
||||
|
||||
const scenarioNames = await getDirectoryNamesInsidePath(scenariosPath);
|
||||
const scenarioQuickPickItems = scenarioNames.map(s => ({ label: s }));
|
||||
const quickPickOptions = {
|
||||
placeHolder: 'Select a scenario to load',
|
||||
};
|
||||
const selectedScenario = await window.showQuickPick<QuickPickItem>(
|
||||
scenarioQuickPickItems,
|
||||
quickPickOptions);
|
||||
if (!selectedScenario) {
|
||||
return;
|
||||
}
|
||||
|
||||
const scenarioName = selectedScenario.label;
|
||||
const scenarioPath = path.join(scenariosPath, scenarioName);
|
||||
|
||||
const handlers = await createRequestHandlers(scenarioPath);
|
||||
this.server.resetHandlers();
|
||||
this.server.use(...handlers);
|
||||
|
||||
// Set a value in the context to track whether we have a scenario loaded.
|
||||
// This allows us to use this to show/hide commands (see package.json)
|
||||
await commands.executeCommand('setContext', 'codeQL.mockGitHubApiServer.scenarioLoaded', true);
|
||||
|
||||
await window.showInformationMessage(`Loaded scenario '${scenarioName}'`);
|
||||
}
|
||||
|
||||
public listScenarios(): void {
|
||||
// TODO: Implement logic to list all available scenarios.
|
||||
public async unloadScenario(): Promise<void> {
|
||||
if (!this.isScenarioLoaded()) {
|
||||
await window.showInformationMessage('No scenario currently loaded');
|
||||
}
|
||||
else {
|
||||
await this.unloadAllScenarios();
|
||||
await window.showInformationMessage('Unloaded scenario');
|
||||
}
|
||||
}
|
||||
|
||||
public recordScenario(): void {
|
||||
// TODO: Implement logic to record a new scenario to a directory.
|
||||
public async startRecording(): Promise<void> {
|
||||
if (this.recorder.isRecording) {
|
||||
void window.showErrorMessage('A scenario is already being recorded. Use the "Save Scenario" or "Cancel Scenario" commands to finish recording.');
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.isScenarioLoaded()) {
|
||||
await this.unloadAllScenarios();
|
||||
void window.showInformationMessage('A scenario was loaded so it has been unloaded');
|
||||
}
|
||||
|
||||
this.recorder.start();
|
||||
// Set a value in the context to track whether we are recording. This allows us to use this to show/hide commands (see package.json)
|
||||
await commands.executeCommand('setContext', 'codeQL.mockGitHubApiServer.recording', true);
|
||||
|
||||
await window.showInformationMessage('Recording scenario. To save the scenario, use the "CodeQL Mock GitHub API Server: Save Scenario" command.');
|
||||
}
|
||||
|
||||
public async saveScenario(): Promise<void> {
|
||||
const scenariosPath = await this.getScenariosPath();
|
||||
if (!scenariosPath) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Set a value in the context to track whether we are recording. This allows us to use this to show/hide commands (see package.json)
|
||||
await commands.executeCommand('setContext', 'codeQL.mockGitHubApiServer.recording', false);
|
||||
|
||||
if (!this.recorder.isRecording) {
|
||||
void window.showErrorMessage('No scenario is currently being recorded.');
|
||||
return;
|
||||
}
|
||||
if (!this.recorder.anyRequestsRecorded) {
|
||||
void window.showWarningMessage('No requests were recorded. Cancelling scenario.');
|
||||
|
||||
await this.stopRecording();
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const name = await window.showInputBox({
|
||||
title: 'Save scenario',
|
||||
prompt: 'Enter a name for the scenario.',
|
||||
placeHolder: 'successful-run',
|
||||
});
|
||||
if (!name) {
|
||||
return;
|
||||
}
|
||||
|
||||
const filePath = await this.recorder.save(scenariosPath, name);
|
||||
|
||||
await this.stopRecording();
|
||||
|
||||
const action = await window.showInformationMessage(`Scenario saved to ${filePath}`, 'Open directory');
|
||||
if (action === 'Open directory') {
|
||||
await env.openExternal(Uri.file(filePath));
|
||||
}
|
||||
}
|
||||
|
||||
public async cancelRecording(): Promise<void> {
|
||||
if (!this.recorder.isRecording) {
|
||||
void window.showErrorMessage('No scenario is currently being recorded.');
|
||||
return;
|
||||
}
|
||||
|
||||
await this.stopRecording();
|
||||
|
||||
void window.showInformationMessage('Recording cancelled.');
|
||||
}
|
||||
|
||||
private async stopRecording(): Promise<void> {
|
||||
// Set a value in the context to track whether we are recording. This allows us to use this to show/hide commands (see package.json)
|
||||
await commands.executeCommand('setContext', 'codeQL.mockGitHubApiServer.recording', false);
|
||||
|
||||
await this.recorder.stop();
|
||||
await this.recorder.clear();
|
||||
}
|
||||
|
||||
private async getScenariosPath(): Promise<string | undefined> {
|
||||
const scenariosPath = getMockGitHubApiServerScenariosPath();
|
||||
if (scenariosPath) {
|
||||
return scenariosPath;
|
||||
}
|
||||
|
||||
if (this.ctx.extensionMode === ExtensionMode.Development) {
|
||||
const developmentScenariosPath = Uri.joinPath(this.ctx.extensionUri, 'src/mocks/scenarios').fsPath.toString();
|
||||
if (await fs.pathExists(developmentScenariosPath)) {
|
||||
return developmentScenariosPath;
|
||||
}
|
||||
}
|
||||
|
||||
const directories = await window.showOpenDialog({
|
||||
canSelectFolders: true,
|
||||
canSelectFiles: false,
|
||||
canSelectMany: false,
|
||||
openLabel: 'Select scenarios directory',
|
||||
title: 'Select scenarios directory',
|
||||
});
|
||||
if (directories === undefined || directories.length === 0) {
|
||||
void window.showErrorMessage('No scenarios directory selected.');
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Unfortunately, we cannot save the directory in the configuration because that requires
|
||||
// the configuration to be registered. If we do that, it would be visible to all users; there
|
||||
// is no "when" clause that would allow us to only show it to users who have enabled the feature flag.
|
||||
|
||||
return directories[0].fsPath;
|
||||
}
|
||||
|
||||
private isScenarioLoaded(): boolean {
|
||||
return this.server.listHandlers().length > 0;
|
||||
}
|
||||
|
||||
private async unloadAllScenarios(): Promise<void> {
|
||||
this.server.resetHandlers();
|
||||
await commands.executeCommand('setContext', 'codeQL.mockGitHubApiServer.scenarioLoaded', false);
|
||||
}
|
||||
|
||||
private setupConfigListener(): void {
|
||||
this.config.onDidChangeConfiguration(() => {
|
||||
if (this.config.mockServerEnabled && !this.isListening) {
|
||||
this.startServer();
|
||||
} else if (!this.config.mockServerEnabled && this.isListening) {
|
||||
this.stopServer();
|
||||
}
|
||||
});
|
||||
// The config "changes" from the default at startup, so we need to call onConfigChange() to ensure the server is
|
||||
// started if required.
|
||||
this.onConfigChange();
|
||||
this.config.onDidChangeConfiguration(() => this.onConfigChange());
|
||||
}
|
||||
|
||||
private onConfigChange(): void {
|
||||
if (this.config.mockServerEnabled && !this.isListening) {
|
||||
this.startServer();
|
||||
} else if (!this.config.mockServerEnabled && this.isListening) {
|
||||
this.stopServer();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extensions/ql-vscode/src/mocks/recorder.ts (new file, 220 lines)
@@ -0,0 +1,220 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
|
||||
import { MockedRequest } from 'msw';
|
||||
import { SetupServerApi } from 'msw/node';
|
||||
import { IsomorphicResponse } from '@mswjs/interceptors';
|
||||
|
||||
import { Headers } from 'headers-polyfill';
|
||||
import fetch from 'node-fetch';
|
||||
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
|
||||
import { GetVariantAnalysisRepoResultRequest, GitHubApiRequest, RequestKind } from './gh-api-request';
|
||||
|
||||
export class Recorder extends DisposableObject {
|
||||
private readonly allRequests = new Map<string, MockedRequest>();
|
||||
private currentRecordedScenario: GitHubApiRequest[] = [];
|
||||
|
||||
private _isRecording = false;
|
||||
|
||||
constructor(
|
||||
private readonly server: SetupServerApi,
|
||||
) {
|
||||
super();
|
||||
this.onRequestStart = this.onRequestStart.bind(this);
|
||||
this.onResponseBypass = this.onResponseBypass.bind(this);
|
||||
}
|
||||
|
||||
public get isRecording(): boolean {
|
||||
return this._isRecording;
|
||||
}
|
||||
|
||||
public get anyRequestsRecorded(): boolean {
|
||||
return this.currentRecordedScenario.length > 0;
|
||||
}
|
||||
|
||||
public start(): void {
|
||||
if (this._isRecording) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._isRecording = true;
|
||||
|
||||
this.clear();
|
||||
|
||||
this.server.events.on('request:start', this.onRequestStart);
|
||||
this.server.events.on('response:bypass', this.onResponseBypass);
|
||||
}
|
||||
|
||||
public stop(): void {
|
||||
if (!this._isRecording) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._isRecording = false;
|
||||
|
||||
this.server.events.removeListener('request:start', this.onRequestStart);
|
||||
this.server.events.removeListener('response:bypass', this.onResponseBypass);
|
||||
}
|
||||
|
||||
public clear() {
|
||||
this.currentRecordedScenario = [];
|
||||
this.allRequests.clear();
|
||||
}
|
||||
|
||||
public async save(scenariosPath: string, name: string): Promise<string> {
|
||||
const scenarioDirectory = path.join(scenariosPath, name);
|
||||
|
||||
await fs.ensureDir(scenarioDirectory);
|
||||
|
||||
for (let i = 0; i < this.currentRecordedScenario.length; i++) {
|
||||
const request = this.currentRecordedScenario[i];
|
||||
|
||||
const fileName = `${i}-${request.request.kind}.json`;
|
||||
const filePath = path.join(scenarioDirectory, fileName);
|
||||
|
||||
let writtenRequest = {
|
||||
...request
|
||||
};
|
||||
|
||||
if (shouldWriteBodyToFile(writtenRequest)) {
|
||||
const extension = writtenRequest.response.contentType === 'application/zip' ? 'zip' : 'bin';
|
||||
|
||||
const bodyFileName = `${i}-${writtenRequest.request.kind}.body.${extension}`;
|
||||
const bodyFilePath = path.join(scenarioDirectory, bodyFileName);
|
||||
await fs.writeFile(bodyFilePath, writtenRequest.response.body);
|
||||
|
||||
writtenRequest = {
|
||||
...writtenRequest,
|
||||
response: {
|
||||
...writtenRequest.response,
|
||||
body: `file:${bodyFileName}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
await fs.writeFile(filePath, JSON.stringify(writtenRequest, null, 2));
|
||||
}
|
||||
|
||||
this.stop();
|
||||
|
||||
return scenarioDirectory;
|
||||
}
|
||||
|
||||
private onRequestStart(request: MockedRequest): void {
|
||||
if (request.headers.has('x-vscode-codeql-msw-bypass')) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.allRequests.set(request.id, request);
|
||||
}
|
||||
|
||||
private async onResponseBypass(response: IsomorphicResponse, requestId: string): Promise<void> {
|
||||
const request = this.allRequests.get(requestId);
|
||||
this.allRequests.delete(requestId);
|
||||
if (!request) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (response.body === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const gitHubApiRequest = await createGitHubApiRequest(request.url.toString(), response.status, response.body, response.headers);
|
||||
if (!gitHubApiRequest) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.currentRecordedScenario.push(gitHubApiRequest);
|
||||
}
|
||||
}
|
||||
|
||||
async function createGitHubApiRequest(url: string, status: number, body: string, headers: Headers): Promise<GitHubApiRequest | undefined> {
|
||||
if (!url) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (url.match(/\/repos\/[a-zA-Z0-9-_.]+\/[a-zA-Z0-9-_.]+$/)) {
|
||||
return {
|
||||
request: {
|
||||
kind: RequestKind.GetRepo,
|
||||
},
|
||||
response: {
|
||||
status,
|
||||
body: JSON.parse(body),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (url.match(/\/repositories\/\d+\/code-scanning\/codeql\/variant-analyses$/)) {
|
||||
return {
|
||||
request: {
|
||||
kind: RequestKind.SubmitVariantAnalysis,
|
||||
},
|
||||
response: {
|
||||
status,
|
||||
body: JSON.parse(body),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (url.match(/\/repositories\/\d+\/code-scanning\/codeql\/variant-analyses\/\d+$/)) {
|
||||
return {
|
||||
request: {
|
||||
kind: RequestKind.GetVariantAnalysis,
|
||||
},
|
||||
response: {
|
||||
status,
|
||||
body: JSON.parse(body),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const repoTaskMatch = url.match(/\/repositories\/\d+\/code-scanning\/codeql\/variant-analyses\/\d+\/repositories\/(?<repositoryId>\d+)$/);
|
||||
if (repoTaskMatch?.groups?.repositoryId) {
|
||||
return {
|
||||
request: {
|
||||
kind: RequestKind.GetVariantAnalysisRepo,
|
||||
repositoryId: parseInt(repoTaskMatch.groups.repositoryId, 10),
|
||||
},
|
||||
response: {
|
||||
status,
|
||||
body: JSON.parse(body),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// if url is a download URL for a variant analysis result, then it's a get-variant-analysis-repoResult.
|
||||
const repoDownloadMatch = url.match(/objects-origin\.githubusercontent\.com\/codeql-query-console\/codeql-variant-analysis-repo-tasks\/\d+\/(?<repositoryId>\d+)/);
|
||||
if (repoDownloadMatch?.groups?.repositoryId) {
|
||||
// msw currently doesn't support binary response bodies, so we need to download this separately
|
||||
// see https://github.com/mswjs/interceptors/blob/15eafa6215a328219999403e3ff110e71699b016/src/interceptors/ClientRequest/utils/getIncomingMessageBody.ts#L24-L33
|
||||
// Essentially, msw is trying to decode a ZIP file as UTF-8 which changes the bytes and corrupts the file.
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
// We need to ensure we don't end up in an infinite loop, since this request will also be intercepted
|
||||
'x-vscode-codeql-msw-bypass': 'true',
|
||||
},
|
||||
});
|
||||
const responseBuffer = await response.buffer();
|
||||
|
||||
return {
|
||||
request: {
|
||||
kind: RequestKind.GetVariantAnalysisRepoResult,
|
||||
repositoryId: parseInt(repoDownloadMatch.groups.repositoryId, 10),
|
||||
},
|
||||
response: {
|
||||
status,
|
||||
body: responseBuffer,
|
||||
contentType: headers.get('content-type') ?? 'application/octet-stream',
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function shouldWriteBodyToFile(request: GitHubApiRequest): request is GetVariantAnalysisRepoResultRequest {
|
||||
return request.response.body instanceof Buffer;
|
||||
}
|
||||
extensions/ql-vscode/src/mocks/request-handlers.ts (new file, 156 lines)
@@ -0,0 +1,156 @@
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import { DefaultBodyType, MockedRequest, rest, RestHandler } from 'msw';
|
||||
import {
|
||||
GitHubApiRequest,
|
||||
isGetRepoRequest,
|
||||
isGetVariantAnalysisRepoRequest,
|
||||
isGetVariantAnalysisRepoResultRequest,
|
||||
isGetVariantAnalysisRequest,
|
||||
isSubmitVariantAnalysisRequest
|
||||
} from './gh-api-request';
|
||||
|
||||
const baseUrl = 'https://api.github.com';
|
||||
|
||||
export type RequestHandler = RestHandler<MockedRequest<DefaultBodyType>>;
|
||||
|
||||
export async function createRequestHandlers(scenarioDirPath: string): Promise<RequestHandler[]> {
|
||||
const requests = await readRequestFiles(scenarioDirPath);
|
||||
|
||||
const handlers = [
|
||||
createGetRepoRequestHandler(requests),
|
||||
createSubmitVariantAnalysisRequestHandler(requests),
|
||||
createGetVariantAnalysisRequestHandler(requests),
|
||||
createGetVariantAnalysisRepoRequestHandler(requests),
|
||||
createGetVariantAnalysisRepoResultRequestHandler(requests),
|
||||
];
|
||||
|
||||
return handlers;
|
||||
}
|
||||
|
||||
async function readRequestFiles(scenarioDirPath: string): Promise<GitHubApiRequest[]> {
|
||||
const files = await fs.readdir(scenarioDirPath);
|
||||
|
||||
const orderedFiles = files.sort((a, b) => {
|
||||
const aNum = parseInt(a.split('-')[0]);
|
||||
const bNum = parseInt(b.split('-')[0]);
|
||||
return aNum - bNum;
|
||||
});
|
||||
|
||||
const requests: GitHubApiRequest[] = [];
|
||||
for (const file of orderedFiles) {
|
||||
if (!file.endsWith('.json')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const filePath = path.join(scenarioDirPath, file);
|
||||
const request: GitHubApiRequest = await fs.readJson(filePath, { encoding: 'utf8' });
|
||||
|
||||
if (typeof request.response.body === 'string' && request.response.body.startsWith('file:')) {
|
||||
request.response.body = await fs.readFile(path.join(scenarioDirPath, request.response.body.substring(5)));
|
||||
}
|
||||
|
||||
requests.push(request);
|
||||
}
|
||||
|
||||
return requests;
|
||||
}
|
||||
|
||||
function createGetRepoRequestHandler(requests: GitHubApiRequest[]): RequestHandler {
|
||||
const getRepoRequests = requests.filter(isGetRepoRequest);
|
||||
|
||||
if (getRepoRequests.length > 1) {
|
||||
throw Error('More than one get repo request found');
|
||||
}
|
||||
|
||||
const getRepoRequest = getRepoRequests[0];
|
||||
|
||||
return rest.get(`${baseUrl}/repos/:owner/:name`, (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(getRepoRequest.response.status),
|
||||
ctx.json(getRepoRequest.response.body),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function createSubmitVariantAnalysisRequestHandler(requests: GitHubApiRequest[]): RequestHandler {
|
||||
const submitVariantAnalysisRequests = requests.filter(isSubmitVariantAnalysisRequest);
|
||||
|
||||
if (submitVariantAnalysisRequests.length > 1) {
|
||||
throw Error('More than one submit variant analysis request found');
|
||||
}
|
||||
|
||||
const getRepoRequest = submitVariantAnalysisRequests[0];
|
||||
|
||||
return rest.post(`${baseUrl}/repositories/:controllerRepoId/code-scanning/codeql/variant-analyses`, (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(getRepoRequest.response.status),
|
||||
ctx.json(getRepoRequest.response.body),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function createGetVariantAnalysisRequestHandler(requests: GitHubApiRequest[]): RequestHandler {
|
||||
const getVariantAnalysisRequests = requests.filter(isGetVariantAnalysisRequest);
|
||||
let requestIndex = 0;
|
||||
|
||||
// During the lifetime of a variant analysis run, there are multiple requests
|
||||
// to get the variant analysis. We need to return different responses for each
|
||||
// request, so keep an index of the request and return the appropriate response.
|
||||
return rest.get(`${baseUrl}/repositories/:controllerRepoId/code-scanning/codeql/variant-analyses/:variantAnalysisId`, (_req, res, ctx) => {
|
||||
const request = getVariantAnalysisRequests[requestIndex];
|
||||
|
||||
if (requestIndex < getVariantAnalysisRequests.length - 1) {
|
||||
// If there are more requests to come, increment the index.
|
||||
requestIndex++;
|
||||
}
|
||||
|
||||
return res(
|
||||
ctx.status(request.response.status),
|
||||
ctx.json(request.response.body),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function createGetVariantAnalysisRepoRequestHandler(requests: GitHubApiRequest[]): RequestHandler {
|
||||
const getVariantAnalysisRepoRequests = requests.filter(isGetVariantAnalysisRepoRequest);
|
||||
|
||||
return rest.get(
|
||||
`${baseUrl}/repositories/:controllerRepoId/code-scanning/codeql/variant-analyses/:variantAnalysisId/repositories/:repoId`,
|
||||
(req, res, ctx) => {
|
||||
const scenarioRequest = getVariantAnalysisRepoRequests.find(r => r.request.repositoryId.toString() === req.params.repoId);
|
||||
if (!scenarioRequest) {
|
||||
throw Error(`No scenario request found for ${req.url}`);
|
||||
}
|
||||
|
||||
return res(
|
||||
ctx.status(scenarioRequest.response.status),
|
||||
ctx.json(scenarioRequest.response.body),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function createGetVariantAnalysisRepoResultRequestHandler(requests: GitHubApiRequest[]): RequestHandler {
|
||||
const getVariantAnalysisRepoResultRequests = requests.filter(isGetVariantAnalysisRepoResultRequest);
|
||||
|
||||
return rest.get(
|
||||
'https://objects-origin.githubusercontent.com/codeql-query-console/codeql-variant-analysis-repo-tasks/:variantAnalysisId/:repoId/*',
|
||||
(req, res, ctx) => {
|
||||
const scenarioRequest = getVariantAnalysisRepoResultRequests.find(r => r.request.repositoryId.toString() === req.params.repoId);
|
||||
if (!scenarioRequest) {
|
||||
throw Error(`No scenario request found for ${req.url}`);
|
||||
}
|
||||
|
||||
if (scenarioRequest.response.body) {
|
||||
return res(
|
||||
ctx.status(scenarioRequest.response.status),
|
||||
ctx.set('Content-Type', scenarioRequest.response.contentType),
|
||||
ctx.body(scenarioRequest.response.body),
|
||||
);
|
||||
} else {
|
||||
return res(
|
||||
ctx.status(scenarioRequest.response.status),
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -28,3 +28,25 @@ export async function gatherQlFiles(paths: string[]): Promise<[string[], boolean
}
return [Array.from(gatheredUris), dirFound];
}

/**
 * Lists the names of directories inside the given path.
 * @param path The path to the directory to read.
 * @returns the names of the directories inside the given path.
 */
export async function getDirectoryNamesInsidePath(path: string): Promise<string[]> {
if (!(await fs.pathExists(path))) {
throw Error(`Path does not exist: ${path}`);
}
if (!(await fs.stat(path)).isDirectory()) {
throw Error(`Path is not a directory: ${path}`);
}

const dirItems = await fs.readdir(path, { withFileTypes: true });

const dirNames = dirItems
.filter(dirent => dirent.isDirectory())
.map(dirent => dirent.name);

return dirNames;
}
@@ -145,12 +145,17 @@ export interface ShowInterpretedPageMsg {
queryPath: string;
}

/** Advance to the next or previous path no in the path viewer */
export interface NavigatePathMsg {
t: 'navigatePath';
export const enum NavigationDirection {
up = 'up',
down = 'down',
left = 'left',
right = 'right',
}

/** 1 for next, -1 for previous */
direction: number;
/** Move up, down, left, or right in the result viewer. */
export interface NavigateMsg {
t: 'navigate';
direction: NavigationDirection;
}
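Illustrative sketch (not part of the diff): a webview message built from these types, asking the result viewer to move one step down.

    // Illustrative only.
    const msg: NavigateMsg = {
      t: 'navigate',
      direction: NavigationDirection.down,
    };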
/**
@@ -168,7 +173,7 @@ export type IntoResultsViewMsg =
| ResultsUpdatingMsg
| SetStateMsg
| ShowInterpretedPageMsg
| NavigatePathMsg
| NavigateMsg
| UntoggleShowProblemsMsg;
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,43 +1,52 @@
|
||||
import * as sarif from 'sarif';
|
||||
|
||||
/**
|
||||
* Identifies a result, a path, or one of the nodes on a path.
|
||||
*/
|
||||
interface ResultKeyBase {
|
||||
resultIndex: number;
|
||||
pathIndex?: number;
|
||||
pathNodeIndex?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Identifies one of the results in a result set by its index in the result list.
|
||||
*/
|
||||
export interface Result {
|
||||
export interface Result extends ResultKeyBase {
|
||||
resultIndex: number;
|
||||
pathIndex?: undefined;
|
||||
pathNodeIndex?: undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Identifies one of the paths associated with a result.
|
||||
*/
|
||||
export interface Path extends Result {
|
||||
export interface Path extends ResultKeyBase {
|
||||
pathIndex: number;
|
||||
pathNodeIndex?: undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Identifies one of the nodes in a path.
|
||||
*/
|
||||
export interface PathNode extends Path {
|
||||
export interface PathNode extends ResultKeyBase {
|
||||
pathIndex: number;
|
||||
pathNodeIndex: number;
|
||||
}
|
||||
|
||||
/** Alias for `undefined` but more readable in some cases */
|
||||
export const none: PathNode | undefined = undefined;
|
||||
export type ResultKey = Result | Path | PathNode;
|
||||
|
||||
/**
|
||||
* Looks up a specific result in a result set.
|
||||
*/
|
||||
export function getResult(sarif: sarif.Log, key: Result): sarif.Result | undefined {
|
||||
if (sarif.runs.length === 0) return undefined;
|
||||
if (sarif.runs[0].results === undefined) return undefined;
|
||||
const results = sarif.runs[0].results;
|
||||
return results[key.resultIndex];
|
||||
export function getResult(sarif: sarif.Log, key: Result | Path | PathNode): sarif.Result | undefined {
|
||||
return sarif.runs[0]?.results?.[key.resultIndex];
|
||||
}
|
||||
|
||||
/**
|
||||
* Looks up a specific path in a result set.
|
||||
*/
|
||||
export function getPath(sarif: sarif.Log, key: Path): sarif.ThreadFlow | undefined {
|
||||
export function getPath(sarif: sarif.Log, key: Path | PathNode): sarif.ThreadFlow | undefined {
|
||||
const result = getResult(sarif, key);
|
||||
if (result === undefined) return undefined;
|
||||
let index = -1;
|
||||
@@ -58,22 +67,13 @@ export function getPath(sarif: sarif.Log, key: Path): sarif.ThreadFlow | undefin
|
||||
export function getPathNode(sarif: sarif.Log, key: PathNode): sarif.Location | undefined {
|
||||
const path = getPath(sarif, key);
|
||||
if (path === undefined) return undefined;
|
||||
return path.locations[key.pathNodeIndex];
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the two keys are both `undefined` or contain the same set of indices.
|
||||
*/
|
||||
export function equals(key1: PathNode | undefined, key2: PathNode | undefined): boolean {
|
||||
if (key1 === key2) return true;
|
||||
if (key1 === undefined || key2 === undefined) return false;
|
||||
return key1.resultIndex === key2.resultIndex && key1.pathIndex === key2.pathIndex && key1.pathNodeIndex === key2.pathNodeIndex;
|
||||
return path.locations[key.pathNodeIndex]?.location;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the two keys contain the same set of indices and neither are `undefined`.
|
||||
*/
|
||||
export function equalsNotUndefined(key1: PathNode | undefined, key2: PathNode | undefined): boolean {
|
||||
export function equalsNotUndefined(key1: Partial<PathNode> | undefined, key2: Partial<PathNode> | undefined): boolean {
|
||||
if (key1 === undefined || key2 === undefined) return false;
|
||||
return key1.resultIndex === key2.resultIndex && key1.pathIndex === key2.pathIndex && key1.pathNodeIndex === key2.pathNodeIndex;
|
||||
}
|
||||
@@ -93,3 +93,11 @@ export function getAllPaths(result: sarif.Result): sarif.ThreadFlow[] {
|
||||
}
|
||||
return paths;
|
||||
}
|
||||
|
||||
/**
* Creates a unique string representation of the given key, suitable for use
* as the key in a map or set.
*/
export function keyToString(key: ResultKey) {
return key.resultIndex + '-' + (key.pathIndex ?? '') + '-' + (key.pathNodeIndex ?? '');
}
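A quick illustration of the string format `keyToString` produces; the sample keys below are invented:

```ts
const resultKey: Result = { resultIndex: 2 };
const pathKey: Path = { resultIndex: 2, pathIndex: 0 };
const nodeKey: PathNode = { resultIndex: 2, pathIndex: 0, pathNodeIndex: 5 };

keyToString(resultKey); // '2--'
keyToString(pathKey);   // '2-0-'
keyToString(nodeKey);   // '2-0-5'
```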
|
||||
|
||||
8
extensions/ql-vscode/src/pure/word.ts
Normal file
@@ -0,0 +1,8 @@
/**
* Pluralizes a word.
* Example: Returns "N repository" if N is one, "N repositories" otherwise.
*/

export function pluralize(numItems: number | undefined, singular: string, plural: string): string {
return numItems !== undefined ? `${numItems} ${numItems === 1 ? singular : plural}` : '';
}
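Illustrative calls (not part of the commit) showing the three cases the implementation handles:

```ts
pluralize(1, 'repository', 'repositories');         // '1 repository'
pluralize(3, 'repository', 'repositories');         // '3 repositories'
pluralize(undefined, 'repository', 'repositories'); // '' (count unknown)
```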
|
||||
@@ -2,6 +2,8 @@ import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-it
|
||||
import { VariantAnalysisHistoryItem } from './remote-queries/variant-analysis-history-item';
|
||||
import { LocalQueryInfo } from './query-results';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { pluralize } from './pure/word';
|
||||
import { hasRepoScanCompleted } from './remote-queries/shared/variant-analysis';
|
||||
|
||||
export type QueryHistoryInfo = LocalQueryInfo | RemoteQueryHistoryItem | VariantAnalysisHistoryItem;
|
||||
|
||||
@@ -18,14 +20,21 @@ export function getRawQueryName(item: QueryHistoryInfo): string {
|
||||
}
|
||||
}
|
||||
|
||||
export function getQueryHistoryItemId(item: QueryHistoryInfo): string {
|
||||
/**
|
||||
* Gets an identifier for the query history item which could be
|
||||
* a local/remote query or a variant analysis. This id isn't guaranteed
|
||||
* to be unique for each item in the query history.
|
||||
* @param item the history item.
|
||||
* @returns the id of the query or variant analysis.
|
||||
*/
|
||||
export function getQueryId(item: QueryHistoryInfo): string {
|
||||
switch (item.t) {
|
||||
case 'local':
|
||||
return item.initialInfo.id;
|
||||
case 'remote':
|
||||
return item.queryId;
|
||||
case 'variant-analysis':
|
||||
return item.historyItemId;
|
||||
return item.variantAnalysis.id.toString();
|
||||
default:
|
||||
assertNever(item);
|
||||
}
|
||||
@@ -43,3 +52,22 @@ export function getQueryText(item: QueryHistoryInfo): string {
|
||||
assertNever(item);
|
||||
}
|
||||
}
|
||||
|
||||
export function buildRepoLabel(item: RemoteQueryHistoryItem | VariantAnalysisHistoryItem): string {
|
||||
if (item.t === 'remote') {
|
||||
// Return the number of repositories queried if available. Otherwise, use the controller repository name.
|
||||
const repositoryCount = item.remoteQuery.repositoryCount;
|
||||
|
||||
if (repositoryCount) {
|
||||
return pluralize(repositoryCount, 'repository', 'repositories');
|
||||
}
|
||||
return `${item.remoteQuery.controllerRepository.owner}/${item.remoteQuery.controllerRepository.name}`;
|
||||
} else if (item.t === 'variant-analysis') {
|
||||
const totalScannedRepositoryCount = item.variantAnalysis.scannedRepos?.length ?? 0;
|
||||
const completedRepositoryCount = item.variantAnalysis.scannedRepos?.filter(repo => hasRepoScanCompleted(repo)).length ?? 0;
|
||||
|
||||
return `${completedRepositoryCount}/${pluralize(totalScannedRepositoryCount, 'repository', 'repositories')}`; // e.g. "2/3 repositories"
|
||||
} else {
|
||||
assertNever(item);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,10 +31,10 @@ import { commandRunner } from './commandRunner';
|
||||
import { ONE_HOUR_IN_MS, TWO_HOURS_IN_MS } from './pure/time';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { CompletedLocalQueryInfo, LocalQueryInfo } from './query-results';
|
||||
import { getQueryHistoryItemId, getQueryText, QueryHistoryInfo } from './query-history-info';
|
||||
import { getQueryId, getQueryText, QueryHistoryInfo } from './query-history-info';
|
||||
import { DatabaseManager } from './databases';
|
||||
import { registerQueryHistoryScrubber } from './query-history-scrubber';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { QueryStatus, variantAnalysisStatusToQueryStatus } from './query-status';
|
||||
import { slurpQueryHistory, splatQueryHistory } from './query-serialization';
|
||||
import * as fs from 'fs-extra';
|
||||
import { CliVersionConstraint } from './cli';
|
||||
@@ -51,7 +51,8 @@ import { EvalLogData, parseViewerData } from './pure/log-summary-parser';
|
||||
import { QueryWithResults } from './run-queries-shared';
|
||||
import { QueryRunner } from './queryRunner';
|
||||
import { VariantAnalysisManager } from './remote-queries/variant-analysis-manager';
|
||||
import { nanoid } from 'nanoid';
|
||||
import { VariantAnalysisHistoryItem } from './remote-queries/variant-analysis-history-item';
|
||||
import { getTotalResultCount } from './remote-queries/shared/variant-analysis';
|
||||
|
||||
/**
|
||||
* query-history.ts
|
||||
@@ -604,14 +605,43 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
t: 'variant-analysis',
|
||||
status: QueryStatus.InProgress,
|
||||
completed: false,
|
||||
historyItemId: nanoid(),
|
||||
variantAnalysis,
|
||||
});
|
||||
|
||||
await this.refreshTreeView();
|
||||
});
|
||||
|
||||
const variantAnalysisStatusUpdateSubscription = this.variantAnalysisManager.onVariantAnalysisStatusUpdated(async (variantAnalysis) => {
|
||||
const items = this.treeDataProvider.allHistory.filter(i => i.t === 'variant-analysis' && i.variantAnalysis.id === variantAnalysis.id);
|
||||
const status = variantAnalysisStatusToQueryStatus(variantAnalysis.status);
|
||||
|
||||
if (items.length > 0) {
|
||||
items.forEach(async (item) => {
|
||||
const variantAnalysisHistoryItem = item as VariantAnalysisHistoryItem;
|
||||
variantAnalysisHistoryItem.status = status;
|
||||
variantAnalysisHistoryItem.failureReason = variantAnalysis.failureReason;
|
||||
variantAnalysisHistoryItem.resultCount = getTotalResultCount(variantAnalysis.scannedRepos);
|
||||
variantAnalysisHistoryItem.variantAnalysis = variantAnalysis;
|
||||
if (status === QueryStatus.Completed) {
|
||||
variantAnalysisHistoryItem.completed = true;
|
||||
}
|
||||
});
|
||||
await this.refreshTreeView();
|
||||
} else {
|
||||
void logger.log('Variant analysis status update event received for unknown variant analysis');
|
||||
}
|
||||
});
|
||||
|
||||
const variantAnalysisRemovedSubscription = this.variantAnalysisManager.onVariantAnalysisRemoved(async (variantAnalysis) => {
|
||||
const items = this.treeDataProvider.allHistory.filter(i => i.t === 'variant-analysis' && i.variantAnalysis.id === variantAnalysis.id);
|
||||
items.forEach(async (item) => {
|
||||
await this.removeRemoteQuery(item as RemoteQueryHistoryItem);
|
||||
});
|
||||
});
|
||||
|
||||
this.push(variantAnalysisAddedSubscription);
|
||||
this.push(variantAnalysisStatusUpdateSubscription);
|
||||
this.push(variantAnalysisRemovedSubscription);
|
||||
}
|
||||
|
||||
private registerToRemoteQueriesEvents() {
|
||||
@@ -663,6 +693,9 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
if (item.t === 'remote') {
|
||||
await this.remoteQueriesManager.rehydrateRemoteQuery(item.queryId, item.remoteQuery, item.status);
|
||||
}
|
||||
if (item.t === 'variant-analysis') {
|
||||
await this.variantAnalysisManager.rehydrateVariantAnalysis(item.variantAnalysis, item.status);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1074,7 +1107,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
queryText: encodeURIComponent(getQueryText(finalSingleItem)),
|
||||
});
|
||||
|
||||
const queryId = getQueryHistoryItemId(finalSingleItem);
|
||||
const queryId = getQueryId(finalSingleItem);
|
||||
|
||||
const uri = Uri.parse(
|
||||
`codeql:${queryId}.ql?${params.toString()}`, true
|
||||
|
||||
@@ -18,6 +18,7 @@ import { DatabaseInfo } from './pure/interface-types';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { QueryEvaluationInfo, QueryWithResults } from './run-queries-shared';
|
||||
import { formatLegacyMessage } from './legacy-query-server/run-queries';
|
||||
import { sarifParser } from './sarif-parser';
|
||||
|
||||
/**
|
||||
* query-results.ts
|
||||
@@ -158,10 +159,12 @@ export async function interpretResultsSarif(
|
||||
sourceInfo?: cli.SourceInfo
|
||||
): Promise<SarifInterpretationData> {
|
||||
const { resultsPath, interpretedResultsPath } = resultsPaths;
|
||||
let res;
|
||||
if (await fs.pathExists(interpretedResultsPath)) {
|
||||
return { ...JSON.parse(await fs.readFile(interpretedResultsPath, 'utf8')), t: 'SarifInterpretationData' };
|
||||
res = await sarifParser(interpretedResultsPath);
|
||||
} else {
|
||||
res = await cli.interpretBqrsSarif(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
|
||||
}
|
||||
const res = await cli.interpretBqrsSarif(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
|
||||
return { ...res, t: 'SarifInterpretationData' };
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,23 @@
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { VariantAnalysisStatus } from './remote-queries/shared/variant-analysis';
|
||||
|
||||
export enum QueryStatus {
|
||||
InProgress = 'InProgress',
|
||||
Completed = 'Completed',
|
||||
Failed = 'Failed',
|
||||
}
|
||||
|
||||
export function variantAnalysisStatusToQueryStatus(status: VariantAnalysisStatus): QueryStatus {
|
||||
switch (status) {
|
||||
case VariantAnalysisStatus.Succeeded:
|
||||
return QueryStatus.Completed;
|
||||
case VariantAnalysisStatus.Failed:
|
||||
return QueryStatus.Failed;
|
||||
case VariantAnalysisStatus.InProgress:
|
||||
return QueryStatus.InProgress;
|
||||
case VariantAnalysisStatus.Canceled:
|
||||
return QueryStatus.Completed;
|
||||
default:
|
||||
assertNever(status);
|
||||
}
|
||||
}
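The mapping restated as example calls (illustrative only); note that canceled variant analyses are surfaced as completed history items:

```ts
variantAnalysisStatusToQueryStatus(VariantAnalysisStatus.Succeeded);  // QueryStatus.Completed
variantAnalysisStatusToQueryStatus(VariantAnalysisStatus.Canceled);   // QueryStatus.Completed
variantAnalysisStatusToQueryStatus(VariantAnalysisStatus.Failed);     // QueryStatus.Failed
variantAnalysisStatusToQueryStatus(VariantAnalysisStatus.InProgress); // QueryStatus.InProgress
```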
|
||||
|
||||
@@ -4,10 +4,7 @@ import * as fs from 'fs-extra';
|
||||
import { window, commands, Uri, ExtensionContext, QuickPickItem, workspace, ViewColumn } from 'vscode';
|
||||
import { Credentials } from '../authentication';
|
||||
import { UserCancellationException } from '../commandRunner';
|
||||
import {
|
||||
showInformationMessageWithAction,
|
||||
pluralize
|
||||
} from '../helpers';
|
||||
import { showInformationMessageWithAction } from '../helpers';
|
||||
import { logger } from '../logging';
|
||||
import { QueryHistoryManager } from '../query-history';
|
||||
import { createGist } from './gh-api/gh-actions-api-client';
|
||||
@@ -16,6 +13,7 @@ import { generateMarkdown } from './remote-queries-markdown-generation';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { AnalysisResults, sumAnalysesResults } from './shared/analysis-result';
|
||||
import { RemoteQueryHistoryItem } from './remote-query-history-item';
|
||||
import { pluralize } from '../pure/word';
|
||||
|
||||
/**
|
||||
* Exports the results of the given or currently-selected remote query.
|
||||
|
||||
@@ -11,7 +11,6 @@ import {
|
||||
showAndLogErrorMessage,
|
||||
showAndLogInformationMessage,
|
||||
tryGetQueryMetadata,
|
||||
pluralize,
|
||||
tmpDir,
|
||||
} from '../helpers';
|
||||
import { Credentials } from '../authentication';
|
||||
@@ -24,6 +23,7 @@ import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQuerySubmissionResult } from './remote-query-submission-result';
|
||||
import { QueryMetadata } from '../pure/interface-types';
|
||||
import { getErrorMessage, REPO_REGEX } from '../pure/helpers-pure';
|
||||
import { pluralize } from '../pure/word';
|
||||
import * as ghApiClient from './gh-api/gh-api-client';
|
||||
import { getRepositorySelection, isValidSelection, RepositorySelection } from './repository-selection';
|
||||
import { parseVariantAnalysisQueryLanguage, VariantAnalysisSubmission } from './shared/variant-analysis';
|
||||
@@ -130,11 +130,21 @@ async function generateQueryPack(cliServer: cli.CodeQLCliServer, queryFile: stri
|
||||
// Clear the cliServer cache so that the previous qlpack text is purged from the CLI.
|
||||
await cliServer.clearCache();
|
||||
|
||||
let precompilationOpts: string[] = [];
|
||||
if (await cliServer.cliConstraints.supportsQlxRemote()) {
|
||||
const ccache = path.join(originalPackRoot, '.cache');
|
||||
precompilationOpts = ['--qlx',
|
||||
'--no-default-compilation-cache',
|
||||
`--compilation-cache=${ccache}`];
|
||||
} else if (await cliServer.cliConstraints.supportsNoPrecompile()) {
|
||||
precompilationOpts = ['--no-precompile'];
|
||||
}
|
||||
|
||||
const bundlePath = await getPackedBundlePath(queryPackDir);
|
||||
void logger.log(`Compiling and bundling query pack from ${queryPackDir} to ${bundlePath}. (This may take a while.)`);
|
||||
await cliServer.packInstall(queryPackDir);
|
||||
const workspaceFolders = getOnDiskWorkspaceFolders();
|
||||
await cliServer.packBundle(queryPackDir, workspaceFolders, bundlePath, false);
|
||||
await cliServer.packBundle(queryPackDir, workspaceFolders, bundlePath, precompilationOpts);
|
||||
const base64Pack = (await fs.readFile(bundlePath)).toString('base64');
|
||||
return {
|
||||
base64Pack,
|
||||
|
||||
@@ -10,7 +10,6 @@ export interface VariantAnalysisHistoryItem {
|
||||
resultCount?: number;
|
||||
status: QueryStatus;
|
||||
completed: boolean;
|
||||
readonly historyItemId: string,
|
||||
variantAnalysis: VariantAnalysis;
|
||||
userSpecifiedLabel?: string;
|
||||
}
|
||||
|
||||
@@ -26,10 +26,18 @@ import { getControllerRepo } from './run-remote-query';
|
||||
import { processUpdatedVariantAnalysis } from './variant-analysis-processor';
|
||||
import PQueue from 'p-queue';
|
||||
import { createTimestampFile } from '../helpers';
|
||||
import { QueryStatus } from '../query-status';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
|
||||
export class VariantAnalysisManager extends DisposableObject implements VariantAnalysisViewManager<VariantAnalysisView> {
|
||||
private readonly _onVariantAnalysisAdded = this.push(new EventEmitter<VariantAnalysis>());
|
||||
public readonly onVariantAnalysisAdded = this._onVariantAnalysisAdded.event;
|
||||
private readonly _onVariantAnalysisStatusUpdated = this.push(new EventEmitter<VariantAnalysis>());
|
||||
public readonly onVariantAnalysisStatusUpdated = this._onVariantAnalysisStatusUpdated.event;
|
||||
|
||||
private readonly _onVariantAnalysisRemoved = this.push(new EventEmitter<VariantAnalysis>());
|
||||
public readonly onVariantAnalysisRemoved = this._onVariantAnalysisRemoved.event;
|
||||
|
||||
private readonly variantAnalysisMonitor: VariantAnalysisMonitor;
|
||||
private readonly variantAnalysisResultsManager: VariantAnalysisResultsManager;
|
||||
@@ -52,6 +60,18 @@ export class VariantAnalysisManager extends DisposableObject implements VariantA
|
||||
this.variantAnalysisResultsManager.onResultLoaded(this.onRepoResultLoaded.bind(this));
|
||||
}
|
||||
|
||||
public async rehydrateVariantAnalysis(variantAnalysis: VariantAnalysis, status: QueryStatus) {
|
||||
if (!(await this.variantAnalysisRecordExists(variantAnalysis.id))) {
|
||||
// In this case, the variant analysis was deleted from disk, most likely because
|
||||
// it was purged by another workspace.
|
||||
this._onVariantAnalysisRemoved.fire(variantAnalysis);
|
||||
} else if (status === QueryStatus.InProgress) {
|
||||
// In this case, last time we checked, the query was still in progress.
|
||||
// We need to setup the monitor to check for completion.
|
||||
await commands.executeCommand('codeQL.monitorVariantAnalysis', variantAnalysis);
|
||||
}
|
||||
}
|
||||
|
||||
public async showView(variantAnalysisId: number): Promise<void> {
|
||||
if (!this.views.has(variantAnalysisId)) {
|
||||
// The view will register itself with the manager, so we don't need to do anything here.
|
||||
@@ -92,6 +112,11 @@ export class VariantAnalysisManager extends DisposableObject implements VariantA
|
||||
await this.variantAnalysisResultsManager.loadResults(variantAnalysisId, this.getVariantAnalysisStorageLocation(variantAnalysisId), repositoryFullName);
|
||||
}
|
||||
|
||||
private async variantAnalysisRecordExists(variantAnalysisId: number): Promise<boolean> {
|
||||
const filePath = this.getVariantAnalysisStorageLocation(variantAnalysisId);
|
||||
return await fs.pathExists(filePath);
|
||||
}
|
||||
|
||||
private async onVariantAnalysisUpdated(variantAnalysis: VariantAnalysis | undefined): Promise<void> {
|
||||
if (!variantAnalysis) {
|
||||
return;
|
||||
@@ -100,6 +125,7 @@ export class VariantAnalysisManager extends DisposableObject implements VariantA
|
||||
this.variantAnalyses.set(variantAnalysis.id, variantAnalysis);
|
||||
|
||||
await this.getView(variantAnalysis.id)?.updateView(variantAnalysis);
|
||||
this._onVariantAnalysisStatusUpdated.fire(variantAnalysis);
|
||||
}
|
||||
|
||||
public async onVariantAnalysisSubmitted(variantAnalysis: VariantAnalysis): Promise<void> {
|
||||
|
||||
@@ -1,33 +1,34 @@
|
||||
import * as Sarif from 'sarif';
|
||||
import * as fs from 'fs-extra';
|
||||
import { parser } from 'stream-json';
|
||||
import { pick } from 'stream-json/filters/Pick';
|
||||
import Assembler = require('stream-json/Assembler');
|
||||
import { chain } from 'stream-chain';
|
||||
import { connectTo } from 'stream-json/Assembler';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
import { withParser } from 'stream-json/filters/Pick';
|
||||
|
||||
const DUMMY_TOOL: Sarif.Tool = { driver: { name: '' } };
|
||||
|
||||
export async function sarifParser(interpretedResultsPath: string): Promise<Sarif.Log> {
|
||||
try {
|
||||
// Parse the SARIF file into token streams, filtering out only the results array.
|
||||
const p = parser();
|
||||
const pipeline = chain([
|
||||
fs.createReadStream(interpretedResultsPath),
|
||||
p,
|
||||
pick({ filter: 'runs.0.results' })
|
||||
]);
|
||||
const pipeline = fs.createReadStream(interpretedResultsPath).pipe(withParser({ filter: 'runs.0.results' }));
|
||||
|
||||
// Creates JavaScript objects from the token stream
|
||||
const asm = Assembler.connectTo(pipeline);
|
||||
const asm = connectTo(pipeline);
|
||||
|
||||
// Returns a constructed Log object with the results or an empty array if no results were found.
|
||||
// Returns a constructed Log object with the results or an empty array if no results were found.
|
||||
// If the parser fails for any reason, it will reject the promise.
|
||||
return await new Promise((resolve, reject) => {
|
||||
let alreadyDone = false;
|
||||
pipeline.on('error', (error) => {
|
||||
reject(error);
|
||||
});
|
||||
|
||||
// If the parser pipeline completes before the assembler, we've reached end of file and have not found any results.
|
||||
pipeline.on('end', () => {
|
||||
if (!alreadyDone) {
|
||||
reject(new Error('Invalid SARIF file: expecting at least one run with result.'));
|
||||
}
|
||||
});
|
||||
|
||||
asm.on('done', (asm) => {
|
||||
|
||||
const log: Sarif.Log = {
|
||||
@@ -41,6 +42,7 @@ export async function sarifParser(interpretedResultsPath: string): Promise<Sarif
|
||||
};
|
||||
|
||||
resolve(log);
|
||||
alreadyDone = true;
|
||||
});
|
||||
});
|
||||
} catch (e) {
|
||||
|
||||
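For context, a minimal sketch of the stream-json pattern the rewritten parser uses (`withParser` plus `connectTo`); the file name and property path below are assumptions, and the sketch omits the end-of-stream handling that the real parser adds for files with no results:

```ts
import * as fs from 'fs-extra';
import { withParser } from 'stream-json/filters/Pick';
import { connectTo } from 'stream-json/Assembler';

function readResultsArray(jsonPath: string): Promise<unknown> {
  return new Promise((resolve, reject) => {
    // Stream the file, keeping only the tokens under `runs.0.results`.
    const pipeline = fs.createReadStream(jsonPath).pipe(withParser({ filter: 'runs.0.results' }));
    pipeline.on('error', reject);
    // The assembler rebuilds a JavaScript value from the filtered token stream.
    connectTo(pipeline).on('done', asm => resolve(asm.current));
  });
}
```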
@@ -12,56 +12,8 @@ Welcome to the Storybook for **CodeQL for Visual Studio Code**! This Storybook c
|
||||
|
||||
### Switching themes
|
||||
|
||||
To switch between VSCode Dark+ and Light+ themes, you can make the following changes:
|
||||
|
||||
```diff
|
||||
diff --git a/extensions/ql-vscode/.storybook/manager.ts b/extensions/ql-vscode/.storybook/manager.ts
|
||||
--- a/extensions/ql-vscode/.storybook/manager.ts
|
||||
+++ b/extensions/ql-vscode/.storybook/manager.ts
|
||||
@@ -2,6 +2,6 @@ import { addons } from '@storybook/addons';
|
||||
import { themes } from '@storybook/theming';
|
||||
|
||||
addons.setConfig({
|
||||
- theme: themes.dark,
|
||||
+ theme: themes.light,
|
||||
enableShortcuts: false,
|
||||
});
|
||||
diff --git a/extensions/ql-vscode/.storybook/preview.ts b/extensions/ql-vscode/.storybook/preview.ts
|
||||
--- a/extensions/ql-vscode/.storybook/preview.ts
|
||||
+++ b/extensions/ql-vscode/.storybook/preview.ts
|
||||
@@ -4,7 +4,7 @@ import { action } from '@storybook/addon-actions';
|
||||
// Allow all stories/components to use Codicons
|
||||
import '@vscode/codicons/dist/codicon.css';
|
||||
|
||||
-import '../src/stories/vscode-theme-dark.css';
|
||||
+import '../src/stories/vscode-theme-light.css';
|
||||
|
||||
// https://storybook.js.org/docs/react/configure/overview#configure-story-rendering
|
||||
export const parameters = {
|
||||
@@ -19,14 +19,14 @@ export const parameters = {
|
||||
},
|
||||
- // Use a dark theme to be aligned with VSCode
|
||||
+ // Use a light theme to be aligned with VSCode
|
||||
docs: {
|
||||
- theme: themes.dark,
|
||||
+ theme: themes.light,
|
||||
},
|
||||
backgrounds: {
|
||||
- default: 'dark',
|
||||
+ default: 'light',
|
||||
values: [
|
||||
{
|
||||
- name: 'dark',
|
||||
- value: '#1e1e1e',
|
||||
+ name: 'light',
|
||||
+ value: '#ffffff',
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
You will need to restart Storybook to apply the theme change to the Storybook UI. The preview frame should update
|
||||
automatically.
|
||||
To switch between VSCode Dark+ and Light+ themes, use the theme button in the toolbar. Theme switching has no effect on this
document, so you'll only see the change when viewing a different story.
|
||||
|
||||
### Writing stories
|
||||
|
||||
|
||||
@@ -628,3 +628,10 @@ body {
|
||||
margin: 0;
|
||||
padding: 0 20px;
|
||||
}
|
||||
|
||||
/**
|
||||
* This is used for setting the background on the Storybook preview.
|
||||
*/
|
||||
body {
|
||||
background-color: var(--vscode-editor-background);
|
||||
}
|
||||
|
||||
@@ -626,3 +626,10 @@ body {
|
||||
margin: 0;
|
||||
padding: 0 20px;
|
||||
}
|
||||
|
||||
/**
|
||||
* This is used for setting the background on the Storybook preview.
|
||||
*/
|
||||
body {
|
||||
background-color: var(--vscode-editor-background);
|
||||
}
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
import * as React from 'react';
|
||||
import { ResultRow } from '../../pure/bqrs-cli-types';
|
||||
import { zebraStripe } from './result-table-utils';
|
||||
import { selectedRowClassName, zebraStripe } from './result-table-utils';
|
||||
import RawTableValue from './RawTableValue';
|
||||
import { ScrollIntoViewHelper } from './scroll-into-view-helper';
|
||||
|
||||
interface Props {
|
||||
rowIndex: number;
|
||||
row: ResultRow;
|
||||
databaseUri: string;
|
||||
className?: string;
|
||||
selectedColumn?: number;
|
||||
onSelected?: (row: number, column: number) => void;
|
||||
scroller?: ScrollIntoViewHelper;
|
||||
}
|
||||
|
||||
export default function RawTableRow(props: Props) {
|
||||
@@ -15,14 +19,18 @@ export default function RawTableRow(props: Props) {
|
||||
<tr key={props.rowIndex} {...zebraStripe(props.rowIndex, props.className || '')}>
|
||||
<td key={-1}>{props.rowIndex + 1}</td>
|
||||
|
||||
{props.row.map((value, columnIndex) => (
|
||||
<td key={columnIndex}>
|
||||
<RawTableValue
|
||||
value={value}
|
||||
databaseUri={props.databaseUri}
|
||||
/>
|
||||
</td>
|
||||
))}
|
||||
{props.row.map((value, columnIndex) => {
|
||||
const isSelected = props.selectedColumn === columnIndex;
|
||||
return (
|
||||
<td ref={props.scroller?.ref(isSelected)} key={columnIndex} {...isSelected ? { className: selectedRowClassName } : {}}>
|
||||
<RawTableValue
|
||||
value={value}
|
||||
databaseUri={props.databaseUri}
|
||||
onSelected={() => props.onSelected?.(props.rowIndex, columnIndex)}
|
||||
/>
|
||||
</td>
|
||||
);
|
||||
})}
|
||||
</tr>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ import { CellValue } from '../../pure/bqrs-cli-types';
|
||||
interface Props {
|
||||
value: CellValue;
|
||||
databaseUri: string;
|
||||
onSelected?: () => void;
|
||||
}
|
||||
|
||||
export default function RawTableValue(props: Props): JSX.Element {
|
||||
@@ -18,5 +19,5 @@ export default function RawTableValue(props: Props): JSX.Element {
|
||||
return <span>{renderLocation(undefined, rawValue.toString())}</span>;
|
||||
}
|
||||
|
||||
return renderLocation(rawValue.url, rawValue.label, props.databaseUri);
|
||||
return renderLocation(rawValue.url, rawValue.label, props.databaseUri, undefined, props.onSelected);
|
||||
}
|
||||
|
||||
@@ -3,9 +3,9 @@ import * as React from 'react';
|
||||
import * as Sarif from 'sarif';
|
||||
import * as Keys from '../../pure/result-keys';
|
||||
import * as octicons from './octicons';
|
||||
import { className, renderLocation, ResultTableProps, zebraStripe, selectableZebraStripe, jumpToLocation, nextSortDirection, emptyQueryResultsMessage } from './result-table-utils';
|
||||
import { onNavigation, NavigationEvent } from './results';
|
||||
import { InterpretedResultSet, SarifInterpretationData } from '../../pure/interface-types';
|
||||
import { className, renderLocation, ResultTableProps, selectableZebraStripe, jumpToLocation, nextSortDirection, emptyQueryResultsMessage } from './result-table-utils';
|
||||
import { onNavigation } from './results';
|
||||
import { InterpretedResultSet, NavigateMsg, NavigationDirection, SarifInterpretationData } from '../../pure/interface-types';
|
||||
import {
|
||||
parseSarifPlainTextMessage,
|
||||
parseSarifLocation,
|
||||
@@ -14,37 +14,40 @@ import {
|
||||
import { InterpretedResultsSortColumn, SortDirection, InterpretedResultsSortState } from '../../pure/interface-types';
|
||||
import { vscode } from '../vscode-api';
|
||||
import { isWholeFileLoc, isLineColumnLoc } from '../../pure/bqrs-utils';
|
||||
import { ScrollIntoViewHelper } from './scroll-into-view-helper';
|
||||
|
||||
export type PathTableProps = ResultTableProps & { resultSet: InterpretedResultSet<SarifInterpretationData> };
|
||||
export interface PathTableState {
|
||||
expanded: { [k: string]: boolean };
|
||||
selectedPathNode: undefined | Keys.PathNode;
|
||||
expanded: Set<string>;
|
||||
selectedItem: undefined | Keys.ResultKey;
|
||||
}
|
||||
|
||||
export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
private scroller = new ScrollIntoViewHelper();
|
||||
|
||||
constructor(props: PathTableProps) {
|
||||
super(props);
|
||||
this.state = { expanded: {}, selectedPathNode: undefined };
|
||||
this.state = { expanded: new Set<string>(), selectedItem: undefined };
|
||||
this.handleNavigationEvent = this.handleNavigationEvent.bind(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a list of `indices`, toggle the first, and if we 'open' the
|
||||
* Given a list of `keys`, toggle the first, and if we 'open' the
|
||||
* first item, open all the rest as well. This mimics vscode's file
|
||||
* explorer tree view behavior.
|
||||
*/
|
||||
toggle(e: React.MouseEvent, indices: number[]) {
|
||||
toggle(e: React.MouseEvent, keys: Keys.ResultKey[]) {
|
||||
const keyStrings = keys.map(Keys.keyToString);
|
||||
this.setState(previousState => {
|
||||
if (previousState.expanded[indices[0]]) {
|
||||
return { expanded: { ...previousState.expanded, [indices[0]]: false } };
|
||||
}
|
||||
else {
|
||||
const expanded = { ...previousState.expanded };
|
||||
for (const index of indices) {
|
||||
expanded[index] = true;
|
||||
const expanded = new Set(previousState.expanded);
|
||||
if (previousState.expanded.has(keyStrings[0])) {
|
||||
expanded.delete(keyStrings[0]);
|
||||
} else {
|
||||
for (const str of keyStrings) {
|
||||
expanded.add(str);
|
||||
}
|
||||
return { expanded };
|
||||
}
|
||||
return { expanded };
|
||||
});
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
@@ -96,7 +99,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
const rows: JSX.Element[] = [];
|
||||
const { numTruncatedResults, sourceLocationPrefix } = resultSet.interpretation;
|
||||
|
||||
function renderRelatedLocations(msg: string, relatedLocations: Sarif.Location[]): JSX.Element[] {
|
||||
function renderRelatedLocations(msg: string, relatedLocations: Sarif.Location[], resultKey: Keys.PathNode | Keys.Result | undefined): JSX.Element[] {
|
||||
const relatedLocationsById: { [k: string]: Sarif.Location } = {};
|
||||
for (const loc of relatedLocations) {
|
||||
relatedLocationsById[loc.id!] = loc;
|
||||
@@ -110,7 +113,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
return <span key={i}>{part}</span>;
|
||||
} else {
|
||||
const renderedLocation = renderSarifLocationWithText(part.text, relatedLocationsById[part.dest],
|
||||
undefined);
|
||||
resultKey);
|
||||
return <span key={i}>{renderedLocation}</span>;
|
||||
}
|
||||
});
|
||||
@@ -122,16 +125,16 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
return <span title={locationHint}>{msg}</span>;
|
||||
}
|
||||
|
||||
const updateSelectionCallback = (pathNodeKey: Keys.PathNode | undefined) => {
|
||||
const updateSelectionCallback = (resultKey: Keys.PathNode | Keys.Result | undefined) => {
|
||||
return () => {
|
||||
this.setState(previousState => ({
|
||||
...previousState,
|
||||
selectedPathNode: pathNodeKey
|
||||
selectedItem: resultKey
|
||||
}));
|
||||
};
|
||||
};
|
||||
|
||||
function renderSarifLocationWithText(text: string | undefined, loc: Sarif.Location, pathNodeKey: Keys.PathNode | undefined): JSX.Element | undefined {
|
||||
function renderSarifLocationWithText(text: string | undefined, loc: Sarif.Location, resultKey: Keys.PathNode | Keys.Result | undefined): JSX.Element | undefined {
|
||||
const parsedLoc = parseSarifLocation(loc, sourceLocationPrefix);
|
||||
if ('hint' in parsedLoc) {
|
||||
return renderNonLocation(text, parsedLoc.hint);
|
||||
@@ -141,7 +144,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
text,
|
||||
databaseUri,
|
||||
undefined,
|
||||
updateSelectionCallback(pathNodeKey)
|
||||
updateSelectionCallback(resultKey)
|
||||
);
|
||||
} else {
|
||||
return undefined;
|
||||
@@ -154,7 +157,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
*/
|
||||
function renderSarifLocation(
|
||||
loc: Sarif.Location,
|
||||
pathNodeKey: Keys.PathNode | undefined
|
||||
pathNodeKey: Keys.PathNode | Keys.Result | undefined
|
||||
): JSX.Element | undefined {
|
||||
const parsedLoc = parseSarifLocation(loc, sourceLocationPrefix);
|
||||
if ('hint' in parsedLoc) {
|
||||
@@ -184,7 +187,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
}
|
||||
}
|
||||
|
||||
const toggler: (indices: number[]) => (e: React.MouseEvent) => void = (indices) => {
|
||||
const toggler: (keys: Keys.ResultKey[]) => (e: React.MouseEvent) => void = (indices) => {
|
||||
return (e) => this.toggle(e, indices);
|
||||
};
|
||||
|
||||
@@ -192,24 +195,26 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
return this.renderNoResults();
|
||||
}
|
||||
|
||||
let expansionIndex = 0;
|
||||
|
||||
resultSet.interpretation.data.runs[0].results.forEach((result, resultIndex) => {
|
||||
const resultKey: Keys.Result = { resultIndex };
|
||||
const text = result.message.text || '[no text]';
|
||||
const msg: JSX.Element[] =
|
||||
result.relatedLocations === undefined ?
|
||||
[<span key="0">{text}</span>] :
|
||||
renderRelatedLocations(text, result.relatedLocations);
|
||||
renderRelatedLocations(text, result.relatedLocations, resultKey);
|
||||
|
||||
const currentResultExpanded = this.state.expanded[expansionIndex];
|
||||
const currentResultExpanded = this.state.expanded.has(Keys.keyToString(resultKey));
|
||||
const indicator = currentResultExpanded ? octicons.chevronDown : octicons.chevronRight;
|
||||
const location = result.locations !== undefined && result.locations.length > 0 &&
|
||||
renderSarifLocation(result.locations[0], Keys.none);
|
||||
renderSarifLocation(result.locations[0], resultKey);
|
||||
const locationCells = <td className="vscode-codeql__location-cell">{location}</td>;
|
||||
|
||||
const selectedItem = this.state.selectedItem;
|
||||
const resultRowIsSelected = selectedItem?.resultIndex === resultIndex && selectedItem.pathIndex === undefined;
|
||||
|
||||
if (result.codeFlows === undefined) {
|
||||
rows.push(
|
||||
<tr key={resultIndex} {...zebraStripe(resultIndex)}>
|
||||
<tr ref={this.scroller.ref(resultRowIsSelected)} key={resultIndex} {...selectableZebraStripe(resultRowIsSelected, resultIndex)}>
|
||||
<td className="vscode-codeql__icon-cell">{octicons.info}</td>
|
||||
<td colSpan={3}>{msg}</td>
|
||||
{locationCells}
|
||||
@@ -220,12 +225,12 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
const paths: Sarif.ThreadFlow[] = Keys.getAllPaths(result);
|
||||
|
||||
const indices = paths.length == 1 ?
|
||||
[expansionIndex, expansionIndex + 1] : /* if there's exactly one path, auto-expand
|
||||
* the path when expanding the result */
|
||||
[expansionIndex];
|
||||
[resultKey, { ...resultKey, pathIndex: 0 }] : /* if there's exactly one path, auto-expand
|
||||
* the path when expanding the result */
|
||||
[resultKey];
|
||||
|
||||
rows.push(
|
||||
<tr {...zebraStripe(resultIndex)} key={resultIndex}>
|
||||
<tr ref={this.scroller.ref(resultRowIsSelected)} {...selectableZebraStripe(resultRowIsSelected, resultIndex)} key={resultIndex}>
|
||||
<td className="vscode-codeql__icon-cell vscode-codeql__dropdown-cell" onMouseDown={toggler(indices)}>
|
||||
{indicator}
|
||||
</td>
|
||||
@@ -238,24 +243,23 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
{locationCells}
|
||||
</tr >
|
||||
);
|
||||
expansionIndex++;
|
||||
|
||||
paths.forEach((path, pathIndex) => {
|
||||
const pathKey = { resultIndex, pathIndex };
|
||||
const currentPathExpanded = this.state.expanded[expansionIndex];
|
||||
const currentPathExpanded = this.state.expanded.has(Keys.keyToString(pathKey));
|
||||
if (currentResultExpanded) {
|
||||
const indicator = currentPathExpanded ? octicons.chevronDown : octicons.chevronRight;
|
||||
const isPathSpecificallySelected = Keys.equalsNotUndefined(pathKey, selectedItem);
|
||||
rows.push(
|
||||
<tr {...zebraStripe(resultIndex)} key={`${resultIndex}-${pathIndex}`}>
|
||||
<tr ref={this.scroller.ref(isPathSpecificallySelected)} {...selectableZebraStripe(isPathSpecificallySelected, resultIndex)} key={`${resultIndex}-${pathIndex}`}>
|
||||
<td className="vscode-codeql__icon-cell"><span className="vscode-codeql__vertical-rule"></span></td>
|
||||
<td className="vscode-codeql__icon-cell vscode-codeql__dropdown-cell" onMouseDown={toggler([expansionIndex])}>{indicator}</td>
|
||||
<td className="vscode-codeql__icon-cell vscode-codeql__dropdown-cell" onMouseDown={toggler([pathKey])}>{indicator}</td>
|
||||
<td className="vscode-codeql__text-center" colSpan={3}>
|
||||
Path
|
||||
</td>
|
||||
</tr>
|
||||
);
|
||||
}
|
||||
expansionIndex++;
|
||||
|
||||
if (currentResultExpanded && currentPathExpanded) {
|
||||
const pathNodes = path.locations;
|
||||
@@ -268,11 +272,11 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
const additionalMsg = step.location !== undefined ?
|
||||
renderSarifLocation(step.location, pathNodeKey) :
|
||||
'';
|
||||
const isSelected = Keys.equalsNotUndefined(this.state.selectedPathNode, pathNodeKey);
|
||||
const isSelected = Keys.equalsNotUndefined(this.state.selectedItem, pathNodeKey);
|
||||
const stepIndex = pathNodeIndex + 1; // Convert to 1-based
|
||||
const zebraIndex = resultIndex + stepIndex;
|
||||
rows.push(
|
||||
<tr className={isSelected ? 'vscode-codeql__selected-path-node' : undefined} key={`${resultIndex}-${pathIndex}-${pathNodeIndex}`}>
|
||||
<tr ref={this.scroller.ref(isSelected)} className={isSelected ? 'vscode-codeql__selected-path-node' : undefined} key={`${resultIndex}-${pathIndex}-${pathNodeIndex}`}>
|
||||
<td className="vscode-codeql__icon-cell"><span className="vscode-codeql__vertical-rule"></span></td>
|
||||
<td className="vscode-codeql__icon-cell"><span className="vscode-codeql__vertical-rule"></span></td>
|
||||
<td {...selectableZebraStripe(isSelected, zebraIndex, 'vscode-codeql__path-index-cell')}>{stepIndex}</td>
|
||||
@@ -302,34 +306,103 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
</table>;
|
||||
}
|
||||
|
||||
private handleNavigationEvent(event: NavigationEvent) {
|
||||
private handleNavigationEvent(event: NavigateMsg) {
|
||||
this.setState(prevState => {
|
||||
const { selectedPathNode } = prevState;
|
||||
if (selectedPathNode === undefined) return prevState;
|
||||
const key = this.getNewSelection(prevState.selectedItem, event.direction);
|
||||
const data = this.props.resultSet.interpretation.data;
|
||||
|
||||
const path = Keys.getPath(this.props.resultSet.interpretation.data, selectedPathNode);
|
||||
if (path === undefined) return prevState;
|
||||
|
||||
const nextIndex = selectedPathNode.pathNodeIndex + event.direction;
|
||||
if (nextIndex < 0 || nextIndex >= path.locations.length) return prevState;
|
||||
|
||||
const sarifLoc = path.locations[nextIndex].location;
|
||||
if (sarifLoc === undefined) {
|
||||
return prevState;
|
||||
// Check if the selected node actually exists (bounds check) and get its location if relevant
|
||||
let jumpLocation: Sarif.Location | undefined;
|
||||
if (key.pathNodeIndex !== undefined) {
|
||||
jumpLocation = Keys.getPathNode(data, key);
|
||||
if (jumpLocation === undefined) {
|
||||
return prevState; // Path node does not exist
|
||||
}
|
||||
} else if (key.pathIndex !== undefined) {
|
||||
if (Keys.getPath(data, key) === undefined) {
|
||||
return prevState; // Path does not exist
|
||||
}
|
||||
jumpLocation = undefined; // When selecting a 'path', don't jump anywhere.
|
||||
} else {
|
||||
jumpLocation = Keys.getResult(data, key)?.locations?.[0];
|
||||
if (jumpLocation === undefined) {
|
||||
return prevState; // Result does not exist or has no location.
|
||||
}
|
||||
}
|
||||
if (jumpLocation !== undefined) {
|
||||
const parsedLocation = parseSarifLocation(jumpLocation, this.props.resultSet.interpretation.sourceLocationPrefix);
|
||||
if (!isNoLocation(parsedLocation)) {
|
||||
jumpToLocation(parsedLocation, this.props.databaseUri);
|
||||
}
|
||||
}
|
||||
|
||||
const loc = parseSarifLocation(sarifLoc, this.props.resultSet.interpretation.sourceLocationPrefix);
|
||||
if (isNoLocation(loc)) {
|
||||
return prevState;
|
||||
const expanded = new Set(prevState.expanded);
|
||||
if (event.direction === NavigationDirection.right) {
|
||||
// When stepping right, expand to ensure the selected node is visible
|
||||
expanded.add(Keys.keyToString({ resultIndex: key.resultIndex }));
|
||||
if (key.pathIndex !== undefined) {
|
||||
expanded.add(Keys.keyToString({ resultIndex: key.resultIndex, pathIndex: key.pathIndex }));
|
||||
}
|
||||
} else if (event.direction === NavigationDirection.left) {
|
||||
// When stepping left, collapse immediately
|
||||
expanded.delete(Keys.keyToString(key));
|
||||
} else {
|
||||
// When stepping up or down, collapse the previous node
|
||||
if (prevState.selectedItem !== undefined) {
|
||||
expanded.delete(Keys.keyToString(prevState.selectedItem));
|
||||
}
|
||||
}
|
||||
|
||||
jumpToLocation(loc, this.props.databaseUri);
|
||||
const newSelection = { ...selectedPathNode, pathNodeIndex: nextIndex };
|
||||
return { ...prevState, selectedPathNode: newSelection };
|
||||
this.scroller.scrollIntoViewOnNextUpdate();
|
||||
return {
|
||||
...prevState,
|
||||
expanded,
|
||||
selectedItem: key
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
private getNewSelection(key: Keys.ResultKey | undefined, direction: NavigationDirection): Keys.ResultKey {
|
||||
if (key === undefined) {
|
||||
return { resultIndex: 0 };
|
||||
}
|
||||
const { resultIndex, pathIndex, pathNodeIndex } = key;
|
||||
switch (direction) {
|
||||
case NavigationDirection.up:
|
||||
case NavigationDirection.down: {
|
||||
const delta = direction === NavigationDirection.up ? -1 : 1;
|
||||
if (key.pathNodeIndex !== undefined) {
|
||||
return { resultIndex, pathIndex: key.pathIndex, pathNodeIndex: key.pathNodeIndex + delta };
|
||||
} else if (pathIndex !== undefined) {
|
||||
return { resultIndex, pathIndex: pathIndex + delta };
|
||||
} else {
|
||||
return { resultIndex: resultIndex + delta };
|
||||
}
|
||||
}
|
||||
case NavigationDirection.left:
|
||||
if (key.pathNodeIndex !== undefined) {
|
||||
return { resultIndex, pathIndex: key.pathIndex };
|
||||
} else if (pathIndex !== undefined) {
|
||||
return { resultIndex };
|
||||
} else {
|
||||
return key;
|
||||
}
|
||||
case NavigationDirection.right:
|
||||
if (pathIndex === undefined) {
|
||||
return { resultIndex, pathIndex: 0 };
|
||||
} else if (pathNodeIndex === undefined) {
|
||||
return { resultIndex, pathIndex, pathNodeIndex: 0 };
|
||||
} else {
|
||||
return key;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
componentDidUpdate() {
|
||||
this.scroller.update();
|
||||
}
|
||||
|
||||
componentDidMount() {
|
||||
this.scroller.update();
|
||||
onNavigation.addListener(this.handleNavigationEvent);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
import * as React from 'react';
|
||||
import { ResultTableProps, className, emptyQueryResultsMessage } from './result-table-utils';
|
||||
import { RAW_RESULTS_LIMIT, RawResultsSortState } from '../../pure/interface-types';
|
||||
import { ResultTableProps, className, emptyQueryResultsMessage, jumpToLocation } from './result-table-utils';
|
||||
import { RAW_RESULTS_LIMIT, RawResultsSortState, NavigateMsg, NavigationDirection } from '../../pure/interface-types';
|
||||
import { RawTableResultSet } from '../../pure/interface-types';
|
||||
import RawTableHeader from './RawTableHeader';
|
||||
import RawTableRow from './RawTableRow';
|
||||
import { ResultRow } from '../../pure/bqrs-cli-types';
|
||||
import { onNavigation } from './results';
|
||||
import { tryGetResolvableLocation } from '../../pure/bqrs-utils';
|
||||
import { ScrollIntoViewHelper } from './scroll-into-view-helper';
|
||||
|
||||
export type RawTableProps = ResultTableProps & {
|
||||
resultSet: RawTableResultSet;
|
||||
@@ -12,9 +15,25 @@ export type RawTableProps = ResultTableProps & {
|
||||
offset: number;
|
||||
};
|
||||
|
||||
export class RawTable extends React.Component<RawTableProps, Record<string, never>> {
|
||||
interface RawTableState {
|
||||
selectedItem?: { row: number, column: number };
|
||||
}
|
||||
|
||||
export class RawTable extends React.Component<RawTableProps, RawTableState> {
|
||||
private scroller = new ScrollIntoViewHelper();
|
||||
|
||||
constructor(props: RawTableProps) {
|
||||
super(props);
|
||||
this.setSelection = this.setSelection.bind(this);
|
||||
this.handleNavigationEvent = this.handleNavigationEvent.bind(this);
|
||||
this.state = {};
|
||||
}
|
||||
|
||||
private setSelection(row: number, column: number) {
|
||||
this.setState(prev => ({
|
||||
...prev,
|
||||
selectedItem: { row, column }
|
||||
}));
|
||||
}
|
||||
|
||||
render(): React.ReactNode {
|
||||
@@ -37,6 +56,9 @@ export class RawTable extends React.Component<RawTableProps, Record<string, neve
|
||||
rowIndex={rowIndex + this.props.offset}
|
||||
row={row}
|
||||
databaseUri={databaseUri}
|
||||
selectedColumn={this.state.selectedItem?.row === rowIndex ? this.state.selectedItem?.column : undefined}
|
||||
onSelected={this.setSelection}
|
||||
scroller={this.scroller}
|
||||
/>
|
||||
);
|
||||
|
||||
@@ -58,4 +80,75 @@ export class RawTable extends React.Component<RawTableProps, Record<string, neve
|
||||
</tbody>
|
||||
</table>;
|
||||
}
|
||||
|
||||
private handleNavigationEvent(event: NavigateMsg) {
|
||||
switch (event.direction) {
|
||||
case NavigationDirection.up: {
|
||||
this.navigateWithDelta(-1, 0);
|
||||
break;
|
||||
}
|
||||
case NavigationDirection.down: {
|
||||
this.navigateWithDelta(1, 0);
|
||||
break;
|
||||
}
|
||||
case NavigationDirection.left: {
|
||||
this.navigateWithDelta(0, -1);
|
||||
break;
|
||||
}
|
||||
case NavigationDirection.right: {
|
||||
this.navigateWithDelta(0, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private navigateWithDelta(rowDelta: number, columnDelta: number) {
|
||||
this.setState(prevState => {
|
||||
const numberOfAlerts = this.props.resultSet.rows.length;
|
||||
if (numberOfAlerts === 0) {
|
||||
return prevState;
|
||||
}
|
||||
const currentRow = prevState.selectedItem?.row;
|
||||
const nextRow = currentRow === undefined
|
||||
? 0
|
||||
: (currentRow + rowDelta);
|
||||
if (nextRow < 0 || nextRow >= numberOfAlerts) {
|
||||
return prevState;
|
||||
}
|
||||
const currentColumn = prevState.selectedItem?.column;
|
||||
const nextColumn = currentColumn === undefined
|
||||
? 0
|
||||
: (currentColumn + columnDelta);
|
||||
// Jump to the location of the new cell
|
||||
const rowData = this.props.resultSet.rows[nextRow];
|
||||
if (nextColumn < 0 || nextColumn >= rowData.length) {
|
||||
return prevState;
|
||||
}
|
||||
const cellData = rowData[nextColumn];
|
||||
if (cellData != null && typeof cellData === 'object') {
|
||||
const location = tryGetResolvableLocation(cellData.url);
|
||||
if (location !== undefined) {
|
||||
jumpToLocation(location, this.props.databaseUri);
|
||||
}
|
||||
}
|
||||
this.scroller.scrollIntoViewOnNextUpdate();
|
||||
return {
|
||||
...prevState,
|
||||
selectedItem: { row: nextRow, column: nextColumn }
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
componentDidUpdate() {
|
||||
this.scroller.update();
|
||||
}
|
||||
|
||||
componentDidMount() {
|
||||
this.scroller.update();
|
||||
onNavigation.addListener(this.handleNavigationEvent);
|
||||
}
|
||||
|
||||
componentWillUnmount() {
|
||||
onNavigation.removeListener(this.handleNavigationEvent);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,12 +6,12 @@ import {
|
||||
IntoResultsViewMsg,
|
||||
SortedResultSetInfo,
|
||||
RawResultsSortState,
|
||||
NavigatePathMsg,
|
||||
QueryMetadata,
|
||||
ResultsPaths,
|
||||
ALERTS_TABLE_NAME,
|
||||
GRAPH_TABLE_NAME,
|
||||
ParsedResultSets,
|
||||
NavigateMsg,
|
||||
} from '../../pure/interface-types';
|
||||
import { EventHandlers as EventHandlerList } from './event-handler-list';
|
||||
import { ResultTables } from './result-tables';
|
||||
@@ -62,12 +62,10 @@ interface ResultsViewState {
|
||||
isExpectingResultsUpdate: boolean;
|
||||
}
|
||||
|
||||
export type NavigationEvent = NavigatePathMsg;
|
||||
|
||||
/**
|
||||
* Event handlers to be notified of navigation events coming from outside the webview.
|
||||
*/
|
||||
export const onNavigation = new EventHandlerList<NavigationEvent>();
|
||||
export const onNavigation = new EventHandlerList<NavigateMsg>();
|
||||
|
||||
/**
|
||||
* A minimal state container for displaying results.
|
||||
@@ -145,7 +143,7 @@ export class ResultsApp extends React.Component<Record<string, never>, ResultsVi
|
||||
isExpectingResultsUpdate: true,
|
||||
});
|
||||
break;
|
||||
case 'navigatePath':
|
||||
case 'navigate':
|
||||
onNavigation.fire(msg);
|
||||
break;
|
||||
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
import * as React from 'react';
|
||||
|
||||
/**
|
||||
* Some book-keeping needed to scroll a specific HTML element into view in a React component.
|
||||
*/
|
||||
export class ScrollIntoViewHelper {
|
||||
private selectedElementRef = React.createRef<HTMLElement | any>(); // need 'any' to work around typing bug in React
|
||||
private shouldScrollIntoView = true;
|
||||
|
||||
/**
|
||||
* If `isSelected` is true, gets the `ref={}` attribute to use for an element that we might want to scroll into view.
|
||||
*/
|
||||
public ref(isSelected: boolean) {
|
||||
return isSelected ? this.selectedElementRef : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Causes the element whose `ref={}` was set to be scrolled into view after the next render.
|
||||
*/
|
||||
public scrollIntoViewOnNextUpdate() {
|
||||
this.shouldScrollIntoView = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Should be called from `componentDidUpdate` and `componentDidMount`.
|
||||
*
|
||||
* Scrolls the component into view if requested.
|
||||
*/
|
||||
public update() {
|
||||
if (!this.shouldScrollIntoView) {
|
||||
return;
|
||||
}
|
||||
this.shouldScrollIntoView = false;
|
||||
const element = this.selectedElementRef.current as HTMLElement | null;
|
||||
if (element == null) {
|
||||
return;
|
||||
}
|
||||
const rect = element.getBoundingClientRect();
|
||||
// The selected item's bounding box might be on screen, but hidden underneath the sticky header
// which overlaps the table view. As a workaround we hardcode a fixed distance from the top which
// we consider to be obscured. It does not have to be exact, as it's just a threshold for when to scroll.
|
||||
const heightOfStickyHeader = 30;
|
||||
if (rect.top < heightOfStickyHeader || rect.bottom > window.innerHeight) {
|
||||
element.scrollIntoView({
|
||||
block: 'center', // vertically align to center
|
||||
});
|
||||
}
|
||||
if (rect.left < 0 || rect.right > window.innerWidth) {
|
||||
element.scrollIntoView({
|
||||
block: 'nearest',
|
||||
inline: 'nearest', // horizontally align as little as possible
|
||||
});
|
||||
}
|
||||
}
|
||||
}
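A hedged usage sketch mirroring how the tables above wire up the helper; the component, props, and state shapes are invented for illustration:

```tsx
import * as React from 'react';
import { ScrollIntoViewHelper } from './scroll-into-view-helper';

interface ExampleProps { rows: string[] }
interface ExampleState { selectedRow: number }

class ExampleTable extends React.Component<ExampleProps, ExampleState> {
  private scroller = new ScrollIntoViewHelper();
  state: ExampleState = { selectedRow: 0 };

  private select(rowIndex: number) {
    // Scroll the newly selected row into view after the next render.
    this.scroller.scrollIntoViewOnNextUpdate();
    this.setState({ selectedRow: rowIndex });
  }

  componentDidMount() { this.scroller.update(); }
  componentDidUpdate() { this.scroller.update(); }

  render() {
    return <table><tbody>
      {this.props.rows.map((row, i) => (
        // Only the selected row receives the ref; other rows pass undefined.
        <tr key={i} ref={this.scroller.ref(i === this.state.selectedRow)} onClick={() => this.select(i)}>
          <td>{row}</td>
        </tr>
      ))}
    </tbody></table>;
  }
}
```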
|
||||
@@ -14,15 +14,6 @@ import { CUSTOM_CODEQL_PATH_SETTING } from '../../config';
|
||||
|
||||
export const DB_URL = 'https://github.com/github/vscode-codeql/files/5586722/simple-db.zip';
|
||||
|
||||
process.addListener('unhandledRejection', (reason) => {
|
||||
if (reason instanceof Error && reason.message === 'Canceled') {
|
||||
console.log('Cancellation requested after the test has ended.');
|
||||
process.exit(0);
|
||||
} else {
|
||||
fail(String(reason));
|
||||
}
|
||||
});
|
||||
|
||||
// We need to resolve the path, but the final three segments won't exist until later, so we only resolve the
|
||||
// first portion of the path.
|
||||
export const dbLoc = path.join(fs.realpathSync(path.join(__dirname, '../../../')), 'build/tests/db.zip');
|
||||
@@ -84,7 +75,11 @@ export default function(mocha: Mocha) {
|
||||
// This shuts down the extension and can only be run after all tests have completed.
|
||||
// If this is not called, then the test process will hang.
|
||||
if ('dispose' in extension) {
|
||||
extension.dispose();
|
||||
try {
|
||||
extension.dispose();
|
||||
} catch (e) {
|
||||
console.warn('Failed to dispose extension', e);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
@@ -92,7 +87,13 @@ export default function(mocha: Mocha) {
|
||||
// ensure temp directory is cleaned up.
|
||||
(mocha.options as any).globalTeardown.push(
|
||||
() => {
|
||||
removeStorage?.();
|
||||
try {
|
||||
removeStorage?.();
|
||||
} catch (e) {
|
||||
// we are exiting anyway so don't worry about it.
// most likely this is a test on Windows and some files in the directory are locked.
|
||||
console.warn(`Failed to remove storage directory '${storagePath}': ${e}`);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
@@ -16,7 +16,6 @@ import { QueryResultType } from '../../pure/new-messages';
|
||||
import { cleanDatabases, dbLoc, storagePath } from './global.helper';
|
||||
import { importArchiveDatabase } from '../../databaseFetcher';
|
||||
|
||||
|
||||
const baseDir = path.join(__dirname, '../../../test/data');
|
||||
|
||||
const tmpDir = tmp.dirSync({ prefix: 'query_test_', keep: false, unsafeCleanup: true });
|
||||
@@ -106,7 +105,7 @@ describe('using the new query server', function() {
|
||||
cliServer = extension.cliServer;
|
||||
|
||||
cliServer.quiet = true;
|
||||
if (!(await cliServer.cliConstraints.supportsNewQueryServer())) {
|
||||
if (!(await cliServer.cliConstraints.supportsNewQueryServerForTests())) {
|
||||
this.ctx.skip();
|
||||
}
|
||||
qs = new QueryServerClient({
|
||||
|
||||
@@ -100,7 +100,6 @@ describe('Remote queries', function() {
const querySubmissionResult = await runRemoteQuery(cli, credentials, fileUri, true, progress, cancellationTokenSource.token, variantAnalysisManager);
expect(querySubmissionResult).to.be.ok;
const queryPackRootDir = querySubmissionResult!.queryDirPath!;
printDirectoryContents(queryPackRootDir);

// to retrieve the list of repositories
expect(showQuickPickSpy).to.have.been.calledOnce;
@@ -113,7 +112,6 @@ describe('Remote queries', function() {
expect(fs.readdirSync(queryPackRootDir).find(f => f.startsWith('qlpack-') && f.endsWith('-generated.tgz'))).not.to.be.undefined;

const queryPackDir = path.join(queryPackRootDir, 'query-pack');
printDirectoryContents(queryPackDir);

expect(fs.existsSync(path.join(queryPackDir, 'in-pack.ql'))).to.be.true;
expect(fs.existsSync(path.join(queryPackDir, 'lib.qll'))).to.be.true;
@@ -128,7 +126,6 @@ describe('Remote queries', function() {

// the compiled pack
const compiledPackDir = path.join(queryPackDir, '.codeql/pack/codeql-remote/query/0.0.0/');
printDirectoryContents(compiledPackDir);

expect(fs.existsSync(path.join(compiledPackDir, 'in-pack.ql'))).to.be.true;
expect(fs.existsSync(path.join(compiledPackDir, 'lib.qll'))).to.be.true;
@@ -171,11 +168,9 @@ describe('Remote queries', function() {
// check a few files that we know should exist and others that we know should not

// the tarball to deliver to the server
printDirectoryContents(queryPackRootDir);
expect(fs.readdirSync(queryPackRootDir).find(f => f.startsWith('qlpack-') && f.endsWith('-generated.tgz'))).not.to.be.undefined;

const queryPackDir = path.join(queryPackRootDir, 'query-pack');
printDirectoryContents(queryPackDir);

expect(fs.existsSync(path.join(queryPackDir, 'in-pack.ql'))).to.be.true;
expect(fs.existsSync(path.join(queryPackDir, 'qlpack.yml'))).to.be.true;
@@ -189,7 +184,6 @@ describe('Remote queries', function() {

// the compiled pack
const compiledPackDir = path.join(queryPackDir, '.codeql/pack/codeql-remote/query/0.0.0/');
printDirectoryContents(compiledPackDir);
expect(fs.existsSync(path.join(compiledPackDir, 'in-pack.ql'))).to.be.true;
expect(fs.existsSync(path.join(compiledPackDir, 'qlpack.yml'))).to.be.true;
verifyQlPack(path.join(compiledPackDir, 'qlpack.yml'), 'in-pack.ql', '0.0.0', await pathSerializationBroken());
@@ -208,7 +202,6 @@ describe('Remote queries', function() {
expect(qlpackContents.dependencies?.['codeql/javascript-all']).to.equal('*');

const libraryDir = path.join(compiledPackDir, '.codeql/libraries/codeql');
printDirectoryContents(libraryDir);
const packNames = fs.readdirSync(libraryDir).sort();

// check dependencies.
@@ -233,11 +226,9 @@ describe('Remote queries', function() {
// check a few files that we know should exist and others that we know should not

// the tarball to deliver to the server
printDirectoryContents(queryPackRootDir);
expect(fs.readdirSync(queryPackRootDir).find(f => f.startsWith('qlpack-') && f.endsWith('-generated.tgz'))).not.to.be.undefined;

const queryPackDir = path.join(queryPackRootDir, 'query-pack');
printDirectoryContents(queryPackDir);

expect(fs.existsSync(path.join(queryPackDir, 'subfolder/in-pack.ql'))).to.be.true;
expect(fs.existsSync(path.join(queryPackDir, 'qlpack.yml'))).to.be.true;
@@ -251,7 +242,6 @@ describe('Remote queries', function() {

// the compiled pack
const compiledPackDir = path.join(queryPackDir, '.codeql/pack/codeql-remote/query/0.0.0/');
printDirectoryContents(compiledPackDir);
expect(fs.existsSync(path.join(compiledPackDir, 'otherfolder/lib.qll'))).to.be.true;
expect(fs.existsSync(path.join(compiledPackDir, 'subfolder/in-pack.ql'))).to.be.true;
expect(fs.existsSync(path.join(compiledPackDir, 'qlpack.yml'))).to.be.true;
@@ -270,7 +260,6 @@ describe('Remote queries', function() {
expect(qlpackContents.dependencies?.['codeql/javascript-all']).to.equal('*');

const libraryDir = path.join(compiledPackDir, '.codeql/libraries/codeql');
printDirectoryContents(libraryDir);
const packNames = fs.readdirSync(libraryDir).sort();

// check dependencies.
@@ -399,12 +388,4 @@ describe('Remote queries', function() {
function getFile(file: string): Uri {
return Uri.file(path.join(baseDir, file));
}

function printDirectoryContents(dir: string) {
console.log(`DIR ${dir}`);
if (!fs.existsSync(dir)) {
console.log(`DIR ${dir} does not exist`);
}
fs.readdirSync(dir).sort().forEach(f => console.log(`  ${f}`));
}
});

@@ -30,21 +30,12 @@ import { workspace } from 'vscode';
* exists. And the cli will not be re-downloaded if the zip already exists.
*/

process.on('unhandledRejection', e => {
console.error('Unhandled rejection.');
console.error(e);
// Must use a setTimeout in order to ensure the log is fully flushed before exiting
setTimeout(() => {
process.exit(-1);
}, 2000);
});

const _1MB = 1024 * 1024;
const _10MB = _1MB * 10;

// CLI version to test. Hard code the latest as default. And be sure
// to update the env if it is not otherwise set.
const CLI_VERSION = process.env.CLI_VERSION || 'v2.11.1';
const CLI_VERSION = process.env.CLI_VERSION || 'v2.11.2';
process.env.CLI_VERSION = CLI_VERSION;

// Base dir where CLIs will be downloaded into

@@ -16,6 +16,15 @@ process.on('unhandledRejection', e => {
}, 2000);
});

process.on('exit', code => {
// If the exit code is 7, then the test runner has completed, but
// there was an error in exiting vscode.
if (code === 7) {
console.warn('Attempted to exit with code 7. This is likely due to a failure to exit vscode. Ignoring this and exiting with code 0.');
process.exit(0);
}
});

/**
* Helper function that runs all Mocha tests found in the
* given test root directory.

@@ -13,12 +13,3 @@ chai.use(sinonChai);
export function run(): Promise<void> {
return runTestsInDirectory(__dirname);
}

process.addListener('unhandledRejection', (reason) => {
if (reason instanceof Error && reason.message === 'Canceled') {
console.log('Cancellation requested after the test has ended.');
process.exit(0);
} else {
fail(String(reason));
}
});

@@ -67,7 +67,9 @@ describe('query-history', () => {
} as any as RemoteQueriesManager;

variantAnalysisManagerStub = {
onVariantAnalysisAdded: sandbox.stub()
onVariantAnalysisAdded: sandbox.stub(),
onVariantAnalysisStatusUpdated: sandbox.stub(),
onVariantAnalysisRemoved: sandbox.stub()
} as any as VariantAnalysisManager;
});

@@ -1,6 +1,7 @@
import { expect } from 'chai';
import * as path from 'path';
import * as fs from 'fs-extra';
import * as os from 'os';
import * as sinon from 'sinon';
import { LocalQueryInfo, InitialQueryInfo, interpretResultsSarif } from '../../query-results';
import { QueryWithResults } from '../../run-queries-shared';
@@ -11,6 +12,7 @@ import { tmpDir } from '../../helpers';
import { slurpQueryHistory, splatQueryHistory } from '../../query-serialization';
import { formatLegacyMessage, QueryInProgress } from '../../legacy-query-server/run-queries';
import { EvaluationResult, QueryResultType } from '../../pure/legacy-messages';
import Sinon = require('sinon');

describe('query-results', () => {
let disposeSpy: sinon.SinonSpy;
@@ -155,68 +157,213 @@ describe('query-results', () => {
});
});

it('should interpretResultsSarif', async () => {
const spy = sandbox.mock();
spy.returns({ a: '1234' });
const mockServer = {
interpretBqrsSarif: spy
} as unknown as CodeQLCliServer;

const interpretedResultsPath = path.join(tmpDir.name, 'interpreted.json');
const resultsPath = '123';
const sourceInfo = {};
describe('interpretResultsSarif', () => {
let mockServer: CodeQLCliServer;
let spy: Sinon.SinonExpectation;
const metadata = {
kind: 'my-kind',
id: 'my-id' as string | undefined,
scored: undefined
};
const results1 = await interpretResultsSarif(
mockServer,
metadata,
{
resultsPath, interpretedResultsPath
},
sourceInfo as SourceInfo
);
const resultsPath = '123';
const interpretedResultsPath = path.join(tmpDir.name, 'interpreted.json');
const sourceInfo = {};

expect(results1).to.deep.eq({ a: '1234', t: 'SarifInterpretationData' });
expect(spy).to.have.been.calledWith(
metadata,
resultsPath, interpretedResultsPath, sourceInfo
);
beforeEach(() => {
spy = sandbox.mock();
spy.returns({ a: '1234' });

// Try again, but with no id
spy.reset();
spy.returns({ a: '1234' });
delete metadata.id;
const results2 = await interpretResultsSarif(
mockServer,
metadata,
{
resultsPath, interpretedResultsPath
},
sourceInfo as SourceInfo
);
expect(results2).to.deep.eq({ a: '1234', t: 'SarifInterpretationData' });
expect(spy).to.have.been.calledWith(
{ kind: 'my-kind', id: 'dummy-id', scored: undefined },
resultsPath, interpretedResultsPath, sourceInfo
);
mockServer = {
interpretBqrsSarif: spy
} as unknown as CodeQLCliServer;
});

// try a third time, but this time we get from file
spy.reset();
fs.writeFileSync(interpretedResultsPath, JSON.stringify({
a: 6
}), 'utf8');
const results3 = await interpretResultsSarif(
mockServer,
metadata,
{
resultsPath, interpretedResultsPath
},
sourceInfo as SourceInfo
);
expect(results3).to.deep.eq({ a: 6, t: 'SarifInterpretationData' });
afterEach(async () => {
sandbox.restore();
safeDel(interpretedResultsPath);
});

it('should interpretResultsSarif', async function() {
// up to 2 minutes per test
this.timeout(2 * 60 * 1000);

const results = await interpretResultsSarif(
mockServer,
metadata,
{
resultsPath, interpretedResultsPath
},
sourceInfo as SourceInfo
);

expect(results).to.deep.eq({ a: '1234', t: 'SarifInterpretationData' });
expect(spy).to.have.been.calledWith(
metadata,
resultsPath, interpretedResultsPath, sourceInfo
);
});

it('should interpretBqrsSarif without ID', async function() {
// up to 2 minutes per test
this.timeout(2 * 60 * 1000);

delete metadata.id;
const results = await interpretResultsSarif(
mockServer,
metadata,
{
resultsPath, interpretedResultsPath
},
sourceInfo as SourceInfo
);
expect(results).to.deep.eq({ a: '1234', t: 'SarifInterpretationData' });
expect(spy).to.have.been.calledWith(
{ kind: 'my-kind', id: 'dummy-id', scored: undefined },
resultsPath, interpretedResultsPath, sourceInfo
);
});

it('should use sarifParser on a valid small SARIF file', async function() {
// up to 2 minutes per test
this.timeout(2 * 60 * 1000);

fs.writeFileSync(interpretedResultsPath, JSON.stringify({
runs: [{ results: [] }] // A run needs results to succeed.
}), 'utf8');
const results = await interpretResultsSarif(
mockServer,
metadata,
{
resultsPath, interpretedResultsPath
},
sourceInfo as SourceInfo
);
// We do not re-interpret if we are reading from a SARIF file.
expect(spy).to.not.have.been.called;

expect(results).to.have.property('t', 'SarifInterpretationData');
expect(results).to.have.nested.property('runs[0].results');
});

it('should throw an error on an invalid small SARIF file', async function() {
// up to 2 minutes per test
this.timeout(2 * 60 * 1000);

fs.writeFileSync(interpretedResultsPath, JSON.stringify({
a: '6' // Invalid: no runs or results
}), 'utf8');

await expect(
interpretResultsSarif(
mockServer,
metadata,
{
resultsPath, interpretedResultsPath
},
sourceInfo as SourceInfo)
).to.be.rejectedWith('Parsing output of interpretation failed: Invalid SARIF file: expecting at least one run with result.');

// We do not attempt to re-interpret if we are reading from a SARIF file.
expect(spy).to.not.have.been.called;
});

it('should use sarifParser on a valid large SARIF file', async function() {
// up to 2 minutes per test
this.timeout(2 * 60 * 1000);

const validSarifStream = fs.createWriteStream(interpretedResultsPath, { flags: 'w' });

const finished = new Promise((res, rej) => {
validSarifStream.addListener('close', res);
validSarifStream.addListener('error', rej);
});

validSarifStream.write(JSON.stringify({
runs: [{ results: [] }] // A run needs results to succeed.
}), 'utf8');

validSarifStream.write('[', 'utf8');
const iterations = 1_000_000;
for (let i = 0; i < iterations; i++) {
validSarifStream.write(JSON.stringify({
a: '6'
}), 'utf8');
if (i < iterations - 1) {
validSarifStream.write(',');
}
}
validSarifStream.write(']', 'utf8');
validSarifStream.end();
await finished;

// We need to sleep to wait for MSFT Defender to scan the file
// so that it can be read by our test.
if (os.platform() === 'win32') {
await sleep(10_000);
}

const results = await interpretResultsSarif(
mockServer,
metadata,
{
resultsPath, interpretedResultsPath
},
sourceInfo as SourceInfo
);
// We do not re-interpret if we are reading from a SARIF file.
expect(spy).to.not.have.been.called;

expect(results).to.have.property('t', 'SarifInterpretationData');
expect(results).to.have.nested.property('runs[0].results');
});

it('should throw an error on an invalid large SARIF file', async function() {
// up to 2 minutes per test
this.timeout(2 * 60 * 1000);

// There is a problem on Windows where the file at the prior path isn't able
// to be deleted or written to, so we rename the path for this last test.
const interpretedResultsPath = path.join(tmpDir.name, 'interpreted-invalid.json');
const invalidSarifStream = fs.createWriteStream(interpretedResultsPath, { flags: 'w' });

const finished = new Promise((res, rej) => {
invalidSarifStream.addListener('close', res);
invalidSarifStream.addListener('error', rej);
});

invalidSarifStream.write('[', 'utf8');
const iterations = 1_000_000;
for (let i = 0; i < iterations; i++) {
invalidSarifStream.write(JSON.stringify({
a: '6'
}), 'utf8');
if (i < iterations - 1) {
invalidSarifStream.write(',');
}
}
invalidSarifStream.write(']', 'utf8');
invalidSarifStream.end();
await finished;

// We need to sleep to wait for MSFT Defender to scan the file
// so that it can be read by our test.
if (os.platform() === 'win32') {
await sleep(10_000);
}

await expect(
interpretResultsSarif(
mockServer,
metadata,
{
resultsPath, interpretedResultsPath
},
sourceInfo as SourceInfo)
).to.be.rejectedWith('Parsing output of interpretation failed: Invalid SARIF file: expecting at least one run with result.');

// We do not attempt to re-interpret if we are reading from a SARIF file.
expect(spy).to.not.have.been.called;
});
});

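The tests above pin down the observable contract of interpretResultsSarif: when a SARIF file already exists at interpretedResultsPath it is parsed (with a streaming parser, so very large files work) and the CLI is not called again; otherwise the CLI's interpretBqrsSarif does the interpretation, with a 'dummy-id' substituted when the query metadata has no id. A minimal sketch of that control flow under those assumptions; the sarifParser helper and the exact error message come from the tests, everything else here is illustrative rather than the extension's actual code:

import * as fs from 'fs-extra';

// Assumed streaming SARIF parser; the real helper lives elsewhere in the extension.
declare function sarifParser(interpretedResultsPath: string): Promise<{ runs?: Array<{ results?: unknown[] }> }>;

// Sketch only: mirrors the behaviour the tests above assert.
async function interpretResultsSarifSketch(
  cli: { interpretBqrsSarif: (meta: object, results: string, interpreted: string, src?: object) => Promise<object> },
  metadata: { kind?: string; id?: string; scored?: unknown },
  paths: { resultsPath: string; interpretedResultsPath: string },
  sourceInfo?: object
): Promise<object> {
  const { resultsPath, interpretedResultsPath } = paths;

  // If an interpreted SARIF file is already on disk, parse it instead of
  // re-running the CLI. A streaming parser keeps memory bounded for large files.
  if (await fs.pathExists(interpretedResultsPath)) {
    const sarif = await sarifParser(interpretedResultsPath);
    if (!sarif.runs?.[0]?.results) {
      throw new Error(
        'Parsing output of interpretation failed: Invalid SARIF file: expecting at least one run with result.'
      );
    }
    return { ...sarif, t: 'SarifInterpretationData' };
  }

  // Otherwise ask the CLI to interpret the BQRS results, defaulting the id.
  const meta = { ...metadata, id: metadata.id ?? 'dummy-id' };
  const interpreted = await cli.interpretBqrsSarif(meta, resultsPath, interpretedResultsPath, sourceInfo);
  return { ...interpreted, t: 'SarifInterpretationData' };
}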
describe('splat and slurp', () => {
@@ -300,6 +447,18 @@ describe('query-results', () => {
});
});

function safeDel(file: string) {
try {
fs.unlinkSync(file);
} catch (e) {
// ignore
}
}

async function sleep(ms: number) {
return new Promise(resolve => setTimeout(resolve, ms));
}

function createMockQueryWithResults(
queryPath: string,
didRunSuccessfully = true,

@@ -77,7 +77,9 @@ describe('Remote queries and query history manager', function() {
} as any as RemoteQueriesManager;

variantAnalysisManagerStub = {
onVariantAnalysisAdded: sandbox.stub()
onVariantAnalysisAdded: sandbox.stub(),
onVariantAnalysisStatusUpdated: sandbox.stub(),
onVariantAnalysisRemoved: sandbox.stub()
} as any as VariantAnalysisManager;
});

@@ -13,7 +13,6 @@ import * as tmp from 'tmp-promise';
// but we can be tricky and import directly from the out file.
import { TestOptions } from 'vscode-test/out/runTest';

// For CI purposes we want to leave this at 'stable' to catch any bugs
// that might show up with new vscode versions released, even though
// this makes testing not-quite-pure, but it can be changed for local
@@ -34,24 +33,21 @@ enum TestDir {
* Run an integration test suite `suite`, retrying if it segfaults, at
* most `tries` times.
*/
async function runTestsWithRetryOnSegfault(suite: TestOptions, tries: number): Promise<void> {
async function runTestsWithRetryOnSegfault(suite: TestOptions, tries: number): Promise<number> {
for (let t = 0; t < tries; t++) {
try {
// Download and unzip VS Code if necessary, and run the integration test suite.
await runTests(suite);
return;
return await runTests(suite);
} catch (err) {
if (err === 'SIGSEGV') {
console.error('Test runner segfaulted.');
if (t < tries - 1)
console.error('Retrying...');
}
else if (os.platform() === 'win32') {
} else if (os.platform() === 'win32') {
console.error(`Test runner caught exception (${err})`);
if (t < tries - 1)
console.error('Retrying...');
}
else {
} else {
throw err;
}
}
}
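The hunk above only shows part of the function after it was changed to return an exit code. A sketch of the complete shape it implies — retry on segfault (and on any error on Windows), propagate the runner's exit code, fail once the retries are exhausted. The tail of the loop is not visible in the diff, so the final throw below is an assumption rather than the file's actual code:

import * as os from 'os';
import { runTests } from 'vscode-test';
import { TestOptions } from 'vscode-test/out/runTest';

// Sketch, assuming runTests(suite) resolves to the test runner's exit code.
async function runTestsWithRetryOnSegfaultSketch(suite: TestOptions, tries: number): Promise<number> {
  for (let t = 0; t < tries; t++) {
    try {
      // Download and unzip VS Code if necessary, and run the integration test suite.
      return await runTests(suite);
    } catch (err) {
      const retryable = err === 'SIGSEGV' || os.platform() === 'win32';
      if (!retryable) {
        throw err;
      }
      console.error(`Test runner caught exception (${err})`);
      if (t < tries - 1) {
        console.error('Retrying...');
      }
    }
  }
  // Assumed behaviour once all retries are used up; not shown in the hunk above.
  throw new Error(`Tests still failing after ${tries} attempts.`);
}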
@@ -67,6 +63,7 @@ const tmpDir = tmp.dirSync({ unsafeCleanup: true });
* See https://github.com/microsoft/vscode-test/blob/master/sample/test/runTest.ts
*/
async function main() {
let exitCode = 0;
try {
const extensionDevelopmentPath = path.resolve(__dirname, '../..');
const vscodeExecutablePath = await downloadAndUnzipVSCode(VSCODE_VERSION);
@@ -100,7 +97,7 @@ async function main() {
const launchArgs = getLaunchArgs(dir as TestDir);
console.log(`Next integration test dir: ${dir}`);
console.log(`Launch args: ${launchArgs}`);
await runTestsWithRetryOnSegfault({
exitCode = await runTestsWithRetryOnSegfault({
version: VSCODE_VERSION,
vscodeExecutablePath,
extensionDevelopmentPath,
@@ -111,8 +108,12 @@ async function main() {
}
} catch (err) {
console.error(`Unexpected exception while running tests: ${err}`);
console.error((err as Error).stack);
process.exit(1);
if (err instanceof Error) {
console.error(err.stack);
}
exitCode = 1;
} finally {
process.exit(exitCode);
}
}

@@ -125,6 +126,7 @@ function getLaunchArgs(dir: TestDir) {
return [
'--disable-extensions',
'--disable-gpu',
'--disable-workspace-trust',
'--user-data-dir=' + path.join(tmpDir.name, dir, 'user-data')
];

@@ -132,6 +134,7 @@ function getLaunchArgs(dir: TestDir) {
return [
'--disable-extensions',
'--disable-gpu',
'--disable-workspace-trust',
'--user-data-dir=' + path.join(tmpDir.name, dir, 'user-data'),
path.resolve(__dirname, '../../test/data')
];
@@ -139,6 +142,7 @@ function getLaunchArgs(dir: TestDir) {
case TestDir.CliIntegration:
// CLI integration tests require a multi-root workspace so that the data and the QL sources are accessible.
return [
'--disable-workspace-trust',
'--disable-gpu',
path.resolve(__dirname, '../../test/data'),

@@ -155,5 +159,4 @@ function getLaunchArgs(dir: TestDir) {
default:
assertNever(dir);
}
return undefined;
}

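Pulling the hunks above together, the launch-argument helper is a switch over the test directory, with '--disable-workspace-trust' now passed in every case. A condensed sketch of that shape; only TestDir.CliIntegration is visible in the diff, so the other enum member names and values here are illustrative assumptions:

import * as path from 'path';

// Illustrative names; only the CliIntegration case appears in the diff itself.
enum TestDirSketch {
  NoWorkspace = 'no-workspace',
  MinimalWorkspace = 'minimal-workspace',
  CliIntegration = 'cli-integration',
}

function getLaunchArgsSketch(dir: TestDirSketch, tmpDirName: string): string[] {
  switch (dir) {
    case TestDirSketch.NoWorkspace:
      return [
        '--disable-extensions',
        '--disable-gpu',
        '--disable-workspace-trust',
        '--user-data-dir=' + path.join(tmpDirName, dir, 'user-data'),
      ];
    case TestDirSketch.MinimalWorkspace:
      return [
        '--disable-extensions',
        '--disable-gpu',
        '--disable-workspace-trust',
        '--user-data-dir=' + path.join(tmpDirName, dir, 'user-data'),
        path.resolve(__dirname, '../../test/data'),
      ];
    case TestDirSketch.CliIntegration:
      // A multi-root workspace so that the test data and the QL sources are both accessible.
      return [
        '--disable-workspace-trust',
        '--disable-gpu',
        path.resolve(__dirname, '../../test/data'),
      ];
    default:
      throw new Error(`Unexpected test dir: ${dir}`);
  }
}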
@@ -1,3 +1,5 @@
import * as fs from 'fs-extra';
import * as path from 'path';
import { ConfigurationTarget } from 'vscode';
import { ALL_SETTINGS, InspectionResult, Setting } from '../config';

@@ -59,9 +61,21 @@ class TestSetting<T> {
}
}

// Public configuration keys are the ones defined in the package.json.
// These keys are documented in the settings page. Other keys are
// internal and not documented.
const PKG_CONFIGURATION: Record<string, any> = (function initConfigurationKeys() {
// Note we are using synchronous file reads here. This is fine because
// we are in tests.
const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, '../../package.json'), 'utf-8'));
return pkg.contributes.configuration.properties;
}());

// The test settings are all settings in ALL_SETTINGS which don't have any children
// and are also not hidden settings like codeQL.canary.
const TEST_SETTINGS = ALL_SETTINGS
.filter(setting => ALL_SETTINGS.filter(s => s.parent === setting).length === 0)
.filter(setting => (setting.qualifiedName in PKG_CONFIGURATION) && !setting.hasChildren)
.map(setting => new TestSetting(setting));

export const getTestSetting = (setting: Setting): TestSetting<unknown> | undefined => {
@@ -79,7 +93,10 @@ export const testConfigHelper = async (mocha: Mocha) => {
},
async afterAll() {
// Restore all settings to their default values after each test suite
await Promise.all(TEST_SETTINGS.map(setting => setting.restoreToInitialValues()));
// Only do this outside of CI since this sometimes hangs on CI.
if (process.env.CI !== 'true') {
await Promise.all(TEST_SETTINGS.map(setting => setting.restoreToInitialValues()));
}
}
});
};

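TestSetting itself is mostly outside the hunk above; only restoreToInitialValues is visible. A rough sketch of how such a wrapper might snapshot and restore a VS Code setting around a test suite, using the standard workspace configuration API; the field names and the snapshot shape are assumptions for illustration, not the class's actual definition:

import { workspace, ConfigurationTarget } from 'vscode';
import { Setting } from '../config';

// Sketch only: snapshot a setting before the tests run, put it back afterwards.
class TestSettingSketch<T> {
  private initial: { globalValue?: T; workspaceValue?: T } | undefined;

  constructor(public readonly setting: Setting) {}

  async snapshotInitialValues(): Promise<void> {
    const inspected = workspace.getConfiguration().inspect<T>(this.setting.qualifiedName);
    this.initial = { globalValue: inspected?.globalValue, workspaceValue: inspected?.workspaceValue };
  }

  async restoreToInitialValues(): Promise<void> {
    const config = workspace.getConfiguration();
    await config.update(this.setting.qualifiedName, this.initial?.globalValue, ConfigurationTarget.Global);
    await config.update(this.setting.qualifiedName, this.initial?.workspaceValue, ConfigurationTarget.Workspace);
  }
}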
@@ -3,74 +3,94 @@ import 'chai/register-should';
import * as sinonChai from 'sinon-chai';
import 'mocha';
import * as path from 'path';
import * as chaiAsPromised from 'chai-as-promised';

import { gatherQlFiles } from '../../src/pure/files';
import { gatherQlFiles, getDirectoryNamesInsidePath } from '../../src/pure/files';

chai.use(sinonChai);
chai.use(chaiAsPromised);
const expect = chai.expect;

describe('files', () => {
const dataDir = path.join(path.dirname(__dirname), 'data');
const data2Dir = path.join(path.dirname(__dirname), 'data2');

it('should find one file', async () => {
const singleFile = path.join(dataDir, 'query.ql');
const result = await gatherQlFiles([singleFile]);
expect(result).to.deep.equal([[singleFile], false]);
describe('gatherQlFiles', async () => {
it('should find one file', async () => {
const singleFile = path.join(dataDir, 'query.ql');
const result = await gatherQlFiles([singleFile]);
expect(result).to.deep.equal([[singleFile], false]);
});

it('should find no files', async () => {
const result = await gatherQlFiles([]);
expect(result).to.deep.equal([[], false]);
});

it('should find no files', async () => {
const singleFile = path.join(dataDir, 'library.qll');
const result = await gatherQlFiles([singleFile]);
expect(result).to.deep.equal([[], false]);
});

it('should handle invalid file', async () => {
const singleFile = path.join(dataDir, 'xxx');
const result = await gatherQlFiles([singleFile]);
expect(result).to.deep.equal([[], false]);
});

it('should find two files', async () => {
const singleFile = path.join(dataDir, 'query.ql');
const otherFile = path.join(dataDir, 'multiple-result-sets.ql');
const notFile = path.join(dataDir, 'library.qll');
const invalidFile = path.join(dataDir, 'xxx');

const result = await gatherQlFiles([singleFile, otherFile, notFile, invalidFile]);
expect(result.sort()).to.deep.equal([[singleFile, otherFile], false]);
});

it('should scan a directory', async () => {
const file1 = path.join(dataDir, 'compute-default-strings.ql');
const file2 = path.join(dataDir, 'multiple-result-sets.ql');
const file3 = path.join(dataDir, 'query.ql');

const result = await gatherQlFiles([dataDir]);
expect(result.sort()).to.deep.equal([[file1, file2, file3], true]);
});

it('should scan a directory and some files', async () => {
const singleFile = path.join(dataDir, 'query.ql');
const empty1File = path.join(data2Dir, 'empty1.ql');
const empty2File = path.join(data2Dir, 'sub-folder', 'empty2.ql');

const result = await gatherQlFiles([singleFile, data2Dir]);
expect(result.sort()).to.deep.equal([[singleFile, empty1File, empty2File], true]);
});

it('should avoid duplicates', async () => {
const file1 = path.join(dataDir, 'compute-default-strings.ql');
const file2 = path.join(dataDir, 'multiple-result-sets.ql');
const file3 = path.join(dataDir, 'query.ql');

const result = await gatherQlFiles([file1, dataDir, file3]);
result[0].sort();
expect(result.sort()).to.deep.equal([[file1, file2, file3], true]);
});
});

it('should find no files', async () => {
const result = await gatherQlFiles([]);
expect(result).to.deep.equal([[], false]);
});
describe('getDirectoryNamesInsidePath', async () => {
it('should fail if path does not exist', async () => {
await expect(getDirectoryNamesInsidePath('xxx')).to.eventually.be.rejectedWith('Path does not exist: xxx');
});

it('should find no files', async () => {
const singleFile = path.join(dataDir, 'library.qll');
const result = await gatherQlFiles([singleFile]);
expect(result).to.deep.equal([[], false]);
});
it('should fail if path is not a directory', async () => {
const filePath = path.join(data2Dir, 'empty1.ql');
await expect(getDirectoryNamesInsidePath(filePath)).to.eventually.be.rejectedWith(`Path is not a directory: ${filePath}`);
});

it('should handle invalid file', async () => {
const singleFile = path.join(dataDir, 'xxx');
const result = await gatherQlFiles([singleFile]);
expect(result).to.deep.equal([[], false]);
});

it('should find two files', async () => {
const singleFile = path.join(dataDir, 'query.ql');
const otherFile = path.join(dataDir, 'multiple-result-sets.ql');
const notFile = path.join(dataDir, 'library.qll');
const invalidFile = path.join(dataDir, 'xxx');

const result = await gatherQlFiles([singleFile, otherFile, notFile, invalidFile]);
expect(result.sort()).to.deep.equal([[singleFile, otherFile], false]);
});

it('should scan a directory', async () => {
const file1 = path.join(dataDir, 'compute-default-strings.ql');
const file2 = path.join(dataDir, 'multiple-result-sets.ql');
const file3 = path.join(dataDir, 'query.ql');

const result = await gatherQlFiles([dataDir]);
expect(result.sort()).to.deep.equal([[file1, file2, file3], true]);
});

it('should scan a directory and some files', async () => {
const singleFile = path.join(dataDir, 'query.ql');
const empty1File = path.join(data2Dir, 'empty1.ql');
const empty2File = path.join(data2Dir, 'sub-folder', 'empty2.ql');

const result = await gatherQlFiles([singleFile, data2Dir]);
expect(result.sort()).to.deep.equal([[singleFile, empty1File, empty2File], true]);
});

it('should avoid duplicates', async () => {
const file1 = path.join(dataDir, 'compute-default-strings.ql');
const file2 = path.join(dataDir, 'multiple-result-sets.ql');
const file3 = path.join(dataDir, 'query.ql');

const result = await gatherQlFiles([file1, dataDir, file3]);
result[0].sort();
expect(result.sort()).to.deep.equal([[file1, file2, file3], true]);
it('should find sub-folders', async () => {
const result = await getDirectoryNamesInsidePath(data2Dir);
expect(result).to.deep.equal(['sub-folder']);
});
});
});

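The new getDirectoryNamesInsidePath tests above pin down its error messages and its result shape (immediate sub-directory names only). A minimal sketch consistent with those expectations, assuming an fs-extra based implementation; this is illustrative, not necessarily how src/pure/files implements it:

import * as fs from 'fs-extra';
import * as path from 'path';

// Sketch only: list the names of the directories directly inside `dirPath`.
export async function getDirectoryNamesInsidePathSketch(dirPath: string): Promise<string[]> {
  if (!(await fs.pathExists(dirPath))) {
    throw new Error(`Path does not exist: ${dirPath}`);
  }
  if (!(await fs.stat(dirPath)).isDirectory()) {
    throw new Error(`Path is not a directory: ${dirPath}`);
  }

  const entries = await fs.readdir(dirPath);
  const dirNames: string[] = [];
  for (const entry of entries) {
    if ((await fs.stat(path.join(dirPath, entry))).isDirectory()) {
      dirNames.push(entry);
    }
  }
  return dirNames;
}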
@@ -1,11 +1,13 @@
import { expect } from 'chai';

import { QueryStatus } from '../../src/query-status';
import { getQueryHistoryItemId, getQueryText, getRawQueryName } from '../../src/query-history-info';
import { buildRepoLabel, getQueryId, getQueryText, getRawQueryName } from '../../src/query-history-info';
import { VariantAnalysisHistoryItem } from '../../src/remote-queries/variant-analysis-history-item';
import { createMockVariantAnalysis } from '../../src/vscode-tests/factories/remote-queries/shared/variant-analysis';
import { createMockScannedRepos } from '../../src/vscode-tests/factories/remote-queries/shared/scanned-repositories';
import { createMockLocalQueryInfo } from '../../src/vscode-tests/factories/local-queries/local-query-history-item';
import { createMockRemoteQueryHistoryItem } from '../../src/vscode-tests/factories/remote-queries/remote-query-history-item';
import { VariantAnalysisRepoStatus, VariantAnalysisStatus } from '../../src/remote-queries/shared/variant-analysis';

describe('Query history info', () => {

@@ -17,8 +19,15 @@ describe('Query history info', () => {
t: 'variant-analysis',
status: QueryStatus.InProgress,
completed: false,
historyItemId: 'abc123',
variantAnalysis: createMockVariantAnalysis()
variantAnalysis: createMockVariantAnalysis(
VariantAnalysisStatus.InProgress,
createMockScannedRepos([
VariantAnalysisRepoStatus.Succeeded,
VariantAnalysisRepoStatus.Pending,
VariantAnalysisRepoStatus.InProgress,
VariantAnalysisRepoStatus.Canceled,
])
),
};

describe('getRawQueryName', () => {
@@ -41,23 +50,23 @@ describe('Query history info', () => {
});
});

describe('getQueryHistoryItemId', () => {
describe('getQueryId', () => {
it('should get the ID for local history items', () => {
const historyItemId = getQueryHistoryItemId(localQueryHistoryItem);
const historyItemId = getQueryId(localQueryHistoryItem);

expect(historyItemId).to.equal(localQueryHistoryItem.initialInfo.id);
});

it('should get the ID for remote query history items', () => {
const historyItemId = getQueryHistoryItemId(remoteQueryHistoryItem);
const historyItemId = getQueryId(remoteQueryHistoryItem);

expect(historyItemId).to.equal(remoteQueryHistoryItem.queryId);
});

it('should get the ID for variant analysis history items', () => {
const historyItemId = getQueryHistoryItemId(variantAnalysisHistoryItem);
const historyItemId = getQueryId(variantAnalysisHistoryItem);

expect(historyItemId).to.equal(variantAnalysisHistoryItem.historyItemId);
expect(historyItemId).to.equal(variantAnalysisHistoryItem.variantAnalysis.id.toString());
});
});

@@ -80,4 +89,59 @@ describe('Query history info', () => {
expect(queryText).to.equal(variantAnalysisHistoryItem.variantAnalysis.query.text);
});
});

describe('buildRepoLabel', () => {
describe('repo label for remote query history items', () => {
it('should return controller repo when `repositoryCount` is 0', () => {
const repoLabel = buildRepoLabel(remoteQueryHistoryItem);
const expectedRepoLabel = `${remoteQueryHistoryItem.remoteQuery.controllerRepository.owner}/${remoteQueryHistoryItem.remoteQuery.controllerRepository.name}`;

expect(repoLabel).to.equal(expectedRepoLabel);
});
it('should return number of repositories when `repositoryCount` is non-zero', () => {
const remoteQueryHistoryItem2 = createMockRemoteQueryHistoryItem({ repositoryCount: 3 });
const repoLabel2 = buildRepoLabel(remoteQueryHistoryItem2);
const expectedRepoLabel2 = '3 repositories';

expect(repoLabel2).to.equal(expectedRepoLabel2);
});
});
describe('repo label for variant analysis history items', () => {
it('should return label when `totalScannedRepositoryCount` is 0', () => {
const variantAnalysisHistoryItem0: VariantAnalysisHistoryItem = {
t: 'variant-analysis',
status: QueryStatus.InProgress,
completed: false,
variantAnalysis: createMockVariantAnalysis(
VariantAnalysisStatus.InProgress,
createMockScannedRepos([])
),
};
const repoLabel0 = buildRepoLabel(variantAnalysisHistoryItem0);

expect(repoLabel0).to.equal('0/0 repositories');
});
it('should return label when `totalScannedRepositoryCount` is 1', () => {
const variantAnalysisHistoryItem1: VariantAnalysisHistoryItem = {
t: 'variant-analysis',
status: QueryStatus.InProgress,
completed: false,
variantAnalysis: createMockVariantAnalysis(
VariantAnalysisStatus.InProgress,
createMockScannedRepos([
VariantAnalysisRepoStatus.Pending,
])
),
};

const repoLabel1 = buildRepoLabel(variantAnalysisHistoryItem1);
expect(repoLabel1).to.equal('0/1 repository');
});
it('should return label when `totalScannedRepositoryCount` is greater than 1', () => {
const repoLabel = buildRepoLabel(variantAnalysisHistoryItem);

expect(repoLabel).to.equal('2/4 repositories');
});
});
});
});

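Taken together, these tests specify the two helpers fairly completely. A compact sketch of implementations that would satisfy them, assuming the pluralize helper from the word module below and a '<completed>/<total> repositories' format for variant analyses; the discriminators, field names, and status strings are inferred from the assertions above, so treat all of this as illustrative rather than the module's actual code:

import { pluralize } from '../../src/pure/word';

// Sketch only: types narrowed to just the fields the tests above exercise.
type QueryHistoryInfoSketch =
  | { t: 'local'; initialInfo: { id: string } }
  | { t: 'remote'; queryId: string; repositoryCount: number; remoteQuery: { controllerRepository: { owner: string; name: string } } }
  | { t: 'variant-analysis'; variantAnalysis: { id: number; scannedRepos?: Array<{ analysisStatus: string }> } };

function getQueryIdSketch(item: QueryHistoryInfoSketch): string {
  switch (item.t) {
    case 'local':
      return item.initialInfo.id;
    case 'remote':
      return item.queryId;
    case 'variant-analysis':
      return item.variantAnalysis.id.toString();
  }
}

function buildRepoLabelSketch(item: QueryHistoryInfoSketch): string {
  if (item.t === 'remote') {
    // Fall back to the controller repository when no repository count is recorded.
    return item.repositoryCount > 0
      ? pluralize(item.repositoryCount, 'repository', 'repositories')
      : `${item.remoteQuery.controllerRepository.owner}/${item.remoteQuery.controllerRepository.name}`;
  }
  if (item.t === 'variant-analysis') {
    const repos = item.variantAnalysis.scannedRepos ?? [];
    // 'Completed' here means a terminal state, which is what makes the
    // succeeded + canceled mock above come out as '2/4 repositories'.
    const terminalStates = new Set(['succeeded', 'failed', 'canceled']);
    const completed = repos.filter(r => terminalStates.has(r.analysisStatus)).length;
    return `${completed}/${pluralize(repos.length, 'repository', 'repositories')}`;
  }
  return '';
}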
20
extensions/ql-vscode/test/pure-tests/word.test.ts
Normal file
@@ -0,0 +1,20 @@
import { expect } from 'chai';

import { pluralize } from '../../src/pure/word';

describe('word helpers', () => {
describe('pluralize', () => {
it('should return the plural form if the number is 0', () => {
expect(pluralize(0, 'thing', 'things')).to.eq('0 things');
});
it('should return the singular form if the number is 1', () => {
expect(pluralize(1, 'thing', 'things')).to.eq('1 thing');
});
it('should return the plural form if the number is greater than 1', () => {
expect(pluralize(7, 'thing', 'things')).to.eq('7 things');
});
it('should return the empty string if the number is undefined', () => {
expect(pluralize(undefined, 'thing', 'things')).to.eq('');
});
});
});
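The new test file pins down pluralize completely: undefined yields an empty string, 1 uses the singular form, everything else the plural, always prefixed with the number. A one-liner consistent with those cases, assuming this signature; the real helper in src/pure/word may differ in details:

// Sketch only: format a count with the right singular/plural noun.
export function pluralizeSketch(count: number | undefined, singular: string, plural: string): string {
  if (count === undefined) {
    return '';
  }
  return `${count} ${count === 1 ? singular : plural}`;
}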