Merge branch 'main' into robertbrignull/nightly-codeql

This commit is contained in:
Robert
2023-12-19 17:39:29 +00:00
195 changed files with 12826 additions and 39263 deletions

View File

@@ -7,9 +7,6 @@ updates:
day: "thursday" # Thursday is arbitrary
labels:
- "Update dependencies"
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
- package-ecosystem: "github-actions"
directory: "/"
schedule:
@@ -17,6 +14,3 @@ updates:
day: "thursday" # Thursday is arbitrary
labels:
- "Update dependencies"
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]

View File

@@ -1,4 +1,7 @@
{
"ul-style": {
"style": "dash"
},
"MD013": false,
"MD041": false
}

View File

@@ -14,21 +14,21 @@ appearance, race, religion, or sexual identity and orientation.
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
- The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
- Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
@@ -55,7 +55,7 @@ a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at opensource@github.com. All
reported by contacting the project team at <opensource@github.com>. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.

View File

@@ -22,12 +22,12 @@ Please note that this project is released with a [Contributor Code of Conduct][c
Here are a few things you can do that will increase the likelihood of your pull request being accepted:
* Follow the [style guide][style].
* Write tests:
* [Tests that don't require the VS Code API are located here](extensions/ql-vscode/test).
* [Integration tests that do require the VS Code API are located here](extensions/ql-vscode/src/vscode-tests).
* Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
* Write a [good commit message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
- Follow the [style guide][style].
- Write tests:
- [Tests that don't require the VS Code API are located here](extensions/ql-vscode/test).
- [Integration tests that do require the VS Code API are located here](extensions/ql-vscode/src/vscode-tests).
- Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
- Write a [good commit message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
## Setting up a local build
@@ -99,6 +99,6 @@ More information about Storybook can be found inside the **Overview** page once
## Resources
* [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
* [Using Pull Requests](https://help.github.com/articles/about-pull-requests/)
* [GitHub Help](https://help.github.com)
- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
- [Using Pull Requests](https://help.github.com/articles/about-pull-requests/)
- [GitHub Help](https://help.github.com)

View File

@@ -11,11 +11,11 @@ To see what has changed in the last few versions of the extension, see the [Chan
## Features
* Enables you to use CodeQL to query databases and discover problems in codebases.
* Shows the flow of data through the results of path queries, which is essential for triaging security results.
* Provides an easy way to run queries from the large, open source repository of [CodeQL security queries](https://github.com/github/codeql).
* Adds IntelliSense to support you writing and editing your own CodeQL query and library files.
* Supports you running CodeQL queries against thousands of repositories on GitHub using multi-repository variant analysis.
- Enables you to use CodeQL to query databases and discover problems in codebases.
- Shows the flow of data through the results of path queries, which is essential for triaging security results.
- Provides an easy way to run queries from the large, open source repository of [CodeQL security queries](https://github.com/github/codeql).
- Adds IntelliSense to support you writing and editing your own CodeQL query and library files.
- Supports you running CodeQL queries against thousands of repositories on GitHub using multi-repository variant analysis.
## Project goals and scope
@@ -25,8 +25,8 @@ This project will track new feature development in CodeQL and, whenever appropri
This extension depends on the following two extensions for required functionality. They will be installed automatically when you install VS Code CodeQL.
* [Test Adapter Converter](https://marketplace.visualstudio.com/items?itemName=ms-vscode.test-adapter-converter)
* [Test Explorer UI](https://marketplace.visualstudio.com/items?itemName=hbenl.vscode-test-explorer)
- [Test Adapter Converter](https://marketplace.visualstudio.com/items?itemName=ms-vscode.test-adapter-converter)
- [Test Explorer UI](https://marketplace.visualstudio.com/items?itemName=hbenl.vscode-test-explorer)
## Contributing

View File

@@ -1,33 +1,33 @@
# Releasing (write access required)
1. Determine the new version number. We default to increasing the patch version number, but make our own judgement about whether a change is big enough to warrant a minor version bump. Common reasons for a minor bump could include:
* Making substantial new features available to all users. This can include lifting a feature flag.
* Breakage in compatibility with recent versions of the CLI.
* Minimum required version of VS Code is increased.
* New telemetry events are added.
* Deprecation or removal of commands.
* Accumulation of many changes, none of which are individually big enough to warrant a minor bump, but which together are. This does not include changes which are purely internal to the extension, such as refactoring, or which are only available behind a feature flag.
- Making substantial new features available to all users. This can include lifting a feature flag.
- Breakage in compatibility with recent versions of the CLI.
- Minimum required version of VS Code is increased.
- New telemetry events are added.
- Deprecation or removal of commands.
- Accumulation of many changes, none of which are individually big enough to warrant a minor bump, but which together are. This does not include changes which are purely internal to the extension, such as refactoring, or which are only available behind a feature flag.
1. Create a release branch named after the new version (e.g. `v1.3.6`):
* For a regular scheduled release this branch will be based on latest `main`.
* Make sure your local copy of `main` is up to date so you are including all changes.
* To do a minimal bug-fix release, base the release branch on the tag from the most recent release and then add only the changes you want to release.
* Choose this option if you want to release a specific set of changes (e.g. a bug fix) and don't want to incur extra risk by including other changes that have been merged to the `main` branch.
- For a regular scheduled release this branch will be based on latest `main`.
- Make sure your local copy of `main` is up to date so you are including all changes.
- To do a minimal bug-fix release, base the release branch on the tag from the most recent release and then add only the changes you want to release.
- Choose this option if you want to release a specific set of changes (e.g. a bug fix) and don't want to incur extra risk by including other changes that have been merged to the `main` branch.
```bash
git checkout -b <new_release_branch> <previous_release_tag>
```
1. Run the ["Run CLI tests" workflow](https://github.com/github/vscode-codeql/actions/workflows/cli-test.yml) and make sure the tests are green.
* You can skip this step if you are releasing from `main` and there were no merges since the most recent daily scheduled run of this workflow.
- You can skip this step if you are releasing from `main` and there were no merges since the most recent daily scheduled run of this workflow.
1. Double-check that `CHANGELOG.md` contains all desired change comments and has the version to be released, with its date, at the top.
* Go through PRs that have been merged since the previous release and make sure they are properly accounted for.
* Make sure all changelog entries have links back to their PR(s) if appropriate.
- Go through PRs that have been merged since the previous release and make sure they are properly accounted for.
- Make sure all changelog entries have links back to their PR(s) if appropriate.
1. Double-check that the extension `package.json` and `package-lock.json` have the version you intend to release. If you are doing a patch release (as opposed to minor or major version) this should already be correct.
1. Commit any changes made during steps 4 and 5 with a commit message the same as the branch name (e.g. `v1.3.6`).
1. Open a PR for this release.
* The PR diff should contain:
* Any missing bits from steps 4 and 5. Most of the time, this will just be updating `CHANGELOG.md` with today's date.
* If releasing from a branch other than `main`, this PR will also contain the extension changes being released.
- The PR diff should contain:
- Any missing bits from steps 4 and 5. Most of the time, this will just be updating `CHANGELOG.md` with today's date.
- If releasing from a branch other than `main`, this PR will also contain the extension changes being released.
1. Build the extension using `npm run build` and install it on your VS Code using "Install from VSIX".
1. Go through [our test plan](./test-plan.md) to ensure that the extension is working as expected.
1. Create a new tag on the release branch with your new version (named after the release), e.g.
@@ -37,8 +37,8 @@
```
1. Merge the release PR into `main`.
* If there are conflicts in the changelog, make sure to place any new changelog entries at the top, above the section for the current release, as these new entries are not part of the current release and should be placed in the "unreleased" section.
* The release PR must be merged before pushing the tag to ensure that we always release a commit that is present on the `main` branch. It's not required that the commit is the head of the `main` branch, but there should be no chance of a future release accidentally not including changes from this release.
- If there are conflicts in the changelog, make sure to place any new changelog entries at the top, above the section for the current release, as these new entries are not part of the current release and should be placed in the "unreleased" section.
- The release PR must be merged before pushing the tag to ensure that we always release a commit that is present on the `main` branch. It's not required that the commit is the head of the `main` branch, but there should be no chance of a future release accidentally not including changes from this release.
1. Push the new tag up:
```bash
@@ -46,13 +46,13 @@
```
1. Find the [Release](https://github.com/github/vscode-codeql/actions?query=workflow%3ARelease) workflow run that was just triggered by pushing the tag, and monitor the status of the release build.
* DO NOT approve the "publish" stages of the workflow yet.
- DO NOT approve the "publish" stages of the workflow yet.
1. Download the VSIX from the draft GitHub release at the top of [the releases page](https://github.com/github/vscode-codeql/releases) that is created when the release build finishes.
1. Unzip the `.vsix` and inspect its `package.json` to make sure the version is what you expect,
or look at the source if there's any doubt the right code is being shipped.
1. Install the `.vsix` file into VS Code and ensure the extension loads properly. Run a single command (such as running a query or adding a database).
1. Approve the deployments of the [Release](https://github.com/github/vscode-codeql/actions?query=workflow%3ARelease) workflow run. This will automatically publish to Open VSX and VS Code Marketplace.
* If there is an authentication failure when publishing, be sure to check that the authentication keys haven't expired. See below.
- If there is an authentication failure when publishing, be sure to check that the authentication keys haven't expired. See below.
1. Go to the draft GitHub release in [the releases page](https://github.com/github/vscode-codeql/releases), click 'Edit', add some summary description, and publish it.
1. Confirm the new release is marked as the latest release.
1. If documentation changes need to be published, notify the documentation team that the release has been made.

View File

@@ -2,14 +2,14 @@
We have several types of tests:
* Unit tests: these live in the `tests/unit-tests/` directory
* View tests: these live in `src/view/variant-analysis/__tests__/`
* VSCode integration tests:
* `test/vscode-tests/activated-extension` tests: These are intended to cover functionality that requires the full extension to be activated but doesn't require the CLI. This suite is not run against multiple versions of the CLI in CI.
* `test/vscode-tests/no-workspace` tests: These are intended to cover functionality around not having a workspace. The extension is not activated in these tests.
* `test/vscode-tests/minimal-workspace` tests: These are intended to cover functionality that needs a workspace but doesn't require the full extension to be activated.
* CLI integration tests: these live in `test/vscode-tests/cli-integration`
* These tests are intended to cover functionality that is related to the integration between the CodeQL CLI and the extension. These tests are run against each supported version of the CLI in CI.
- Unit tests: these live in the `tests/unit-tests/` directory
- View tests: these live in `src/view/variant-analysis/__tests__/`
- VSCode integration tests:
- `test/vscode-tests/activated-extension` tests: These are intended to cover functionality that requires the full extension to be activated but doesn't require the CLI. This suite is not run against multiple versions of the CLI in CI.
- `test/vscode-tests/no-workspace` tests: These are intended to cover functionality around not having a workspace. The extension is not activated in these tests.
- `test/vscode-tests/minimal-workspace` tests: These are intended to cover functionality that needs a workspace but doesn't require the full extension to be activated.
- CLI integration tests: these live in `test/vscode-tests/cli-integration`
- These tests are intended to cover functionality that is related to the integration between the CodeQL CLI and the extension. These tests are run against each supported version of the CLI in CI.
The CLI integration tests require an instance of the CodeQL CLI to run, so they need some extra setup steps. When adding new tests to our test suite, please be mindful of whether they need to be in the cli-integration folder. If the tests don't depend on the CLI, they are better suited to being VSCode integration tests.
@@ -26,9 +26,9 @@ Pre-requisites:
Then, from the `extensions/ql-vscode` directory, use the appropriate command to run the tests:
* Unit tests: `npm run test:unit`
* View Tests: `npm run test:view`
* VSCode integration tests: `npm run test:vscode-integration`
- Unit tests: `npm run test:unit`
- View Tests: `npm run test:view`
- VSCode integration tests: `npm run test:vscode-integration`
#### Running CLI integration tests from the terminal
@@ -48,9 +48,9 @@ Alternatively, you can run the tests inside of VSCode. There are several VSCode
You will need to run tests using a task from inside of VS Code, under the "Run and Debug" view:
* Unit tests: run the _Launch Unit Tests_ task
* View Tests: run the _Launch Unit Tests - React_ task
* VSCode integration tests: run the _Launch Unit Tests - No Workspace_ and _Launch Unit Tests - Minimal Workspace_ tasks
- Unit tests: run the _Launch Unit Tests_ task
- View Tests: run the _Launch Unit Tests - React_ task
- VSCode integration tests: run the _Launch Unit Tests - No Workspace_ and _Launch Unit Tests - Minimal Workspace_ tasks
#### Running CLI integration tests from VSCode

View File

@@ -2,6 +2,15 @@
## [UNRELEASED]
- Add a prompt for downloading a GitHub database when opening a GitHub repository. [#3138](https://github.com/github/vscode-codeql/pull/3138)
- Avoid showing a popup when hovering over source elements in database source files. [#3125](https://github.com/github/vscode-codeql/pull/3125)
- Add comparison of alerts when comparing query results. This allows viewing path explanations for differences in alerts. [#3113](https://github.com/github/vscode-codeql/pull/3113)
## 1.11.0 - 13 December 2023
- Add a new method modeling panel to classify methods as sources/sinks/summaries while in the context of the source code. [#3128](https://github.com/github/vscode-codeql/pull/3128)
- Add the ability to add multiple classifications per method in the CodeQL Model Editor. [#3128](https://github.com/github/vscode-codeql/pull/3128)
- Switch add and delete button positions in the CodeQL Model Editor. [#3123](https://github.com/github/vscode-codeql/pull/3123)
- Add a prompt to the "Quick query" command to encourage users in single-folder workspaces to use "Create query" instead. [#3082](https://github.com/github/vscode-codeql/pull/3082)
- Remove support for CodeQL CLI versions older than 2.11.6. [#3087](https://github.com/github/vscode-codeql/pull/3087)
- Preserve focus on results viewer when showing a location in a file. [#3088](https://github.com/github/vscode-codeql/pull/3088)
@@ -14,7 +23,7 @@
- Add new CodeQL views for managing databases and queries:
1. A queries panel that shows all queries in your workspace. It allows you to view, create, and run queries in one place.
2. A language selector, which allows you to quickly filter databases and queries by language.
For more information, see the [documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/analyzing-your-projects/#filtering-databases-and-queries-by-language).
- When adding a CodeQL database, we no longer add the database source folder to the workspace by default (since this caused bugs in single-folder workspaces). [#3047](https://github.com/github/vscode-codeql/pull/3047)
- You can manually add individual database source folders to the workspace with the "Add Database Source to Workspace" right-click command in the databases view.

File diff suppressed because it is too large

View File

@@ -4,7 +4,7 @@
"description": "CodeQL for Visual Studio Code",
"author": "GitHub",
"private": true,
"version": "1.10.1",
"version": "1.11.1",
"publisher": "GitHub",
"license": "MIT",
"icon": "media/VS-marketplace-CodeQL-icon.png",
@@ -1840,15 +1840,14 @@
{
"id": "codeQLMethodModeling",
"type": "webview",
"name": "CodeQL Method Modeling",
"when": "config.codeQL.canary"
"name": "CodeQL Method Modeling"
}
],
"codeql-methods-usage": [
{
"id": "codeQLMethodsUsage",
"name": "CodeQL Methods Usage",
"when": "config.codeQL.canary && codeql.modelEditorOpen"
"when": "codeql.modelEditorOpen"
}
]
},
@@ -1915,7 +1914,7 @@
"dependencies": {
"@octokit/plugin-retry": "^6.0.1",
"@octokit/rest": "^20.0.2",
"@vscode/codicons": "^0.0.31",
"@vscode/codicons": "^0.0.35",
"@vscode/debugadapter": "^1.59.0",
"@vscode/debugprotocol": "^1.59.0",
"@vscode/webview-ui-toolkit": "^1.0.1",
@@ -1926,12 +1925,11 @@
"d3": "^7.6.1",
"d3-graphviz": "^5.0.2",
"fs-extra": "^11.1.1",
"immutable": "^4.0.0",
"js-yaml": "^4.1.0",
"msw": "^2.0.0",
"nanoid": "^5.0.1",
"node-fetch": "^2.6.7",
"p-queue": "^7.4.1",
"p-queue": "^8.0.1",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"semver": "^7.5.2",
@@ -1939,7 +1937,7 @@
"source-map-support": "^0.5.21",
"stream-json": "^1.7.3",
"styled-components": "^6.0.2",
"tmp": "^0.1.0",
"tmp": "^0.2.1",
"tmp-promise": "^3.0.2",
"tree-kill": "^1.2.2",
"unzipper": "^0.10.5",
@@ -1948,6 +1946,7 @@
"vscode-languageclient": "^8.0.2",
"vscode-test-adapter-api": "^1.7.0",
"vscode-test-adapter-util": "^0.7.0",
"yauzl": "^2.10.0",
"zip-a-folder": "^3.1.3"
},
"devDependencies": {
@@ -1957,7 +1956,7 @@
"@babel/preset-react": "^7.18.6",
"@babel/preset-typescript": "^7.21.4",
"@faker-js/faker": "^8.0.2",
"@github/markdownlint-github": "^0.3.0",
"@github/markdownlint-github": "^0.6.0",
"@octokit/plugin-throttling": "^8.0.0",
"@storybook/addon-a11y": "^7.4.6",
"@storybook/addon-actions": "^7.1.0",
@@ -1975,7 +1974,6 @@
"@testing-library/react": "^14.0.0",
"@testing-library/user-event": "^14.4.3",
"@types/child-process-promise": "^2.2.1",
"@types/classnames": "^2.2.9",
"@types/d3": "^7.4.0",
"@types/d3-graphviz": "^2.6.6",
"@types/del": "^4.0.0",
@@ -1993,15 +1991,16 @@
"@types/semver": "^7.2.0",
"@types/stream-json": "^1.7.1",
"@types/styled-components": "^5.1.11",
"@types/tar-stream": "^2.2.2",
"@types/tar-stream": "^3.1.3",
"@types/through2": "^2.0.36",
"@types/tmp": "^0.1.0",
"@types/unzipper": "^0.10.1",
"@types/vscode": "^1.82.0",
"@types/webpack": "^5.28.0",
"@types/webpack-env": "^1.18.0",
"@types/yauzl": "^2.10.3",
"@typescript-eslint/eslint-plugin": "^6.2.1",
"@typescript-eslint/parser": "^6.2.1",
"@typescript-eslint/parser": "^6.14.0",
"@vscode/test-electron": "^2.2.0",
"@vscode/vsce": "^2.19.0",
"ansi-colors": "^4.1.1",
@@ -2010,7 +2009,6 @@
"cross-env": "^7.0.3",
"css-loader": "^6.8.1",
"del": "^6.0.0",
"esbuild": "^0.15.15",
"eslint": "^8.23.1",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-etc": "^2.0.2",
@@ -2023,7 +2021,7 @@
"file-loader": "^6.2.0",
"glob": "^10.0.0",
"gulp": "^4.0.2",
"gulp-esbuild": "^0.10.5",
"gulp-esbuild": "^0.12.0",
"gulp-replace": "^1.1.3",
"gulp-typescript": "^5.0.1",
"husky": "^8.0.0",
@@ -2031,8 +2029,8 @@
"jest-environment-jsdom": "^29.0.3",
"jest-runner-vscode": "^3.0.1",
"lint-staged": "^15.0.2",
"markdownlint-cli2": "^0.6.0",
"markdownlint-cli2-formatter-pretty": "^0.0.4",
"markdownlint-cli2": "^0.11.0",
"markdownlint-cli2-formatter-pretty": "^0.0.5",
"mini-css-extract-plugin": "^2.6.1",
"npm-run-all": "^4.1.5",
"patch-package": "^8.0.0",

View File

@@ -11,7 +11,7 @@ import { promisify } from "util";
import { CancellationToken, Disposable, Uri } from "vscode";
import {
BQRSInfo,
BqrsInfo,
DecodedBqrs,
DecodedBqrsChunk,
} from "../common/bqrs-cli-types";
@@ -928,11 +928,11 @@ export class CodeQLCliServer implements Disposable {
* @param bqrsPath The path to the bqrs.
* @param pageSize The page size to precompute offsets into the binary file for.
*/
async bqrsInfo(bqrsPath: string, pageSize?: number): Promise<BQRSInfo> {
async bqrsInfo(bqrsPath: string, pageSize?: number): Promise<BqrsInfo> {
const subcommandArgs = (
pageSize ? ["--paginate-rows", pageSize.toString()] : []
).concat(bqrsPath);
return await this.runJsonCodeQlCliCommand<BQRSInfo>(
return await this.runJsonCodeQlCliCommand<BqrsInfo>(
["bqrs", "info"],
subcommandArgs,
"Reading bqrs header",

View File

@@ -15,7 +15,6 @@ import {
import {
codeQlLauncherName,
deprecatedCodeQlLauncherName,
extractZipArchive,
getRequiredAssetName,
} from "../common/distribution";
import {
@@ -26,6 +25,7 @@ import {
showAndLogErrorMessage,
showAndLogWarningMessage,
} from "../common/logging";
import { unzipToDirectory } from "../common/unzip";
/**
* distribution.ts
@@ -205,9 +205,8 @@ export class DistributionManager implements DistributionProvider {
if (process.env.PATH) {
for (const searchDirectory of process.env.PATH.split(delimiter)) {
const expectedLauncherPath = await getExecutableFromDirectory(
searchDirectory,
);
const expectedLauncherPath =
await getExecutableFromDirectory(searchDirectory);
if (expectedLauncherPath) {
return {
codeQlPath: expectedLauncherPath,
@@ -431,7 +430,7 @@ class ExtensionSpecificDistributionManager {
void extLogger.log(
`Extracting CodeQL CLI to ${this.getDistributionStoragePath()}`,
);
await extractZipArchive(archivePath, this.getDistributionStoragePath());
await unzipToDirectory(archivePath, this.getDistributionStoragePath());
} finally {
await remove(tmpDirectory);
}

View File

@@ -4,7 +4,7 @@
* the "for the sake of extensibility" comment in messages.ts.
*/
// eslint-disable-next-line @typescript-eslint/no-namespace
export namespace ColumnKindCode {
export namespace BqrsColumnKindCode {
export const FLOAT = "f";
export const INTEGER = "i";
export const STRING = "s";
@@ -13,55 +13,44 @@ export namespace ColumnKindCode {
export const ENTITY = "e";
}
type ColumnKind =
| typeof ColumnKindCode.FLOAT
| typeof ColumnKindCode.INTEGER
| typeof ColumnKindCode.STRING
| typeof ColumnKindCode.BOOLEAN
| typeof ColumnKindCode.DATE
| typeof ColumnKindCode.ENTITY;
export type BqrsColumnKind =
| typeof BqrsColumnKindCode.FLOAT
| typeof BqrsColumnKindCode.INTEGER
| typeof BqrsColumnKindCode.STRING
| typeof BqrsColumnKindCode.BOOLEAN
| typeof BqrsColumnKindCode.DATE
| typeof BqrsColumnKindCode.ENTITY;
interface Column {
export interface BqrsSchemaColumn {
name?: string;
kind: ColumnKind;
kind: BqrsColumnKind;
}
export interface ResultSetSchema {
export interface BqrsResultSetSchema {
name: string;
rows: number;
columns: Column[];
pagination?: PaginationInfo;
columns: BqrsSchemaColumn[];
pagination?: BqrsPaginationInfo;
}
export function getResultSetSchema(
resultSetName: string,
resultSets: BQRSInfo,
): ResultSetSchema | undefined {
for (const schema of resultSets["result-sets"]) {
if (schema.name === resultSetName) {
return schema;
}
}
return undefined;
}
interface PaginationInfo {
interface BqrsPaginationInfo {
"step-size": number;
offsets: number[];
}
export interface BQRSInfo {
"result-sets": ResultSetSchema[];
export interface BqrsInfo {
"result-sets": BqrsResultSetSchema[];
}
export type BqrsId = number;
export interface EntityValue {
url?: UrlValue;
export interface BqrsEntityValue {
url?: BqrsUrlValue;
label?: string;
id?: BqrsId;
}
export interface LineColumnLocation {
export interface BqrsLineColumnLocation {
uri: string;
startLine: number;
startColumn: number;
@@ -69,7 +58,7 @@ export interface LineColumnLocation {
endColumn: number;
}
export interface WholeFileLocation {
export interface BqrsWholeFileLocation {
uri: string;
startLine: never;
startColumn: never;
@@ -77,47 +66,28 @@ export interface WholeFileLocation {
endColumn: never;
}
export type ResolvableLocationValue = WholeFileLocation | LineColumnLocation;
export type BqrsUrlValue =
| BqrsWholeFileLocation
| BqrsLineColumnLocation
| string;
export type UrlValue = ResolvableLocationValue | string;
export type CellValue = EntityValue | number | string | boolean;
export type ResultRow = CellValue[];
export interface RawResultSet {
readonly schema: ResultSetSchema;
readonly rows: readonly ResultRow[];
}
// TODO: This function is not necessary. It generates a tuple that is slightly easier
// to handle than the ResultSetSchema and DecodedBqrsChunk. But perhaps it is unnecessary
// boilerplate.
export function transformBqrsResultSet(
schema: ResultSetSchema,
page: DecodedBqrsChunk,
): RawResultSet {
return {
schema,
rows: Array.from(page.tuples),
};
}
export type BqrsCellValue = BqrsEntityValue | number | string | boolean;
export type BqrsKind =
| "String"
| "Float"
| "Integer"
| "String"
| "Boolean"
| "Date"
| "Entity";
export interface BqrsColumn {
interface BqrsColumn {
name?: string;
kind: BqrsKind;
}
export interface DecodedBqrsChunk {
tuples: CellValue[][];
tuples: BqrsCellValue[][];
next?: number;
columns: BqrsColumn[];
}
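
For orientation, here is a minimal sketch (values are hypothetical, not taken from this diff) of the shapes the renamed types describe: `BqrsInfo` is the schema metadata reported by `codeql bqrs info`, and `DecodedBqrsChunk` is one decoded page of tuples. The relative import path assumes a sibling module.

```ts
import { BqrsInfo, DecodedBqrsChunk } from "./bqrs-cli-types";

// Hypothetical schema metadata for a single result set with one string column.
const info: BqrsInfo = {
  "result-sets": [
    { name: "#select", rows: 2, columns: [{ name: "message", kind: "s" }] },
  ],
};

// Hypothetical decoded page for that result set: one string cell per row.
const chunk: DecodedBqrsChunk = {
  tuples: [["hello"], ["world"]],
  columns: [{ name: "message", kind: "String" }],
};
```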

View File

@@ -0,0 +1,216 @@
import {
BqrsCellValue,
BqrsColumnKind,
BqrsColumnKindCode,
DecodedBqrsChunk,
BqrsEntityValue,
BqrsLineColumnLocation,
BqrsResultSetSchema,
BqrsUrlValue,
BqrsWholeFileLocation,
BqrsSchemaColumn,
} from "./bqrs-cli-types";
import {
CellValue,
Column,
ColumnKind,
EntityValue,
RawResultSet,
Row,
UrlValue,
UrlValueResolvable,
} from "./raw-result-types";
import { assertNever } from "./helpers-pure";
import { isEmptyPath } from "./bqrs-utils";
export function bqrsToResultSet(
schema: BqrsResultSetSchema,
chunk: DecodedBqrsChunk,
): RawResultSet {
const name = schema.name;
const totalRowCount = schema.rows;
const columns = schema.columns.map(mapColumn);
const rows = chunk.tuples.map(
(tuple): Row => tuple.map((cell): CellValue => mapCellValue(cell)),
);
const resultSet: RawResultSet = {
name,
totalRowCount,
columns,
rows,
};
if (chunk.next) {
resultSet.nextPageOffset = chunk.next;
}
return resultSet;
}
function mapColumn(column: BqrsSchemaColumn): Column {
const result: Column = {
kind: mapColumnKind(column.kind),
};
if (column.name) {
result.name = column.name;
}
return result;
}
function mapColumnKind(kind: BqrsColumnKind): ColumnKind {
switch (kind) {
case BqrsColumnKindCode.STRING:
return ColumnKind.String;
case BqrsColumnKindCode.FLOAT:
return ColumnKind.Float;
case BqrsColumnKindCode.INTEGER:
return ColumnKind.Integer;
case BqrsColumnKindCode.BOOLEAN:
return ColumnKind.Boolean;
case BqrsColumnKindCode.DATE:
return ColumnKind.Date;
case BqrsColumnKindCode.ENTITY:
return ColumnKind.Entity;
default:
assertNever(kind);
}
}
function mapCellValue(cellValue: BqrsCellValue): CellValue {
switch (typeof cellValue) {
case "string":
return {
type: "string",
value: cellValue,
};
case "number":
return {
type: "number",
value: cellValue,
};
case "boolean":
return {
type: "boolean",
value: cellValue,
};
case "object":
return {
type: "entity",
value: mapEntityValue(cellValue),
};
}
}
function mapEntityValue(cellValue: BqrsEntityValue): EntityValue {
const result: EntityValue = {};
if (cellValue.id) {
result.id = cellValue.id;
}
if (cellValue.label) {
result.label = cellValue.label;
}
if (cellValue.url) {
result.url = mapUrlValue(cellValue.url);
}
return result;
}
export function mapUrlValue(urlValue: BqrsUrlValue): UrlValue | undefined {
if (typeof urlValue === "string") {
const location = tryGetLocationFromString(urlValue);
if (location !== undefined) {
return location;
}
return {
type: "string",
value: urlValue,
};
}
if (isWholeFileLoc(urlValue)) {
return {
type: "wholeFileLocation",
uri: urlValue.uri,
};
}
if (isLineColumnLoc(urlValue)) {
return {
type: "lineColumnLocation",
uri: urlValue.uri,
startLine: urlValue.startLine,
startColumn: urlValue.startColumn,
endLine: urlValue.endLine,
endColumn: urlValue.endColumn,
};
}
return undefined;
}
function isLineColumnLoc(loc: BqrsUrlValue): loc is BqrsLineColumnLocation {
return (
typeof loc !== "string" &&
!isEmptyPath(loc.uri) &&
"startLine" in loc &&
"startColumn" in loc &&
"endLine" in loc &&
"endColumn" in loc
);
}
function isWholeFileLoc(loc: BqrsUrlValue): loc is BqrsWholeFileLocation {
return (
typeof loc !== "string" && !isEmptyPath(loc.uri) && !isLineColumnLoc(loc)
);
}
/**
* The CodeQL filesystem libraries use this pattern in `getURL()` predicates
* to describe the location of an entire filesystem resource.
* Such locations appear as `StringLocation`s instead of `FivePartLocation`s.
*
* Folder resources also get similar URLs, but with the `folder` scheme.
* They are deliberately ignored here, since there is no suitable location to show the user.
*/
const FILE_LOCATION_REGEX = /file:\/\/(.+):([0-9]+):([0-9]+):([0-9]+):([0-9]+)/;
function tryGetLocationFromString(loc: string): UrlValueResolvable | undefined {
const matches = FILE_LOCATION_REGEX.exec(loc);
if (matches && matches.length > 1 && matches[1]) {
if (isWholeFileMatch(matches)) {
return {
type: "wholeFileLocation",
uri: matches[1],
};
} else {
return {
type: "lineColumnLocation",
uri: matches[1],
startLine: Number(matches[2]),
startColumn: Number(matches[3]),
endLine: Number(matches[4]),
endColumn: Number(matches[5]),
};
}
}
return undefined;
}
function isWholeFileMatch(matches: RegExpExecArray): boolean {
return (
matches[2] === "0" &&
matches[3] === "0" &&
matches[4] === "0" &&
matches[5] === "0"
);
}
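
As a rough illustration of the string-location handling described in the comment above, here is how the exported `mapUrlValue` helper behaves on a few made-up inputs (a sketch, not part of the diff):

```ts
import { mapUrlValue } from "./bqrs-raw-results-mapper";

// A five-part location whose positions are all zero denotes a whole file.
mapUrlValue("file://src/index.ts:0:0:0:0");
// -> { type: "wholeFileLocation", uri: "src/index.ts" }

// Non-zero positions become a line/column location.
mapUrlValue("file://src/index.ts:3:1:3:10");
// -> { type: "lineColumnLocation", uri: "src/index.ts", startLine: 3, startColumn: 1, endLine: 3, endColumn: 10 }

// Anything that doesn't match the pattern is kept as a plain string value.
mapUrlValue("https://example.com");
// -> { type: "string", value: "https://example.com" }
```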

View File

@@ -1,111 +1,20 @@
import {
UrlValue,
ResolvableLocationValue,
LineColumnLocation,
WholeFileLocation,
} from "./bqrs-cli-types";
import { createRemoteFileRef } from "../common/location-link-utils";
/**
* The CodeQL filesystem libraries use this pattern in `getURL()` predicates
* to describe the location of an entire filesystem resource.
* Such locations appear as `StringLocation`s instead of `FivePartLocation`s.
*
* Folder resources also get similar URLs, but with the `folder` scheme.
* They are deliberately ignored here, since there is no suitable location to show the user.
*/
const FILE_LOCATION_REGEX = /file:\/\/(.+):([0-9]+):([0-9]+):([0-9]+):([0-9]+)/;
/**
* Gets a resolvable source file location for the specified `LocationValue`, if possible.
* @param loc The location to test.
*/
export function tryGetResolvableLocation(
loc: UrlValue | undefined,
): ResolvableLocationValue | undefined {
let resolvedLoc;
if (loc === undefined) {
resolvedLoc = undefined;
} else if (isWholeFileLoc(loc) || isLineColumnLoc(loc)) {
resolvedLoc = loc as ResolvableLocationValue;
} else if (isStringLoc(loc)) {
resolvedLoc = tryGetLocationFromString(loc);
} else {
resolvedLoc = undefined;
}
return resolvedLoc;
}
export function tryGetLocationFromString(
loc: string,
): ResolvableLocationValue | undefined {
const matches = FILE_LOCATION_REGEX.exec(loc);
if (matches && matches.length > 1 && matches[1]) {
if (isWholeFileMatch(matches)) {
return {
uri: matches[1],
} as WholeFileLocation;
} else {
return {
uri: matches[1],
startLine: Number(matches[2]),
startColumn: Number(matches[3]),
endLine: Number(matches[4]),
endColumn: Number(matches[5]),
};
}
} else {
return undefined;
}
}
function isWholeFileMatch(matches: RegExpExecArray): boolean {
return (
matches[2] === "0" &&
matches[3] === "0" &&
matches[4] === "0" &&
matches[5] === "0"
);
}
import { isUrlValueResolvable, UrlValue } from "./raw-result-types";
/**
* Checks whether the file path is empty. If so, we do not want to render this location
* as a link.
*
* @param uri A file uri
*/
export function isEmptyPath(uriStr: string) {
return !uriStr || uriStr === "file:/";
}
export function isLineColumnLoc(loc: UrlValue): loc is LineColumnLocation {
return (
typeof loc !== "string" &&
!isEmptyPath(loc.uri) &&
"startLine" in loc &&
"startColumn" in loc &&
"endLine" in loc &&
"endColumn" in loc
);
}
export function isWholeFileLoc(loc: UrlValue): loc is WholeFileLocation {
return (
typeof loc !== "string" && !isEmptyPath(loc.uri) && !isLineColumnLoc(loc)
);
}
export function isStringLoc(loc: UrlValue): loc is string {
return typeof loc === "string";
}
export function tryGetRemoteLocation(
loc: UrlValue | undefined,
fileLinkPrefix: string,
sourceLocationPrefix: string | undefined,
): string | undefined {
const resolvableLocation = tryGetResolvableLocation(loc);
if (!resolvableLocation) {
if (!loc || !isUrlValueResolvable(loc)) {
return undefined;
}
@@ -115,22 +24,19 @@ export function tryGetRemoteLocation(
// "file:${sourceLocationPrefix}/relative/path/to/file"
// So we need to strip off the first part to get the relative path.
if (sourceLocationPrefix) {
if (!resolvableLocation.uri.startsWith(`file:${sourceLocationPrefix}/`)) {
if (!loc.uri.startsWith(`file:${sourceLocationPrefix}/`)) {
return undefined;
}
trimmedLocation = resolvableLocation.uri.replace(
`file:${sourceLocationPrefix}/`,
"",
);
trimmedLocation = loc.uri.replace(`file:${sourceLocationPrefix}/`, "");
} else {
// If the source location prefix is empty (e.g. for older remote queries), we assume that the database
// was created on a Linux actions runner and has the format:
// "file:/home/runner/work/<repo>/<repo>/relative/path/to/file"
// So we need to drop the first 6 parts of the path.
if (!resolvableLocation.uri.startsWith("file:/home/runner/work/")) {
if (!loc.uri.startsWith("file:/home/runner/work/")) {
return undefined;
}
const locationParts = resolvableLocation.uri.split("/");
const locationParts = loc.uri.split("/");
trimmedLocation = locationParts.slice(6, locationParts.length).join("/");
}
@@ -138,11 +44,16 @@ export function tryGetRemoteLocation(
fileLinkPrefix,
filePath: trimmedLocation,
};
if (loc.type === "wholeFileLocation") {
return createRemoteFileRef(fileLink);
}
return createRemoteFileRef(
fileLink,
resolvableLocation.startLine,
resolvableLocation.endLine,
resolvableLocation.startColumn,
resolvableLocation.endColumn,
loc.startLine,
loc.endLine,
loc.startColumn,
loc.endColumn,
);
}

View File

@@ -1,7 +1,4 @@
import { platform } from "os";
import { Open } from "unzipper";
import { join } from "path";
import { pathExists, chmod } from "fs-extra";
/**
* Get the name of the codeql cli installation we prefer to install, based on our current platform.
@@ -19,31 +16,6 @@ export function getRequiredAssetName(): string {
}
}
export async function extractZipArchive(
archivePath: string,
outPath: string,
): Promise<void> {
const archive = await Open.file(archivePath);
await archive.extract({
concurrency: 4,
path: outPath,
});
// Set file permissions for extracted files
await Promise.all(
archive.files.map(async (file) => {
// Only change file permissions if within outPath (path.join normalises the path)
const extractedPath = join(outPath, file.path);
if (
extractedPath.indexOf(outPath) !== 0 ||
!(await pathExists(extractedPath))
) {
return Promise.resolve();
}
return chmod(extractedPath, file.externalFileAttributes >>> 16);
}),
);
}
export function codeQlLauncherName(): string {
return platform() === "win32" ? "codeql.exe" : "codeql";
}

View File

@@ -1,11 +1,4 @@
import * as sarif from "sarif";
import {
RawResultSet,
ResultRow,
ResultSetSchema,
ResolvableLocationValue,
BqrsColumn,
} from "../common/bqrs-cli-types";
import {
VariantAnalysis,
VariantAnalysisScannedRepositoryResult,
@@ -25,6 +18,12 @@ import {
} from "../model-editor/shared/view-state";
import { Mode } from "../model-editor/shared/mode";
import { QueryLanguage } from "./query-language";
import {
Column,
RawResultSet,
Row,
UrlValueResolvable,
} from "./raw-result-types";
/**
* This module contains types and code that are shared between
@@ -35,10 +34,13 @@ export const SELECT_TABLE_NAME = "#select";
export const ALERTS_TABLE_NAME = "alerts";
export const GRAPH_TABLE_NAME = "graph";
export type RawTableResultSet = { t: "RawResultSet" } & RawResultSet;
export type InterpretedResultSet<T> = {
type RawTableResultSet = {
t: "RawResultSet";
resultSet: RawResultSet;
};
type InterpretedResultSet<T> = {
t: "InterpretedResultSet";
readonly schema: ResultSetSchema;
name: string;
interpretation: InterpretationT<T>;
};
@@ -208,7 +210,7 @@ export type FromResultsViewMsg =
*/
interface ViewSourceFileMsg {
t: "viewSourceFile";
loc: ResolvableLocationValue;
loc: UrlValueResolvable;
databaseUri: string;
}
@@ -369,7 +371,9 @@ export interface SetComparisonsMessage {
readonly message: string | undefined;
}
type QueryCompareResult = RawQueryCompareResult | InterpretedQueryCompareResult;
export type QueryCompareResult =
| RawQueryCompareResult
| InterpretedQueryCompareResult;
/**
* from is the set of rows that have changes in the "from" query.
@@ -377,16 +381,16 @@ type QueryCompareResult = RawQueryCompareResult | InterpretedQueryCompareResult;
*/
export type RawQueryCompareResult = {
kind: "raw";
columns: readonly BqrsColumn[];
from: ResultRow[];
to: ResultRow[];
columns: readonly Column[];
from: Row[];
to: Row[];
};
/**
* from is the set of results that have changes in the "from" query.
* to is the set of results that have changes in the "to" query.
*/
type InterpretedQueryCompareResult = {
export type InterpretedQueryCompareResult = {
kind: "interpreted";
sourceLocationPrefix: string;
from: sarif.Result[];

View File

@@ -0,0 +1,90 @@
export enum ColumnKind {
String = "string",
Float = "float",
Integer = "integer",
Boolean = "boolean",
Date = "date",
Entity = "entity",
}
export type Column = {
name?: string;
kind: ColumnKind;
};
type UrlValueString = {
type: "string";
value: string;
};
export type UrlValueWholeFileLocation = {
type: "wholeFileLocation";
uri: string;
};
export type UrlValueLineColumnLocation = {
type: "lineColumnLocation";
uri: string;
startLine: number;
startColumn: number;
endLine: number;
endColumn: number;
};
export type UrlValueResolvable =
| UrlValueWholeFileLocation
| UrlValueLineColumnLocation;
export function isUrlValueResolvable(
value: UrlValue,
): value is UrlValueResolvable {
return (
value.type === "wholeFileLocation" || value.type === "lineColumnLocation"
);
}
export type UrlValue = UrlValueString | UrlValueResolvable;
export type EntityValue = {
url?: UrlValue;
label?: string;
id?: number;
};
type CellValueEntity = {
type: "entity";
value: EntityValue;
};
type CellValueNumber = {
type: "number";
value: number;
};
type CellValueString = {
type: "string";
value: string;
};
type CellValueBoolean = {
type: "boolean";
value: boolean;
};
export type CellValue =
| CellValueEntity
| CellValueNumber
| CellValueString
| CellValueBoolean;
export type Row = CellValue[];
export type RawResultSet = {
name: string;
totalRowCount: number;
columns: Column[];
rows: Row[];
nextPageOffset?: number;
};
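
To show how these discriminated unions are meant to be consumed, here is a small hedged sketch (the result set and the `renderCell` helper are invented for illustration): cells are narrowed on their `type` tag, and only resolvable URLs can be turned into links.

```ts
import {
  CellValue,
  ColumnKind,
  isUrlValueResolvable,
  RawResultSet,
} from "./raw-result-types";

// Hypothetical single-row result set using the new discriminated-union cells.
const resultSet: RawResultSet = {
  name: "#select",
  totalRowCount: 1,
  columns: [{ name: "message", kind: ColumnKind.String }],
  rows: [[{ type: "string", value: "hello" }]],
};

// Cells are narrowed on their `type` tag rather than with typeof checks.
function renderCell(cell: CellValue): string {
  switch (cell.type) {
    case "string":
      return cell.value;
    case "number":
    case "boolean":
      return String(cell.value);
    case "entity": {
      const url = cell.value.url;
      // Only resolvable locations (whole-file or line/column) carry a uri.
      return url && isUrlValueResolvable(url)
        ? `${cell.value.label ?? ""} (${url.uri})`
        : cell.value.label ?? "";
    }
  }
}
```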

View File

@@ -1,11 +1,11 @@
export type DeepReadonly<T> = T extends Array<infer R>
? DeepReadonlyArray<R>
: // eslint-disable-next-line @typescript-eslint/ban-types
T extends Function
? T
: T extends object
? DeepReadonlyObject<T>
: T;
T extends Function
? T
: T extends object
? DeepReadonlyObject<T>
: T;
interface DeepReadonlyArray<T> extends ReadonlyArray<DeepReadonly<T>> {}
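
The hunk above only re-indents the nested conditional type. For context, a quick hypothetical illustration of what `DeepReadonly` produces (the module path and type names are assumptions):

```ts
import { DeepReadonly } from "./readonly-types"; // hypothetical path

type Config = { names: string[]; retry: { limit: number } };

// Roughly: { readonly names: readonly string[];
//            readonly retry: { readonly limit: number } }
type FrozenConfig = DeepReadonly<Config>;
```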

View File

@@ -1,6 +1,6 @@
import * as Sarif from "sarif";
import type { HighlightedRegion } from "../variant-analysis/shared/analysis-result";
import { ResolvableLocationValue } from "../common/bqrs-cli-types";
import { UrlValueResolvable } from "./raw-result-types";
import { isEmptyPath } from "./bqrs-utils";
export interface SarifLink {
@@ -16,7 +16,7 @@ interface NoLocation {
}
type ParsedSarifLocation =
| (ResolvableLocationValue & {
| (UrlValueResolvable & {
userVisibleFile: string;
})
// Resolvable locations have a `uri` field, but it will sometimes include
@@ -137,6 +137,7 @@ export function parseSarifLocation(
// If the region property is absent, the physicalLocation object refers to the entire file.
// Source: https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Toc16012638.
return {
type: "wholeFileLocation",
uri: effectiveLocation,
userVisibleFile,
} as ParsedSarifLocation;
@@ -144,6 +145,7 @@ export function parseSarifLocation(
const region = parseSarifRegion(physicalLocation.region);
return {
type: "lineColumnLocation",
uri: effectiveLocation,
userVisibleFile,
...region,
@@ -232,14 +234,14 @@ export function parseHighlightedLine(
const highlightStartColumn = isSingleLineHighlight
? highlightedRegion.startColumn
: isFirstHighlightedLine
? highlightedRegion.startColumn
: 0;
? highlightedRegion.startColumn
: 0;
const highlightEndColumn = isSingleLineHighlight
? highlightedRegion.endColumn
: isLastHighlightedLine
? highlightedRegion.endColumn
: line.length + 1;
? highlightedRegion.endColumn
: line.length + 1;
const plainSection1 = line.substring(0, highlightStartColumn - 1);
const highlightedSection = line.substring(

View File

@@ -0,0 +1,147 @@
import { Entry as ZipEntry, open, Options as ZipOptions, ZipFile } from "yauzl";
import { Readable } from "stream";
import { dirname, join } from "path";
import { WriteStream } from "fs";
import { createWriteStream, ensureDir } from "fs-extra";
// We can't use promisify because it picks up the wrong overload.
export function openZip(
path: string,
options: ZipOptions = {},
): Promise<ZipFile> {
return new Promise((resolve, reject) => {
open(path, options, (err, zipFile) => {
if (err) {
reject(err);
return;
}
resolve(zipFile);
});
});
}
export function excludeDirectories(entries: ZipEntry[]): ZipEntry[] {
return entries.filter((entry) => !/\/$/.test(entry.fileName));
}
export function readZipEntries(zipFile: ZipFile): Promise<ZipEntry[]> {
return new Promise((resolve, reject) => {
const files: ZipEntry[] = [];
zipFile.readEntry();
zipFile.on("entry", (entry: ZipEntry) => {
if (/\/$/.test(entry.fileName)) {
// Directory file names end with '/'
// We don't need to do anything for directories.
} else {
files.push(entry);
}
zipFile.readEntry();
});
zipFile.on("end", () => {
resolve(files);
});
zipFile.on("error", (err) => {
reject(err);
});
});
}
function openZipReadStream(
zipFile: ZipFile,
entry: ZipEntry,
): Promise<Readable> {
return new Promise((resolve, reject) => {
zipFile.openReadStream(entry, (err, readStream) => {
if (err) {
reject(err);
return;
}
resolve(readStream);
});
});
}
export async function openZipBuffer(
zipFile: ZipFile,
entry: ZipEntry,
): Promise<Buffer> {
const readable = await openZipReadStream(zipFile, entry);
return new Promise((resolve, reject) => {
const chunks: Buffer[] = [];
readable.on("data", (chunk) => {
chunks.push(chunk);
});
readable.on("error", (err) => {
reject(err);
});
readable.on("end", () => {
resolve(Buffer.concat(chunks));
});
});
}
async function copyStream(
readable: Readable,
writeStream: WriteStream,
): Promise<void> {
return new Promise((resolve, reject) => {
readable.on("error", (err) => {
reject(err);
});
readable.on("end", () => {
resolve();
});
readable.pipe(writeStream);
});
}
export async function unzipToDirectory(
archivePath: string,
destinationPath: string,
): Promise<void> {
const zipFile = await openZip(archivePath, {
autoClose: false,
strictFileNames: true,
lazyEntries: true,
});
try {
const entries = await readZipEntries(zipFile);
for (const entry of entries) {
const path = join(destinationPath, entry.fileName);
if (/\/$/.test(entry.fileName)) {
// Directory file names end with '/'
await ensureDir(path);
} else {
// Ensure the directory exists
await ensureDir(dirname(path));
const readable = await openZipReadStream(zipFile, entry);
let mode: number | undefined = entry.externalFileAttributes >>> 16;
if (mode <= 0) {
mode = undefined;
}
const writeStream = createWriteStream(path, {
autoClose: true,
mode,
});
await copyStream(readable, writeStream);
}
}
} finally {
zipFile.close();
}
}
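
A hedged usage sketch of the new yauzl-based helpers (the archive paths are made up): `unzipToDirectory` extracts a whole archive and preserves the mode bits stored in each entry, while `openZipBuffer` reads a single entry into memory.

```ts
import {
  excludeDirectories,
  openZip,
  openZipBuffer,
  readZipEntries,
  unzipToDirectory,
} from "./unzip";

async function example() {
  // Extract the whole archive to a directory.
  await unzipToDirectory("/tmp/codeql.zip", "/tmp/codeql");

  // Or read a single entry into memory without extracting everything.
  const zipFile = await openZip("/tmp/codeql.zip", {
    lazyEntries: true,
    autoClose: false,
    strictFileNames: true,
  });
  try {
    const entries = excludeDirectories(await readZipEntries(zipFile));
    const first = entries[0];
    if (first !== undefined) {
      const contents = await openZipBuffer(zipFile, first);
      console.log(`${first.fileName}: ${contents.length} bytes`);
    }
  } finally {
    zipFile.close();
  }
}
```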

View File

@@ -1,7 +1,13 @@
import { pathExists } from "fs-extra";
import * as unzipper from "unzipper";
import { Entry as ZipEntry, ZipFile } from "yauzl";
import * as vscode from "vscode";
import { extLogger } from "../logging/vscode";
import {
excludeDirectories,
openZip,
openZipBuffer,
readZipEntries,
} from "../unzip";
// All path operations in this file must be on paths *within* the zip
// archive.
@@ -177,7 +183,8 @@ function ensureDir(map: DirectoryHierarchyMap, dir: string) {
}
type Archive = {
unzipped: unzipper.CentralDirectory;
zipFile: ZipFile;
entries: ZipEntry[];
dirMap: DirectoryHierarchyMap;
};
@@ -185,12 +192,22 @@ async function parse_zip(zipPath: string): Promise<Archive> {
if (!(await pathExists(zipPath))) {
throw vscode.FileSystemError.FileNotFound(zipPath);
}
const zipFile = await openZip(zipPath, {
lazyEntries: true,
autoClose: false,
strictFileNames: true,
});
const entries = excludeDirectories(await readZipEntries(zipFile));
const archive: Archive = {
unzipped: await unzipper.Open.file(zipPath),
zipFile,
entries,
dirMap: new Map(),
};
archive.unzipped.files.forEach((f) => {
ensureFile(archive.dirMap, path.resolve("/", f.path));
entries.forEach((f) => {
ensureFile(archive.dirMap, path.resolve("/", f.fileName));
});
return archive;
}
@@ -276,22 +293,16 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
// use '/' as path separator throughout
const reqPath = ref.pathWithinSourceArchive;
const file = archive.unzipped.files.find((f) => {
const absolutePath = path.resolve("/", f.path);
const file = archive.entries.find((f) => {
const absolutePath = path.resolve("/", f.fileName);
return (
absolutePath === reqPath ||
absolutePath === path.join("/src_archive", reqPath)
);
});
if (file !== undefined) {
if (file.type === "File") {
return new File(reqPath, await file.buffer());
} else {
// file.type === 'Directory'
// I haven't observed this case in practice. Could it happen
// with a zip file that contains empty directories?
return new Directory(reqPath);
}
const buffer = await openZipBuffer(archive.zipFile, file);
return new File(reqPath, buffer);
}
if (archive.dirMap.has(reqPath)) {
return new Directory(reqPath);

View File

@@ -162,8 +162,8 @@ export class ExtensionTelemetryListener
const status = !error
? CommandCompletion.Success
: error instanceof UserCancellationException
? CommandCompletion.Cancelled
: CommandCompletion.Failed;
? CommandCompletion.Cancelled
: CommandCompletion.Failed;
this.reporter.sendTelemetryEvent(
"command-usage",

View File

@@ -1,11 +0,0 @@
import { Open } from "unzipper";
/**
* Unzips a zip file to a directory.
* @param sourcePath The path to the zip file.
* @param destinationPath The path to the directory to unzip to.
*/
export async function unzipFile(sourcePath: string, destinationPath: string) {
const file = await Open.file(sourcePath);
await file.extract({ path: destinationPath });
}

View File

@@ -1,7 +1,10 @@
import { ViewColumn } from "vscode";
import {
ALERTS_TABLE_NAME,
FromCompareViewMessage,
InterpretedQueryCompareResult,
QueryCompareResult,
RawQueryCompareResult,
ToCompareViewMessage,
} from "../common/interface-types";
@@ -10,7 +13,7 @@ import { extLogger } from "../common/logging/vscode";
import { CodeQLCliServer } from "../codeql-cli/cli";
import { DatabaseManager } from "../databases/local-databases";
import { jumpToLocation } from "../databases/local-databases/locations";
import { BQRSInfo, DecodedBqrsChunk } from "../common/bqrs-cli-types";
import { BqrsInfo } from "../common/bqrs-cli-types";
import resultsDiff from "./resultsDiff";
import { CompletedLocalQueryInfo } from "../query-results";
import { assertNever, getErrorMessage } from "../common/helpers-pure";
@@ -22,16 +25,21 @@ import {
import { telemetryListener } from "../common/vscode/telemetry";
import { redactableError } from "../common/errors";
import { App } from "../common/app";
import { bqrsToResultSet } from "../common/bqrs-raw-results-mapper";
import { RawResultSet } from "../common/raw-result-types";
import {
CompareQueryInfo,
findCommonResultSetNames,
findResultSetNames,
getResultSetNames,
} from "./result-set-names";
import { compareInterpretedResults } from "./interpreted-results";
interface ComparePair {
from: CompletedLocalQueryInfo;
fromSchemas: BQRSInfo;
fromInfo: CompareQueryInfo;
to: CompletedLocalQueryInfo;
toSchemas: BQRSInfo;
toInfo: CompareQueryInfo;
commonResultSetNames: readonly string[];
}
@@ -60,23 +68,48 @@ export class CompareView extends AbstractWebview<
to: CompletedLocalQueryInfo,
selectedResultSetName?: string,
) {
const fromSchemas = await this.cliServer.bqrsInfo(
from.completedQuery.query.resultsPaths.resultsPath,
);
const toSchemas = await this.cliServer.bqrsInfo(
to.completedQuery.query.resultsPaths.resultsPath,
);
const [fromSchemas, toSchemas] = await Promise.all([
this.cliServer.bqrsInfo(
from.completedQuery.query.resultsPaths.resultsPath,
),
this.cliServer.bqrsInfo(to.completedQuery.query.resultsPaths.resultsPath),
]);
const commonResultSetNames = await findCommonResultSetNames(
fromSchemas,
toSchemas,
const [fromSchemaNames, toSchemaNames] = await Promise.all([
getResultSetNames(
fromSchemas,
from.completedQuery.query.metadata,
from.completedQuery.query.resultsPaths.interpretedResultsPath,
),
getResultSetNames(
toSchemas,
to.completedQuery.query.metadata,
to.completedQuery.query.resultsPaths.interpretedResultsPath,
),
]);
const commonResultSetNames = findCommonResultSetNames(
fromSchemaNames,
toSchemaNames,
);
this.comparePair = {
from,
fromSchemas,
fromInfo: {
schemas: fromSchemas,
schemaNames: fromSchemaNames,
metadata: from.completedQuery.query.metadata,
interpretedResultsPath:
from.completedQuery.query.resultsPaths.interpretedResultsPath,
},
to,
toSchemas,
toInfo: {
schemas: toSchemas,
schemaNames: toSchemaNames,
metadata: to.completedQuery.query.metadata,
interpretedResultsPath:
to.completedQuery.query.resultsPaths.interpretedResultsPath,
},
commonResultSetNames,
};
@@ -117,16 +150,28 @@ export class CompareView extends AbstractWebview<
panel.reveal(undefined, true);
await this.waitForPanelLoaded();
const { currentResultSetDisplayName, fromResultSet, toResultSet } =
await this.findResultSetsToCompare(
this.comparePair,
selectedResultSetName,
);
const {
currentResultSetName,
currentResultSetDisplayName,
fromResultSetName,
toResultSetName,
} = await this.findResultSetsToCompare(
this.comparePair,
selectedResultSetName,
);
if (currentResultSetDisplayName) {
let result: RawQueryCompareResult | undefined;
let result: QueryCompareResult | undefined;
let message: string | undefined;
try {
result = this.compareResults(fromResultSet, toResultSet);
if (currentResultSetName === ALERTS_TABLE_NAME) {
result = await this.compareInterpretedResults(this.comparePair);
} else {
result = await this.compareResults(
this.comparePair,
fromResultSetName,
toResultSetName,
);
}
} catch (e) {
message = getErrorMessage(e);
}
@@ -203,31 +248,27 @@ export class CompareView extends AbstractWebview<
}
private async findResultSetsToCompare(
{ from, fromSchemas, to, toSchemas, commonResultSetNames }: ComparePair,
{ fromInfo, toInfo, commonResultSetNames }: ComparePair,
selectedResultSetName: string | undefined,
) {
const { currentResultSetDisplayName, fromResultSetName, toResultSetName } =
await findResultSetNames(
fromSchemas,
toSchemas,
commonResultSetNames,
selectedResultSetName,
);
const fromResultSet = await this.getResultSet(
fromSchemas,
fromResultSetName,
from.completedQuery.query.resultsPaths.resultsPath,
);
const toResultSet = await this.getResultSet(
toSchemas,
toResultSetName,
to.completedQuery.query.resultsPaths.resultsPath,
);
return {
const {
currentResultSetName,
currentResultSetDisplayName,
fromResultSet,
toResultSet,
fromResultSetName,
toResultSetName,
} = await findResultSetNames(
fromInfo,
toInfo,
commonResultSetNames,
selectedResultSetName,
);
return {
commonResultSetNames,
currentResultSetName,
currentResultSetDisplayName,
fromResultSetName,
toResultSetName,
};
}
@@ -236,25 +277,51 @@ export class CompareView extends AbstractWebview<
}
private async getResultSet(
bqrsInfo: BQRSInfo,
bqrsInfo: BqrsInfo,
resultSetName: string,
resultsPath: string,
): Promise<DecodedBqrsChunk> {
): Promise<RawResultSet> {
const schema = bqrsInfo["result-sets"].find(
(schema) => schema.name === resultSetName,
);
if (!schema) {
throw new Error(`Schema ${resultSetName} not found.`);
}
return await this.cliServer.bqrsDecode(resultsPath, resultSetName);
const chunk = await this.cliServer.bqrsDecode(resultsPath, resultSetName);
return bqrsToResultSet(schema, chunk);
}
private compareResults(
fromResults: DecodedBqrsChunk,
toResults: DecodedBqrsChunk,
): RawQueryCompareResult {
// Only compare columns that have the same name
return resultsDiff(fromResults, toResults);
private async compareResults(
{ from, fromInfo, to, toInfo }: ComparePair,
fromResultSetName: string,
toResultSetName: string,
): Promise<RawQueryCompareResult> {
const [fromResultSet, toResultSet] = await Promise.all([
this.getResultSet(
fromInfo.schemas,
fromResultSetName,
from.completedQuery.query.resultsPaths.resultsPath,
),
this.getResultSet(
toInfo.schemas,
toResultSetName,
to.completedQuery.query.resultsPaths.resultsPath,
),
]);
return resultsDiff(fromResultSet, toResultSet);
}
private async compareInterpretedResults({
from,
to,
}: ComparePair): Promise<InterpretedQueryCompareResult> {
return compareInterpretedResults(
this.databaseManager,
this.cliServer,
from,
to,
);
}
private async openQuery(kind: "from" | "to") {

View File

@@ -0,0 +1,72 @@
import { Uri } from "vscode";
import * as sarif from "sarif";
import { pathExists } from "fs-extra";
import { sarifParser } from "../common/sarif-parser";
import { CompletedLocalQueryInfo } from "../query-results";
import { DatabaseManager } from "../databases/local-databases";
import { CodeQLCliServer } from "../codeql-cli/cli";
import { InterpretedQueryCompareResult } from "../common/interface-types";
import { sarifDiff } from "./sarif-diff";
async function getInterpretedResults(
interpretedResultsPath: string,
): Promise<sarif.Log | undefined> {
if (!(await pathExists(interpretedResultsPath))) {
return undefined;
}
return await sarifParser(interpretedResultsPath);
}
export async function compareInterpretedResults(
databaseManager: DatabaseManager,
cliServer: CodeQLCliServer,
fromQuery: CompletedLocalQueryInfo,
toQuery: CompletedLocalQueryInfo,
): Promise<InterpretedQueryCompareResult> {
const database = databaseManager.findDatabaseItem(
Uri.parse(toQuery.initialInfo.databaseInfo.databaseUri),
);
if (!database) {
throw new Error(
"Could not find database the queries. Please check that the database still exists.",
);
}
const [fromResultSet, toResultSet, sourceLocationPrefix] = await Promise.all([
getInterpretedResults(
fromQuery.completedQuery.query.resultsPaths.interpretedResultsPath,
),
getInterpretedResults(
toQuery.completedQuery.query.resultsPaths.interpretedResultsPath,
),
database.getSourceLocationPrefix(cliServer),
]);
if (!fromResultSet || !toResultSet) {
throw new Error(
"Could not find interpreted results for one or both queries.",
);
}
const fromResults = fromResultSet.runs[0].results;
const toResults = toResultSet.runs[0].results;
if (!fromResults) {
throw new Error("No results found in the 'from' query.");
}
if (!toResults) {
throw new Error("No results found in the 'to' query.");
}
const { from, to } = sarifDiff(fromResults, toResults);
return {
kind: "interpreted",
sourceLocationPrefix,
from,
to,
};
}

View File

@@ -1,28 +1,49 @@
import { BQRSInfo } from "../common/bqrs-cli-types";
import { getDefaultResultSetName } from "../common/interface-types";
import { pathExists } from "fs-extra";
import { BqrsInfo } from "../common/bqrs-cli-types";
import {
ALERTS_TABLE_NAME,
getDefaultResultSetName,
QueryMetadata,
} from "../common/interface-types";
export async function findCommonResultSetNames(
fromSchemas: BQRSInfo,
toSchemas: BQRSInfo,
export async function getResultSetNames(
schemas: BqrsInfo,
metadata: QueryMetadata | undefined,
interpretedResultsPath: string | undefined,
): Promise<string[]> {
const fromSchemaNames = fromSchemas["result-sets"].map(
(schema) => schema.name,
);
const toSchemaNames = toSchemas["result-sets"].map((schema) => schema.name);
const schemaNames = schemas["result-sets"].map((schema) => schema.name);
if (metadata?.kind !== "graph" && interpretedResultsPath) {
if (await pathExists(interpretedResultsPath)) {
schemaNames.push(ALERTS_TABLE_NAME);
}
}
return schemaNames;
}
export function findCommonResultSetNames(
fromSchemaNames: string[],
toSchemaNames: string[],
): string[] {
return fromSchemaNames.filter((name) => toSchemaNames.includes(name));
}
export type CompareQueryInfo = {
schemas: BqrsInfo;
schemaNames: string[];
metadata: QueryMetadata | undefined;
interpretedResultsPath: string;
};
export async function findResultSetNames(
fromSchemas: BQRSInfo,
toSchemas: BQRSInfo,
from: CompareQueryInfo,
to: CompareQueryInfo,
commonResultSetNames: readonly string[],
selectedResultSetName: string | undefined,
) {
const fromSchemaNames = fromSchemas["result-sets"].map(
(schema) => schema.name,
);
const toSchemaNames = toSchemas["result-sets"].map((schema) => schema.name);
const fromSchemaNames = from.schemaNames;
const toSchemaNames = to.schemaNames;
// Fall back on the default result set names if there are no common ones.
const defaultFromResultSetName = fromSchemaNames.find((name) =>
@@ -47,6 +68,7 @@ export async function findResultSetNames(
const toResultSetName = currentResultSetName || defaultToResultSetName!;
return {
currentResultSetName,
currentResultSetDisplayName:
currentResultSetName ||
`${defaultFromResultSetName} <-> ${defaultToResultSetName}`,

View File

@@ -1,5 +1,5 @@
import { DecodedBqrsChunk } from "../common/bqrs-cli-types";
import { RawQueryCompareResult } from "../common/interface-types";
import { RawResultSet } from "../common/raw-result-types";
/**
* Compare the rows of two queries. Use deep equality to determine if
@@ -20,31 +20,31 @@ import { RawQueryCompareResult } from "../common/interface-types";
* 3. If the queries are 100% disjoint
*/
export default function resultsDiff(
fromResults: DecodedBqrsChunk,
toResults: DecodedBqrsChunk,
fromResults: RawResultSet,
toResults: RawResultSet,
): RawQueryCompareResult {
if (fromResults.columns.length !== toResults.columns.length) {
throw new Error("CodeQL Compare: Columns do not match.");
}
if (!fromResults.tuples.length) {
if (!fromResults.rows.length) {
throw new Error("CodeQL Compare: Source query has no results.");
}
if (!toResults.tuples.length) {
if (!toResults.rows.length) {
throw new Error("CodeQL Compare: Target query has no results.");
}
const results: RawQueryCompareResult = {
kind: "raw",
columns: fromResults.columns,
from: arrayDiff(fromResults.tuples, toResults.tuples),
to: arrayDiff(toResults.tuples, fromResults.tuples),
from: arrayDiff(fromResults.rows, toResults.rows),
to: arrayDiff(toResults.rows, fromResults.rows),
};
if (
fromResults.tuples.length === results.from.length &&
toResults.tuples.length === results.to.length
fromResults.rows.length === results.from.length &&
toResults.rows.length === results.to.length
) {
throw new Error("CodeQL Compare: No overlap between the selected queries.");
}
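/*
 * Illustrative sketch, not part of the diff above: the value-based row diff
 * that resultsDiff relies on, reproduced standalone so it can be run on its
 * own. Rows count as equal only when they JSON-stringify to the same string;
 * the sample rows below are made up for the example.
 */
function rowDiffSketch<T>(source: readonly T[], toRemove: readonly T[]): T[] {
// Serialize every row we want to subtract, then keep only the rows that do
// not serialize to one of those strings.
const rest = new Set(toRemove.map((row) => JSON.stringify(row)));
return source.filter((row) => !rest.has(JSON.stringify(row)));
}
const fromRows = [
["getUser", 12],
["getOrder", 30],
];
const toRows = [
["getOrder", 30],
["getOrder", 45],
];
console.log(rowDiffSketch(fromRows, toRows)); // [["getUser", 12]]  -> only in "from"
console.log(rowDiffSketch(toRows, fromRows)); // [["getOrder", 45]] -> only in "to"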

View File

@@ -0,0 +1,50 @@
import * as sarif from "sarif";
/**
* Compare the alerts of two queries. Use deep equality to determine if
* results have been added or removed across two invocations of a query.
*
* Assumptions:
*
* 1. Queries have the same sort order
* 2. Results are not changed or re-ordered, they are only added or removed
*
* @param fromResults the source query
* @param toResults the target query
*
* @throws Error when:
* 1. If either query is empty
* 2. If the queries are 100% disjoint
*/
export function sarifDiff(
fromResults: sarif.Result[],
toResults: sarif.Result[],
) {
if (!fromResults.length) {
throw new Error("CodeQL Compare: Source query has no results.");
}
if (!toResults.length) {
throw new Error("CodeQL Compare: Target query has no results.");
}
const results = {
from: arrayDiff(fromResults, toResults),
to: arrayDiff(toResults, fromResults),
};
if (
fromResults.length === results.from.length &&
toResults.length === results.to.length
) {
throw new Error("CodeQL Compare: No overlap between the selected queries.");
}
return results;
}
function arrayDiff<T>(source: readonly T[], toRemove: readonly T[]): T[] {
// Stringify the object so that we can compare hashes in the set
const rest = new Set(toRemove.map((item) => JSON.stringify(item)));
return source.filter((element) => !rest.has(JSON.stringify(element)));
}
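/*
 * Illustrative sketch, assuming the `sarif` import at the top of this file:
 * how the from/to halves of sarifDiff's output are read. The two Result
 * arrays are minimal hand-written examples; in the extension the real inputs
 * come from runs[0].results of the parsed SARIF logs, as shown in
 * compare-interpreted-results.ts above.
 */
const oldRun: sarif.Result[] = [
{ ruleId: "js/sql-injection", message: { text: "User input flows into query." } },
{ ruleId: "js/xss", message: { text: "Unsanitized value written to HTML." } },
];
const newRun: sarif.Result[] = [
{ ruleId: "js/xss", message: { text: "Unsanitized value written to HTML." } },
];
const { from, to } = sarifDiff(oldRun, newRun);
// from -> [the sql-injection alert]: present only in the old run
// to   -> []                       : no alert is present only in the new run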

View File

@@ -123,14 +123,6 @@ export interface DistributionConfig {
repositoryName?: string;
channel: CLIChannel;
onDidChangeConfiguration?: Event<void>;
/**
* This forces an update of the distribution, even if the settings haven't changed.
*
* This should only be used when the distribution has been updated outside of the extension
* and only in tests. It should not be called in production code.
*/
forceUpdateConfiguration(): void;
}
// Query server configuration
@@ -269,7 +261,10 @@ export class DistributionConfigListener
implements DistributionConfig
{
public get customCodeQlPath(): string | undefined {
return CUSTOM_CODEQL_PATH_SETTING.getValue() || undefined;
const testCliPath =
isIntegrationTestMode() &&
process.env.VSCODE_CODEQL_TESTING_CODEQL_CLI_TEST_PATH;
return CUSTOM_CODEQL_PATH_SETTING.getValue() || testCliPath || undefined;
}
public get includePrerelease(): boolean {
@@ -293,10 +288,6 @@ export class DistributionConfigListener
: "released";
}
public forceUpdateConfiguration() {
this._onDidChangeConfiguration.fire(undefined);
}
protected handleDidChangeConfiguration(e: ConfigurationChangeEvent): void {
this.handleDidChangeConfigurationForRelevantSettings(
DISTRIBUTION_CHANGE_SETTINGS,
@@ -736,7 +727,6 @@ export interface ModelConfig {
flowGeneration: boolean;
llmGeneration: boolean;
getExtensionsDirectory(languageId: string): string | undefined;
showMultipleModels: boolean;
enableRuby: boolean;
}
@@ -775,10 +765,6 @@ export class ModelConfigListener extends ConfigListener implements ModelConfig {
});
}
public get showMultipleModels(): boolean {
return isCanary();
}
public get enableRuby(): boolean {
return !!ENABLE_RUBY.getValue<boolean>();
}
@@ -786,8 +772,6 @@ export class ModelConfigListener extends ConfigListener implements ModelConfig {
const GITHUB_DATABASE_SETTING = new Setting("githubDatabase", ROOT_SETTING);
// Feature flag for the GitHub database download.
const GITHUB_DATABASE_ENABLE = new Setting("enable", GITHUB_DATABASE_SETTING);
const GITHUB_DATABASE_DOWNLOAD = new Setting(
"download",
GITHUB_DATABASE_SETTING,
@@ -802,7 +786,6 @@ const GitHubDatabaseUpdateValues = ["ask", "never"] as const;
type GitHubDatabaseUpdate = (typeof GitHubDatabaseUpdateValues)[number];
export interface GitHubDatabaseConfig {
enable: boolean;
download: GitHubDatabaseDownload;
update: GitHubDatabaseUpdate;
setDownload(
@@ -826,10 +809,6 @@ export class GitHubDatabaseConfigListener
);
}
public get enable() {
return !!GITHUB_DATABASE_ENABLE.getValue<boolean>();
}
public get download(): GitHubDatabaseDownload {
const value = GITHUB_DATABASE_DOWNLOAD.getValue<GitHubDatabaseDownload>();
return GitHubDatabaseDownloadValues.includes(value) ? value : "ask";

View File

@@ -1,17 +1,11 @@
import { pathExists, outputJSON, readJSON, readJSONSync } from "fs-extra";
import { join } from "path";
import {
clearLocalDbConfig,
cloneDbConfig,
DbConfig,
initializeLocalDbConfig,
removeLocalDb,
removeLocalList,
removeRemoteList,
removeRemoteOwner,
removeRemoteRepo,
renameLocalDb,
renameLocalList,
renameRemoteList,
SelectedDbItem,
DB_CONFIG_VERSION,
@@ -30,13 +24,7 @@ import {
DbConfigValidationErrorKind,
} from "../db-validation-errors";
import { ValueResult } from "../../common/value-result";
import {
LocalDatabaseDbItem,
LocalListDbItem,
RemoteUserDefinedListDbItem,
DbItem,
DbItemKind,
} from "../db-item";
import { RemoteUserDefinedListDbItem, DbItem, DbItemKind } from "../db-item";
export class DbConfigStore extends DisposableObject {
public static readonly databaseConfigFileName = "databases.json";
@@ -119,20 +107,9 @@ export class DbConfigStore extends DisposableObject {
let config: DbConfig;
switch (dbItem.kind) {
case DbItemKind.LocalList:
config = removeLocalList(this.config, dbItem.listName);
break;
case DbItemKind.RemoteUserDefinedList:
config = removeRemoteList(this.config, dbItem.listName);
break;
case DbItemKind.LocalDatabase:
// When we start using local databases these need to be removed from disk as well.
config = removeLocalDb(
this.config,
dbItem.databaseName,
dbItem.parentListName,
);
break;
case DbItemKind.RemoteRepo:
config = removeRemoteRepo(
this.config,
@@ -229,22 +206,6 @@ export class DbConfigStore extends DisposableObject {
await this.writeConfig(config);
}
public async addLocalList(listName: string): Promise<void> {
if (!this.config) {
throw Error("Cannot add local list if config is not loaded");
}
this.validateLocalListName(listName);
const config = cloneDbConfig(this.config);
config.databases.local.lists.push({
name: listName,
databases: [],
});
await this.writeConfig(config);
}
public async addRemoteList(listName: string): Promise<void> {
if (!this.config) {
throw Error("Cannot add variant analysis list if config is not loaded");
@@ -261,25 +222,6 @@ export class DbConfigStore extends DisposableObject {
await this.writeConfig(config);
}
public async renameLocalList(
currentDbItem: LocalListDbItem,
newName: string,
) {
if (!this.config) {
throw Error("Cannot rename local list if config is not loaded");
}
this.validateLocalListName(newName);
const updatedConfig = renameLocalList(
this.config,
currentDbItem.listName,
newName,
);
await this.writeConfig(updatedConfig);
}
public async renameRemoteList(
currentDbItem: RemoteUserDefinedListDbItem,
newName: string,
@@ -301,27 +243,6 @@ export class DbConfigStore extends DisposableObject {
await this.writeConfig(updatedConfig);
}
public async renameLocalDb(
currentDbItem: LocalDatabaseDbItem,
newName: string,
parentListName?: string,
): Promise<void> {
if (!this.config) {
throw Error("Cannot rename local db if config is not loaded");
}
this.validateLocalDbName(newName);
const updatedConfig = renameLocalDb(
this.config,
currentDbItem.databaseName,
newName,
parentListName,
);
await this.writeConfig(updatedConfig);
}
public doesRemoteListExist(listName: string): boolean {
if (!this.config) {
throw Error(
@@ -334,31 +255,6 @@ export class DbConfigStore extends DisposableObject {
);
}
public doesLocalListExist(listName: string): boolean {
if (!this.config) {
throw Error("Cannot check local list existence if config is not loaded");
}
return this.config.databases.local.lists.some((l) => l.name === listName);
}
public doesLocalDbExist(dbName: string, listName?: string): boolean {
if (!this.config) {
throw Error(
"Cannot check variant analysis repository existence if config is not loaded",
);
}
if (listName) {
return this.config.databases.local.lists.some(
(l) =>
l.name === listName && l.databases.some((d) => d.name === dbName),
);
}
return this.config.databases.local.databases.some((d) => d.name === dbName);
}
public doesRemoteDbExist(dbName: string, listName?: string): boolean {
if (!this.config) {
throw Error(
@@ -384,7 +280,6 @@ export class DbConfigStore extends DisposableObject {
}
private async writeConfig(config: DbConfig): Promise<void> {
clearLocalDbConfig(config);
await outputJSON(this.configPath, config, {
spaces: 2,
});
@@ -416,7 +311,6 @@ export class DbConfigStore extends DisposableObject {
}
if (newConfig) {
initializeLocalDbConfig(newConfig);
this.configErrors = this.configValidator.validate(newConfig);
}
@@ -451,7 +345,6 @@ export class DbConfigStore extends DisposableObject {
}
if (newConfig) {
initializeLocalDbConfig(newConfig);
this.configErrors = this.configValidator.validate(newConfig);
}
@@ -499,10 +392,6 @@ export class DbConfigStore extends DisposableObject {
owners: [],
repositories: [],
},
local: {
lists: [],
databases: [],
},
},
selected: {
kind: SelectedDbItemKind.VariantAnalysisSystemDefinedList,
@@ -511,16 +400,6 @@ export class DbConfigStore extends DisposableObject {
};
}
private validateLocalListName(listName: string): void {
if (listName === "") {
throw Error("List name cannot be empty");
}
if (this.doesLocalListExist(listName)) {
throw Error(`A local list with the name '${listName}' already exists`);
}
}
private validateRemoteListName(listName: string): void {
if (listName === "") {
throw Error("List name cannot be empty");
@@ -532,14 +411,4 @@ export class DbConfigStore extends DisposableObject {
);
}
}
private validateLocalDbName(dbName: string): void {
if (dbName === "") {
throw Error("Database name cannot be empty");
}
if (this.doesLocalDbExist(dbName)) {
throw Error(`A local database with the name '${dbName}' already exists`);
}
}
}

View File

@@ -1,7 +1,7 @@
import { readJsonSync } from "fs-extra";
import { resolve } from "path";
import Ajv, { ValidateFunction } from "ajv";
import { clearLocalDbConfig, DbConfig } from "./db-config";
import { DbConfig } from "./db-config";
import { findDuplicateStrings } from "../../common/text-utils";
import {
DbConfigValidationError,
@@ -19,8 +19,6 @@ export class DbConfigValidator {
}
public validate(dbConfig: DbConfig): DbConfigValidationError[] {
const localDbs = clearLocalDbConfig(dbConfig);
this.validateSchemaFn(dbConfig);
if (this.validateSchemaFn.errors) {
@@ -30,13 +28,6 @@ export class DbConfigValidator {
}));
}
// Add any local db config back so that we have a config
// object that respects its type and validation can happen
// as normal.
if (localDbs) {
dbConfig.databases.local = localDbs;
}
return [
...this.validateDbListNames(dbConfig),
...this.validateDbNames(dbConfig),
@@ -55,14 +46,6 @@ export class DbConfigValidator {
)}`,
});
const duplicateLocalDbLists = findDuplicateStrings(
dbConfig.databases.local.lists.map((n) => n.name),
);
if (duplicateLocalDbLists.length > 0) {
errors.push(buildError(duplicateLocalDbLists));
}
const duplicateRemoteDbLists = findDuplicateStrings(
dbConfig.databases.variantAnalysis.repositoryLists.map((n) => n.name),
);
@@ -81,14 +64,6 @@ export class DbConfigValidator {
message: `There are databases with the same name: ${dups.join(", ")}`,
});
const duplicateLocalDbs = findDuplicateStrings(
dbConfig.databases.local.databases.map((d) => d.name),
);
if (duplicateLocalDbs.length > 0) {
errors.push(buildError(duplicateLocalDbs));
}
const duplicateRemoteDbs = findDuplicateStrings(
dbConfig.databases.variantAnalysis.repositories,
);
@@ -111,13 +86,6 @@ export class DbConfigValidator {
)}`,
});
for (const list of dbConfig.databases.local.lists) {
const dups = findDuplicateStrings(list.databases.map((d) => d.name));
if (dups.length > 0) {
errors.push(buildError(list.name, dups));
}
}
for (const list of dbConfig.databases.variantAnalysis.repositoryLists) {
const dups = findDuplicateStrings(list.repositories);
if (dups.length > 0) {

View File

@@ -1,8 +1,6 @@
// Contains models and consts for the data we want to store in the database config.
// Changes to these models should be done carefully and account for backwards compatibility of data.
import { DatabaseOrigin } from "../local-databases/database-origin";
export const DB_CONFIG_VERSION = 1;
export interface DbConfig {
@@ -13,37 +11,21 @@ export interface DbConfig {
interface DbConfigDatabases {
variantAnalysis: RemoteDbConfig;
local: LocalDbConfig;
}
export type SelectedDbItem =
| SelectedLocalUserDefinedList
| SelectedLocalDatabase
| SelectedRemoteSystemDefinedList
| SelectedVariantAnalysisUserDefinedList
| SelectedRemoteOwner
| SelectedRemoteRepository;
export enum SelectedDbItemKind {
LocalUserDefinedList = "localUserDefinedList",
LocalDatabase = "localDatabase",
VariantAnalysisSystemDefinedList = "variantAnalysisSystemDefinedList",
VariantAnalysisUserDefinedList = "variantAnalysisUserDefinedList",
VariantAnalysisOwner = "variantAnalysisOwner",
VariantAnalysisRepository = "variantAnalysisRepository",
}
interface SelectedLocalUserDefinedList {
kind: SelectedDbItemKind.LocalUserDefinedList;
listName: string;
}
interface SelectedLocalDatabase {
kind: SelectedDbItemKind.LocalDatabase;
databaseName: string;
listName?: string;
}
interface SelectedRemoteSystemDefinedList {
kind: SelectedDbItemKind.VariantAnalysisSystemDefinedList;
listName: string;
@@ -76,24 +58,6 @@ export interface RemoteRepositoryList {
repositories: string[];
}
interface LocalDbConfig {
lists: LocalList[];
databases: LocalDatabase[];
}
export interface LocalList {
name: string;
databases: LocalDatabase[];
}
export interface LocalDatabase {
name: string;
dateAdded: number;
language: string;
origin: DatabaseOrigin;
storagePath: string;
}
export function cloneDbConfig(config: DbConfig): DbConfig {
return {
version: config.version,
@@ -108,13 +72,6 @@ export function cloneDbConfig(config: DbConfig): DbConfig {
owners: [...config.databases.variantAnalysis.owners],
repositories: [...config.databases.variantAnalysis.repositories],
},
local: {
lists: config.databases.local.lists.map((list) => ({
name: list.name,
databases: list.databases.map((db) => ({ ...db })),
})),
databases: config.databases.local.databases.map((db) => ({ ...db })),
},
},
selected: config.selected
? cloneDbConfigSelectedItem(config.selected)
@@ -122,28 +79,6 @@ export function cloneDbConfig(config: DbConfig): DbConfig {
};
}
export function renameLocalList(
originalConfig: DbConfig,
currentListName: string,
newListName: string,
): DbConfig {
const config = cloneDbConfig(originalConfig);
const list = getLocalList(config, currentListName);
list.name = newListName;
if (
config.selected?.kind === SelectedDbItemKind.LocalUserDefinedList ||
config.selected?.kind === SelectedDbItemKind.LocalDatabase
) {
if (config.selected.listName === currentListName) {
config.selected.listName = newListName;
}
}
return config;
}
export function renameRemoteList(
originalConfig: DbConfig,
currentListName: string,
@@ -167,67 +102,6 @@ export function renameRemoteList(
return config;
}
export function renameLocalDb(
originalConfig: DbConfig,
currentDbName: string,
newDbName: string,
parentListName?: string,
): DbConfig {
const config = cloneDbConfig(originalConfig);
if (parentListName) {
const list = getLocalList(config, parentListName);
const dbIndex = list.databases.findIndex((db) => db.name === currentDbName);
if (dbIndex === -1) {
throw Error(
`Cannot find database '${currentDbName}' in list '${parentListName}'`,
);
}
list.databases[dbIndex].name = newDbName;
} else {
const dbIndex = config.databases.local.databases.findIndex(
(db) => db.name === currentDbName,
);
if (dbIndex === -1) {
throw Error(`Cannot find database '${currentDbName}' in local databases`);
}
config.databases.local.databases[dbIndex].name = newDbName;
}
if (
config.selected?.kind === SelectedDbItemKind.LocalDatabase &&
config.selected.databaseName === currentDbName
) {
config.selected.databaseName = newDbName;
}
return config;
}
export function removeLocalList(
originalConfig: DbConfig,
listName: string,
): DbConfig {
const config = cloneDbConfig(originalConfig);
config.databases.local.lists = config.databases.local.lists.filter(
(list) => list.name !== listName,
);
if (config.selected?.kind === SelectedDbItemKind.LocalUserDefinedList) {
config.selected = undefined;
}
if (
config.selected?.kind === SelectedDbItemKind.LocalDatabase &&
config.selected?.listName === listName
) {
config.selected = undefined;
}
return config;
}
export function removeRemoteList(
originalConfig: DbConfig,
listName: string,
@@ -255,35 +129,6 @@ export function removeRemoteList(
return config;
}
export function removeLocalDb(
originalConfig: DbConfig,
databaseName: string,
parentListName?: string,
): DbConfig {
const config = cloneDbConfig(originalConfig);
if (parentListName) {
const parentList = getLocalList(config, parentListName);
parentList.databases = parentList.databases.filter(
(db) => db.name !== databaseName,
);
} else {
config.databases.local.databases = config.databases.local.databases.filter(
(db) => db.name !== databaseName,
);
}
if (
config.selected?.kind === SelectedDbItemKind.LocalDatabase &&
config.selected?.databaseName === databaseName &&
config.selected?.listName === parentListName
) {
config.selected = undefined;
}
return config;
}
export function removeRemoteRepo(
originalConfig: DbConfig,
repoFullName: string,
@@ -333,51 +178,8 @@ export function removeRemoteOwner(
return config;
}
/**
* Removes local db config from a db config object, if one is set.
* We do this because we don't want to expose this feature to users
* yet (since it's only partially implemented), but we also don't want
* to remove all the code we've already implemented.
* @param config The config object to change.
* @returns Any removed local db config.
*/
export function clearLocalDbConfig(
config: DbConfig,
): LocalDbConfig | undefined {
let localDbs = undefined;
if (config && config.databases && config.databases.local) {
localDbs = config.databases.local;
delete (config.databases as any).local;
}
return localDbs;
}
/**
* Initializes the local db config, if the config object contains
* database configuration.
* @param config The config object to change.
*/
export function initializeLocalDbConfig(config: DbConfig): void {
if (config.databases) {
config.databases.local = { lists: [], databases: [] };
}
}
function cloneDbConfigSelectedItem(selected: SelectedDbItem): SelectedDbItem {
switch (selected.kind) {
case SelectedDbItemKind.LocalUserDefinedList:
return {
kind: SelectedDbItemKind.LocalUserDefinedList,
listName: selected.listName,
};
case SelectedDbItemKind.LocalDatabase:
return {
kind: SelectedDbItemKind.LocalDatabase,
databaseName: selected.databaseName,
listName: selected.listName,
};
case SelectedDbItemKind.VariantAnalysisSystemDefinedList:
return {
kind: SelectedDbItemKind.VariantAnalysisSystemDefinedList,
@@ -402,16 +204,6 @@ function cloneDbConfigSelectedItem(selected: SelectedDbItem): SelectedDbItem {
}
}
function getLocalList(config: DbConfig, listName: string): LocalList {
const list = config.databases.local.lists.find((l) => l.name === listName);
if (!list) {
throw Error(`Cannot find local list '${listName}'`);
}
return list;
}
function getRemoteList(
config: DbConfig,
listName: string,

View File

@@ -1,6 +1,5 @@
import fetch, { Response } from "node-fetch";
import { zip } from "zip-a-folder";
import { Open } from "unzipper";
import { Uri, window, InputBoxOptions } from "vscode";
import { CodeQLCliServer } from "../codeql-cli/cli";
import {
@@ -46,7 +45,7 @@ export async function promptImportInternetDatabase(
databaseManager: DatabaseManager,
storagePath: string,
progress: ProgressCallback,
cli?: CodeQLCliServer,
cli: CodeQLCliServer,
): Promise<DatabaseItem | undefined> {
const databaseUrl = await window.showInputBox({
prompt: "Enter URL of zipfile of database to download",
@@ -101,7 +100,7 @@ export async function promptImportGithubDatabase(
storagePath: string,
credentials: Credentials | undefined,
progress: ProgressCallback,
cli?: CodeQLCliServer,
cli: CodeQLCliServer,
language?: string,
makeSelected = true,
addSourceArchiveFolder = addDatabaseSourceToWorkspace(),
@@ -180,7 +179,7 @@ export async function downloadGitHubDatabase(
storagePath: string,
credentials: Credentials | undefined,
progress: ProgressCallback,
cli?: CodeQLCliServer,
cli: CodeQLCliServer,
language?: string,
makeSelected = true,
addSourceArchiveFolder = addDatabaseSourceToWorkspace(),
@@ -235,7 +234,7 @@ export async function downloadGitHubDatabaseFromUrl(
progress: ProgressCallback,
databaseManager: DatabaseManager,
storagePath: string,
cli?: CodeQLCliServer,
cli: CodeQLCliServer,
makeSelected = true,
addSourceArchiveFolder = true,
): Promise<DatabaseItem | undefined> {
@@ -279,6 +278,7 @@ export async function downloadGitHubDatabaseFromUrl(
* @param databaseUrl the file url of the archive to import
* @param databaseManager the DatabaseManager
* @param storagePath where to store the unzipped database.
* @param cli the CodeQL CLI server
*/
export async function importArchiveDatabase(
commandManager: AppCommandManager,
@@ -286,7 +286,7 @@ export async function importArchiveDatabase(
databaseManager: DatabaseManager,
storagePath: string,
progress: ProgressCallback,
cli?: CodeQLCliServer,
cli: CodeQLCliServer,
): Promise<DatabaseItem | undefined> {
try {
const item = await databaseArchiveFetcher(
@@ -333,6 +333,7 @@ export async function importArchiveDatabase(
* @param nameOverride a name for the database that overrides the default
* @param origin the origin of the database
* @param progress callback to send progress messages to
* @param cli the CodeQL CLI server
* @param makeSelected make the new database selected in the databases panel (default: true)
* @param addSourceArchiveFolder whether to add a workspace folder containing the source archive to the workspace
*/
@@ -344,7 +345,7 @@ async function databaseArchiveFetcher(
nameOverride: string | undefined,
origin: DatabaseOrigin,
progress: ProgressCallback,
cli?: CodeQLCliServer,
cli: CodeQLCliServer,
makeSelected = true,
addSourceArchiveFolder = addDatabaseSourceToWorkspace(),
): Promise<DatabaseItem> {
@@ -443,34 +444,24 @@ function validateUrl(databaseUrl: string) {
async function readAndUnzip(
zipUrl: string,
unzipPath: string,
cli?: CodeQLCliServer,
cli: CodeQLCliServer,
progress?: ProgressCallback,
) {
// TODO: Providing progress as the file is unzipped is currently blocked
// on https://github.com/ZJONSSON/node-unzipper/issues/222
const zipFile = Uri.parse(zipUrl).fsPath;
progress?.({
maxStep: 10,
step: 9,
message: `Unzipping into ${basename(unzipPath)}`,
});
if (cli) {
// Use the `database unbundle` command if the installed cli version supports it
await cli.databaseUnbundle(zipFile, unzipPath);
} else {
// Must get the zip central directory since streaming the
// zip contents may not have correct local file headers.
// Instead, we can only rely on the central directory.
const directory = await Open.file(zipFile);
await directory.extract({ path: unzipPath });
}
await cli.databaseUnbundle(zipFile, unzipPath);
}
async function fetchAndUnzip(
databaseUrl: string,
requestHeaders: { [key: string]: string },
unzipPath: string,
cli?: CodeQLCliServer,
cli: CodeQLCliServer,
progress?: ProgressCallback,
) {
// Although it is possible to download and stream directly to an unzipped directory,

View File

@@ -1,27 +1,14 @@
import { DbItem, DbItemKind, flattenDbItems } from "./db-item";
export type ExpandedDbItem =
| RootLocalExpandedDbItem
| LocalUserDefinedListExpandedDbItem
| RootRemoteExpandedDbItem
| RemoteUserDefinedListExpandedDbItem;
export enum ExpandedDbItemKind {
RootLocal = "rootLocal",
LocalUserDefinedList = "localUserDefinedList",
RootRemote = "rootRemote",
RemoteUserDefinedList = "remoteUserDefinedList",
}
interface RootLocalExpandedDbItem {
kind: ExpandedDbItemKind.RootLocal;
}
interface LocalUserDefinedListExpandedDbItem {
kind: ExpandedDbItemKind.LocalUserDefinedList;
listName: string;
}
interface RootRemoteExpandedDbItem {
kind: ExpandedDbItemKind.RootRemote;
}
@@ -80,13 +67,6 @@ export function cleanNonExistentExpandedItems(
function mapDbItemToExpandedDbItem(dbItem: DbItem): ExpandedDbItem {
switch (dbItem.kind) {
case DbItemKind.RootLocal:
return { kind: ExpandedDbItemKind.RootLocal };
case DbItemKind.LocalList:
return {
kind: ExpandedDbItemKind.LocalUserDefinedList,
listName: dbItem.listName,
};
case DbItemKind.RootRemote:
return { kind: ExpandedDbItemKind.RootRemote };
case DbItemKind.RemoteUserDefinedList:
@@ -104,13 +84,6 @@ function isDbItemEqualToExpandedDbItem(
expandedDbItem: ExpandedDbItem,
) {
switch (dbItem.kind) {
case DbItemKind.RootLocal:
return expandedDbItem.kind === ExpandedDbItemKind.RootLocal;
case DbItemKind.LocalList:
return (
expandedDbItem.kind === ExpandedDbItemKind.LocalUserDefinedList &&
expandedDbItem.listName === dbItem.listName
);
case DbItemKind.RootRemote:
return expandedDbItem.kind === ExpandedDbItemKind.RootRemote;
case DbItemKind.RemoteUserDefinedList:
@@ -118,7 +91,6 @@ function isDbItemEqualToExpandedDbItem(
expandedDbItem.kind === ExpandedDbItemKind.RemoteUserDefinedList &&
expandedDbItem.listName === dbItem.listName
);
case DbItemKind.LocalDatabase:
case DbItemKind.RemoteSystemDefinedList:
case DbItemKind.RemoteOwner:
case DbItemKind.RemoteRepo:

View File

@@ -2,17 +2,13 @@ import { DbItem, DbItemKind } from "./db-item";
export function getDbItemName(dbItem: DbItem): string | undefined {
switch (dbItem.kind) {
case DbItemKind.RootLocal:
case DbItemKind.RootRemote:
return undefined;
case DbItemKind.LocalList:
case DbItemKind.RemoteUserDefinedList:
case DbItemKind.RemoteSystemDefinedList:
return dbItem.listName;
case DbItemKind.RemoteOwner:
return dbItem.ownerName;
case DbItemKind.LocalDatabase:
return dbItem.databaseName;
case DbItemKind.RemoteRepo:
return dbItem.repoFullName;
}

View File

@@ -1,12 +1,9 @@
import { DbItem, DbItemKind, LocalDbItem, RemoteDbItem } from "./db-item";
import { DbItem, DbItemKind, RemoteDbItem } from "./db-item";
import { SelectedDbItem, SelectedDbItemKind } from "./config/db-config";
export function getSelectedDbItem(dbItems: DbItem[]): DbItem | undefined {
for (const dbItem of dbItems) {
if (
dbItem.kind === DbItemKind.RootRemote ||
dbItem.kind === DbItemKind.RootLocal
) {
if (dbItem.kind === DbItemKind.RootRemote) {
for (const child of dbItem.children) {
const selectedItem = extractSelected(child);
if (selectedItem) {
@@ -23,20 +20,11 @@ export function getSelectedDbItem(dbItems: DbItem[]): DbItem | undefined {
return undefined;
}
function extractSelected(
dbItem: RemoteDbItem | LocalDbItem,
): DbItem | undefined {
function extractSelected(dbItem: RemoteDbItem): DbItem | undefined {
if (dbItem.selected) {
return dbItem;
}
switch (dbItem.kind) {
case DbItemKind.LocalList:
for (const database of dbItem.databases) {
if (database.selected) {
return database;
}
}
break;
case DbItemKind.RemoteUserDefinedList:
for (const repo of dbItem.repos) {
if (repo.selected) {
@@ -52,17 +40,10 @@ export function mapDbItemToSelectedDbItem(
dbItem: DbItem,
): SelectedDbItem | undefined {
switch (dbItem.kind) {
case DbItemKind.RootLocal:
case DbItemKind.RootRemote:
// Root items are not selectable.
return undefined;
case DbItemKind.LocalList:
return {
kind: SelectedDbItemKind.LocalUserDefinedList,
listName: dbItem.listName,
};
case DbItemKind.RemoteUserDefinedList:
return {
kind: SelectedDbItemKind.VariantAnalysisUserDefinedList,
@@ -81,13 +62,6 @@ export function mapDbItemToSelectedDbItem(
ownerName: dbItem.ownerName,
};
case DbItemKind.LocalDatabase:
return {
kind: SelectedDbItemKind.LocalDatabase,
databaseName: dbItem.databaseName,
listName: dbItem?.parentListName,
};
case DbItemKind.RemoteRepo:
return {
kind: SelectedDbItemKind.VariantAnalysisRepository,

View File

@@ -1,11 +1,6 @@
// This file contains models that are used to represent the databases.
import { DatabaseOrigin } from "./local-databases/database-origin";
export enum DbItemKind {
RootLocal = "RootLocal",
LocalList = "LocalList",
LocalDatabase = "LocalDatabase",
RootRemote = "RootRemote",
RemoteSystemDefinedList = "RemoteSystemDefinedList",
RemoteUserDefinedList = "RemoteUserDefinedList",
@@ -13,49 +8,13 @@ export enum DbItemKind {
RemoteRepo = "RemoteRepo",
}
export enum DbListKind {
Local = "Local",
Remote = "Remote",
}
export interface RootLocalDbItem {
kind: DbItemKind.RootLocal;
expanded: boolean;
children: LocalDbItem[];
}
export type LocalDbItem = LocalListDbItem | LocalDatabaseDbItem;
export interface LocalListDbItem {
kind: DbItemKind.LocalList;
expanded: boolean;
selected: boolean;
listName: string;
databases: LocalDatabaseDbItem[];
}
export interface LocalDatabaseDbItem {
kind: DbItemKind.LocalDatabase;
selected: boolean;
databaseName: string;
dateAdded: number;
language: string;
origin: DatabaseOrigin;
storagePath: string;
parentListName?: string;
}
export interface RootRemoteDbItem {
kind: DbItemKind.RootRemote;
expanded: boolean;
children: RemoteDbItem[];
}
export type DbItem =
| RootLocalDbItem
| RootRemoteDbItem
| RemoteDbItem
| LocalDbItem;
export type DbItem = RootRemoteDbItem | RemoteDbItem;
export type RemoteDbItem =
| RemoteSystemDefinedListDbItem
@@ -108,25 +67,13 @@ export function isRemoteRepoDbItem(dbItem: DbItem): dbItem is RemoteRepoDbItem {
return dbItem.kind === DbItemKind.RemoteRepo;
}
export function isLocalListDbItem(dbItem: DbItem): dbItem is LocalListDbItem {
return dbItem.kind === DbItemKind.LocalList;
}
export function isLocalDatabaseDbItem(
dbItem: DbItem,
): dbItem is LocalDatabaseDbItem {
return dbItem.kind === DbItemKind.LocalDatabase;
}
type SelectableDbItem = RemoteDbItem | LocalDbItem;
type SelectableDbItem = RemoteDbItem;
export function isSelectableDbItem(dbItem: DbItem): dbItem is SelectableDbItem {
return SelectableDbItemKinds.includes(dbItem.kind);
}
const SelectableDbItemKinds = [
DbItemKind.LocalList,
DbItemKind.LocalDatabase,
DbItemKind.RemoteSystemDefinedList,
DbItemKind.RemoteUserDefinedList,
DbItemKind.RemoteOwner,
@@ -139,19 +86,12 @@ export function flattenDbItems(dbItems: DbItem[]): DbItem[] {
for (const dbItem of dbItems) {
allItems.push(dbItem);
switch (dbItem.kind) {
case DbItemKind.RootLocal:
allItems.push(...flattenDbItems(dbItem.children));
break;
case DbItemKind.LocalList:
allItems.push(...flattenDbItems(dbItem.databases));
break;
case DbItemKind.RootRemote:
allItems.push(...flattenDbItems(dbItem.children));
break;
case DbItemKind.RemoteUserDefinedList:
allItems.push(...dbItem.repos);
break;
case DbItemKind.LocalDatabase:
case DbItemKind.RemoteSystemDefinedList:
case DbItemKind.RemoteOwner:
case DbItemKind.RemoteRepo:

View File

@@ -3,14 +3,7 @@ import { AppEvent, AppEventEmitter } from "../common/events";
import { ValueResult } from "../common/value-result";
import { DisposableObject } from "../common/disposable-object";
import { DbConfigStore } from "./config/db-config-store";
import {
DbItem,
DbItemKind,
DbListKind,
LocalDatabaseDbItem,
LocalListDbItem,
RemoteUserDefinedListDbItem,
} from "./db-item";
import { DbItem, RemoteUserDefinedListDbItem } from "./db-item";
import {
updateExpandedItem,
replaceExpandedItem,
@@ -116,31 +109,15 @@ export class DbManager extends DisposableObject {
await this.dbConfigStore.addRemoteOwner(owner);
}
public async addNewList(
listKind: DbListKind,
listName: string,
): Promise<void> {
switch (listKind) {
case DbListKind.Local:
await this.dbConfigStore.addLocalList(listName);
break;
case DbListKind.Remote:
await this.dbConfigStore.addRemoteList(listName);
break;
default:
throw Error(`Unknown list kind '${listKind}'`);
}
public async addNewList(listName: string): Promise<void> {
await this.dbConfigStore.addRemoteList(listName);
}
public async renameList(
currentDbItem: LocalListDbItem | RemoteUserDefinedListDbItem,
currentDbItem: RemoteUserDefinedListDbItem,
newName: string,
): Promise<void> {
if (currentDbItem.kind === DbItemKind.LocalList) {
await this.dbConfigStore.renameLocalList(currentDbItem, newName);
} else if (currentDbItem.kind === DbItemKind.RemoteUserDefinedList) {
await this.dbConfigStore.renameRemoteList(currentDbItem, newName);
}
await this.dbConfigStore.renameRemoteList(currentDbItem, newName);
const newDbItem = { ...currentDbItem, listName: newName };
const newExpandedItems = replaceExpandedItem(
@@ -152,26 +129,8 @@ export class DbManager extends DisposableObject {
await this.setExpandedItems(newExpandedItems);
}
public async renameLocalDb(
currentDbItem: LocalDatabaseDbItem,
newName: string,
): Promise<void> {
await this.dbConfigStore.renameLocalDb(
currentDbItem,
newName,
currentDbItem.parentListName,
);
}
public doesListExist(listKind: DbListKind, listName: string): boolean {
switch (listKind) {
case DbListKind.Local:
return this.dbConfigStore.doesLocalListExist(listName);
case DbListKind.Remote:
return this.dbConfigStore.doesRemoteListExist(listName);
default:
throw Error(`Unknown list kind '${listKind}'`);
}
public doesListExist(listName: string): boolean {
return this.dbConfigStore.doesRemoteListExist(listName);
}
public doesRemoteOwnerExist(owner: string): boolean {
@@ -182,10 +141,6 @@ export class DbManager extends DisposableObject {
return this.dbConfigStore.doesRemoteDbExist(nwo, listName);
}
public doesLocalDbExist(dbName: string, listName?: string): boolean {
return this.dbConfigStore.doesLocalDbExist(dbName, listName);
}
private getExpandedItems(): ExpandedDbItem[] {
const items = this.app.workspaceState.get<ExpandedDbItem[]>(
DbManager.DB_EXPANDED_STATE_KEY,

View File

@@ -1,19 +1,14 @@
import {
DbConfig,
LocalDatabase,
LocalList,
RemoteRepositoryList,
SelectedDbItemKind,
} from "./config/db-config";
import {
DbItemKind,
LocalDatabaseDbItem,
LocalListDbItem,
RemoteOwnerDbItem,
RemoteRepoDbItem,
RemoteSystemDefinedListDbItem,
RemoteUserDefinedListDbItem,
RootLocalDbItem,
RootRemoteDbItem,
} from "./db-item";
import { ExpandedDbItem, ExpandedDbItemKind } from "./db-item-expansion";
@@ -55,28 +50,6 @@ export function createRemoteTree(
};
}
export function createLocalTree(
dbConfig: DbConfig,
expandedItems: ExpandedDbItem[],
): RootLocalDbItem {
const localLists = dbConfig.databases.local.lists.map((l) =>
createLocalList(l, dbConfig, expandedItems),
);
const localDbs = dbConfig.databases.local.databases.map((l) =>
createLocalDb(l, dbConfig),
);
const expanded = expandedItems.some(
(e) => e.kind === ExpandedDbItemKind.RootLocal,
);
return {
kind: DbItemKind.RootLocal,
children: [...localLists, ...localDbs],
expanded: !!expanded,
};
}
function createSystemDefinedList(
n: number,
dbConfig: DbConfig,
@@ -155,51 +128,3 @@ function createRepoItem(
parentListName: listName,
};
}
function createLocalList(
list: LocalList,
dbConfig: DbConfig,
expandedItems: ExpandedDbItem[],
): LocalListDbItem {
const selected =
dbConfig.selected &&
dbConfig.selected.kind === SelectedDbItemKind.LocalUserDefinedList &&
dbConfig.selected.listName === list.name;
const expanded = expandedItems.some(
(e) =>
e.kind === ExpandedDbItemKind.LocalUserDefinedList &&
e.listName === list.name,
);
return {
kind: DbItemKind.LocalList,
listName: list.name,
databases: list.databases.map((d) => createLocalDb(d, dbConfig, list.name)),
selected: !!selected,
expanded: !!expanded,
};
}
function createLocalDb(
db: LocalDatabase,
dbConfig: DbConfig,
listName?: string,
): LocalDatabaseDbItem {
const selected =
dbConfig.selected &&
dbConfig.selected.kind === SelectedDbItemKind.LocalDatabase &&
dbConfig.selected.databaseName === db.name &&
dbConfig.selected.listName === listName;
return {
kind: DbItemKind.LocalDatabase,
databaseName: db.name,
dateAdded: db.dateAdded,
language: db.language,
origin: db.origin,
storagePath: db.storagePath,
selected: !!selected,
parentListName: listName,
};
}

View File

@@ -60,10 +60,6 @@ export class GitHubDatabasesModule extends DisposableObject {
}
private async initialize(): Promise<void> {
if (!this.config.enable) {
return;
}
// Start checking for and downloading the database asynchronously. We don't want to block on this
// in extension activation since this makes network requests and waits for user input.
void this.promptGitHubRepositoryDownload().catch((e: unknown) => {

View File

@@ -233,7 +233,7 @@ export class DatabaseUI extends DisposableObject {
private app: App,
private databaseManager: DatabaseManager,
languageContext: LanguageContextStore,
private readonly queryServer: QueryRunner | undefined,
private readonly queryServer: QueryRunner,
private readonly storagePath: string,
readonly extensionPath: string,
) {
@@ -402,10 +402,7 @@ export class DatabaseUI extends DisposableObject {
workspace.workspaceFolders[0].uri.fsPath,
"tutorial-queries",
);
const cli = this.queryServer?.cliServer;
if (!cli) {
throw new Error("No CLI server found");
}
const cli = this.queryServer.cliServer;
await cli.packInstall(tutorialQueriesPath);
}
}
@@ -528,7 +525,7 @@ export class DatabaseUI extends DisposableObject {
this.databaseManager,
this.storagePath,
progress,
this.queryServer?.cliServer,
this.queryServer.cliServer,
);
},
{
@@ -548,7 +545,7 @@ export class DatabaseUI extends DisposableObject {
this.storagePath,
credentials,
progress,
this.queryServer?.cliServer,
this.queryServer.cliServer,
);
},
{
@@ -704,7 +701,7 @@ export class DatabaseUI extends DisposableObject {
this.databaseManager,
this.storagePath,
progress,
this.queryServer?.cliServer,
this.queryServer.cliServer,
);
} else {
await this.databaseManager.openDatabase(uri, {
@@ -836,7 +833,7 @@ export class DatabaseUI extends DisposableObject {
this.databaseManager,
this.storagePath,
progress,
this.queryServer?.cliServer,
this.queryServer.cliServer,
);
}
},

View File

@@ -424,9 +424,8 @@ export class DatabaseManager extends DisposableObject {
step: ++step,
});
const databaseItem = await this.createDatabaseItemFromPersistedState(
database,
);
const databaseItem =
await this.createDatabaseItemFromPersistedState(database);
try {
await this.refreshDatabase(databaseItem);
await this.registerDatabase(databaseItem);

View File

@@ -9,20 +9,15 @@ import {
window as Window,
workspace,
} from "vscode";
import {
LineColumnLocation,
ResolvableLocationValue,
UrlValue,
WholeFileLocation,
} from "../../common/bqrs-cli-types";
import {
isLineColumnLoc,
tryGetResolvableLocation,
} from "../../common/bqrs-utils";
import { getErrorMessage } from "../../common/helpers-pure";
import { assertNever, getErrorMessage } from "../../common/helpers-pure";
import { Logger } from "../../common/logging";
import { DatabaseItem } from "./database-item";
import { DatabaseManager } from "./database-manager";
import {
UrlValueLineColumnLocation,
UrlValueResolvable,
UrlValueWholeFileLocation,
} from "../../common/raw-result-types";
const findMatchBackground = new ThemeColor("editor.findMatchBackground");
const findRangeHighlightBackground = new ThemeColor(
@@ -45,7 +40,7 @@ export const shownLocationLineDecoration =
* @param databaseItem Database in which to resolve the file location.
*/
function resolveFivePartLocation(
loc: LineColumnLocation,
loc: UrlValueLineColumnLocation,
databaseItem: DatabaseItem,
): Location {
// `Range` is a half-open interval, and is zero-based. CodeQL locations are closed intervals, and
@@ -66,7 +61,7 @@ function resolveFivePartLocation(
* @param databaseItem Database in which to resolve the filesystem resource location.
*/
function resolveWholeFileLocation(
loc: WholeFileLocation,
loc: UrlValueWholeFileLocation,
databaseItem: DatabaseItem,
): Location {
// A location corresponding to the start of the file.
@@ -81,21 +76,25 @@ function resolveWholeFileLocation(
* @param databaseItem Database in which to resolve the file location.
*/
export function tryResolveLocation(
loc: UrlValue | undefined,
loc: UrlValueResolvable | undefined,
databaseItem: DatabaseItem,
): Location | undefined {
const resolvableLoc = tryGetResolvableLocation(loc);
if (!resolvableLoc || typeof resolvableLoc === "string") {
if (!loc) {
return;
} else if (isLineColumnLoc(resolvableLoc)) {
return resolveFivePartLocation(resolvableLoc, databaseItem);
} else {
return resolveWholeFileLocation(resolvableLoc, databaseItem);
}
switch (loc.type) {
case "wholeFileLocation":
return resolveWholeFileLocation(loc, databaseItem);
case "lineColumnLocation":
return resolveFivePartLocation(loc, databaseItem);
default:
assertNever(loc);
}
}
export async function showResolvableLocation(
loc: ResolvableLocationValue,
loc: UrlValueResolvable,
databaseItem: DatabaseItem,
logger: Logger,
): Promise<void> {
@@ -153,7 +152,7 @@ export async function showLocation(location?: Location) {
export async function jumpToLocation(
databaseUri: string,
loc: ResolvableLocationValue,
loc: UrlValueResolvable,
databaseManager: DatabaseManager,
logger: Logger,
) {

View File

@@ -1,6 +1,5 @@
import { DbItem, DbItemKind } from "../db-item";
import {
createDbTreeViewItemLocalDatabase,
createDbTreeViewItemOwner,
createDbTreeViewItemRepo,
createDbTreeViewItemRoot,
@@ -11,14 +10,6 @@ import {
export function mapDbItemToTreeViewItem(dbItem: DbItem): DbTreeViewItem {
switch (dbItem.kind) {
case DbItemKind.RootLocal:
return createDbTreeViewItemRoot(
dbItem,
"local",
"Local databases",
dbItem.children.map((c) => mapDbItemToTreeViewItem(c)),
);
case DbItemKind.RootRemote:
return createDbTreeViewItemRoot(
dbItem,
@@ -46,19 +37,5 @@ export function mapDbItemToTreeViewItem(dbItem: DbItem): DbTreeViewItem {
case DbItemKind.RemoteRepo:
return createDbTreeViewItemRepo(dbItem, dbItem.repoFullName);
case DbItemKind.LocalList:
return createDbTreeViewItemUserDefinedList(
dbItem,
dbItem.listName,
dbItem.databases.map(mapDbItemToTreeViewItem),
);
case DbItemKind.LocalDatabase:
return createDbTreeViewItemLocalDatabase(
dbItem,
dbItem.databaseName,
dbItem.language,
);
}
}

View File

@@ -17,14 +17,7 @@ import {
isValidGitHubOwner,
} from "../../common/github-url-identifier-helper";
import { DisposableObject } from "../../common/disposable-object";
import {
DbItem,
DbItemKind,
DbListKind,
LocalDatabaseDbItem,
LocalListDbItem,
RemoteUserDefinedListDbItem,
} from "../db-item";
import { DbItem, DbItemKind, RemoteUserDefinedListDbItem } from "../db-item";
import { getDbItemName } from "../db-item-naming";
import { DbManager } from "../db-manager";
import { DbTreeDataProvider } from "./db-tree-data-provider";
@@ -42,10 +35,6 @@ export interface RemoteDatabaseQuickPickItem extends QuickPickItem {
remoteDatabaseKind: string;
}
export interface AddListQuickPickItem extends QuickPickItem {
databaseKind: DbListKind;
}
interface CodeSearchQuickPickItem extends QuickPickItem {
language: string;
}
@@ -223,8 +212,6 @@ export class DbPanel extends DisposableObject {
}
private async addNewList(): Promise<void> {
const listKind = DbListKind.Remote;
const listName = await window.showInputBox({
prompt: "Enter a name for the new list",
placeHolder: "example-list",
@@ -233,7 +220,7 @@ export class DbPanel extends DisposableObject {
return;
}
if (this.dbManager.doesListExist(listKind, listName)) {
if (this.dbManager.doesListExist(listName)) {
void showAndLogErrorMessage(
this.app.logger,
`The list '${listName}' already exists`,
@@ -241,7 +228,7 @@ export class DbPanel extends DisposableObject {
return;
}
await this.dbManager.addNewList(listKind, listName);
await this.dbManager.addNewList(listName);
}
private async setSelectedItem(treeViewItem: DbTreeViewItem): Promise<void> {
@@ -277,59 +264,13 @@ export class DbPanel extends DisposableObject {
return;
}
switch (dbItem.kind) {
case DbItemKind.LocalList:
await this.renameLocalListItem(dbItem, newName);
break;
case DbItemKind.LocalDatabase:
await this.renameLocalDatabaseItem(dbItem, newName);
break;
case DbItemKind.RemoteUserDefinedList:
await this.renameVariantAnalysisUserDefinedListItem(dbItem, newName);
break;
default:
throw Error(`Action not allowed for the '${dbItem.kind}' db item kind`);
if (dbItem.kind === DbItemKind.RemoteUserDefinedList) {
await this.renameVariantAnalysisUserDefinedListItem(dbItem, newName);
} else {
throw Error(`Action not allowed for the '${dbItem.kind}' db item kind`);
}
}
private async renameLocalListItem(
dbItem: LocalListDbItem,
newName: string,
): Promise<void> {
if (dbItem.listName === newName) {
return;
}
if (this.dbManager.doesListExist(DbListKind.Local, newName)) {
void showAndLogErrorMessage(
this.app.logger,
`The list '${newName}' already exists`,
);
return;
}
await this.dbManager.renameList(dbItem, newName);
}
private async renameLocalDatabaseItem(
dbItem: LocalDatabaseDbItem,
newName: string,
): Promise<void> {
if (dbItem.databaseName === newName) {
return;
}
if (this.dbManager.doesLocalDbExist(newName, dbItem.parentListName)) {
void showAndLogErrorMessage(
this.app.logger,
`The database '${newName}' already exists`,
);
return;
}
await this.dbManager.renameLocalDb(dbItem, newName);
}
private async renameVariantAnalysisUserDefinedListItem(
dbItem: RemoteUserDefinedListDbItem,
newName: string,
@@ -338,7 +279,7 @@ export class DbPanel extends DisposableObject {
return;
}
if (this.dbManager.doesListExist(DbListKind.Remote, newName)) {
if (this.dbManager.doesListExist(newName)) {
void showAndLogErrorMessage(
this.app.logger,
`The list '${newName}' already exists`,

View File

@@ -29,18 +29,12 @@ export function getDbItemActions(dbItem: DbItem): DbTreeViewItemAction[] {
}
const dbItemKindsThatCanBeRemoved = [
DbItemKind.LocalList,
DbItemKind.RemoteUserDefinedList,
DbItemKind.LocalDatabase,
DbItemKind.RemoteRepo,
DbItemKind.RemoteOwner,
];
const dbItemKindsThatCanBeRenamed = [
DbItemKind.LocalList,
DbItemKind.RemoteUserDefinedList,
DbItemKind.LocalDatabase,
];
const dbItemKindsThatCanBeRenamed = [DbItemKind.RemoteUserDefinedList];
const dbItemKindsThatCanBeOpenedOnGitHub = [
DbItemKind.RemoteOwner,

View File

@@ -2,13 +2,10 @@ import * as vscode from "vscode";
import {
DbItem,
isSelectableDbItem,
LocalDatabaseDbItem,
LocalListDbItem,
RemoteOwnerDbItem,
RemoteRepoDbItem,
RemoteSystemDefinedListDbItem,
RemoteUserDefinedListDbItem,
RootLocalDbItem,
RootRemoteDbItem,
} from "../db-item";
import { getDbItemActions } from "./db-tree-view-item-action";
@@ -74,7 +71,7 @@ export function createDbTreeViewItemError(
}
export function createDbTreeViewItemRoot(
dbItem: RootLocalDbItem | RootRemoteDbItem,
dbItem: RootRemoteDbItem,
label: string,
tooltip: string,
children: DbTreeViewItem[],
@@ -105,7 +102,7 @@ export function createDbTreeViewItemSystemDefinedList(
}
export function createDbTreeViewItemUserDefinedList(
dbItem: LocalListDbItem | RemoteUserDefinedListDbItem,
dbItem: RemoteUserDefinedListDbItem,
listName: string,
children: DbTreeViewItem[],
): DbTreeViewItem {
@@ -147,21 +144,6 @@ export function createDbTreeViewItemRepo(
);
}
export function createDbTreeViewItemLocalDatabase(
dbItem: LocalDatabaseDbItem,
databaseName: string,
language: string,
): DbTreeViewItem {
return new DbTreeViewItem(
dbItem,
new vscode.ThemeIcon("database"),
databaseName,
`Language: ${language}`,
vscode.TreeItemCollapsibleState.None,
[],
);
}
function getCollapsibleState(
expanded: boolean,
): vscode.TreeItemCollapsibleState {

View File

@@ -542,8 +542,8 @@ async function installOrUpdateDistribution(
const messageText = willUpdateCodeQl
? "Updating CodeQL CLI"
: codeQlInstalled
? "Checking for updates to CodeQL CLI"
: "Installing CodeQL CLI";
? "Checking for updates to CodeQL CLI"
: "Installing CodeQL CLI";
try {
await installOrUpdateDistributionWithProgressTitle(
@@ -564,8 +564,8 @@ async function installOrUpdateDistribution(
willUpdateCodeQl
? "update"
: codeQlInstalled
? "check for updates to"
: "install"
? "check for updates to"
: "install"
} CodeQL CLI`;
if (e instanceof GithubRateLimitedError) {
@@ -1086,23 +1086,27 @@ async function activateWithInstalledDistribution(
// Jump-to-definition and find-references
void extLogger.log("Registering jump-to-definition handlers.");
languages.registerDefinitionProvider(
{ scheme: zipArchiveScheme },
new TemplateQueryDefinitionProvider(
cliServer,
qs,
dbm,
contextualQueryStorageDir,
ctx.subscriptions.push(
languages.registerDefinitionProvider(
{ scheme: zipArchiveScheme },
new TemplateQueryDefinitionProvider(
cliServer,
qs,
dbm,
contextualQueryStorageDir,
),
),
);
languages.registerReferenceProvider(
{ scheme: zipArchiveScheme },
new TemplateQueryReferenceProvider(
cliServer,
qs,
dbm,
contextualQueryStorageDir,
ctx.subscriptions.push(
languages.registerReferenceProvider(
{ scheme: zipArchiveScheme },
new TemplateQueryReferenceProvider(
cliServer,
qs,
dbm,
contextualQueryStorageDir,
),
),
);

View File

@@ -2,13 +2,14 @@ import { CodeQLCliServer } from "../../codeql-cli/cli";
import {
DecodedBqrsChunk,
BqrsId,
EntityValue,
BqrsEntityValue,
} from "../../common/bqrs-cli-types";
import { DatabaseItem } from "../../databases/local-databases";
import { ChildAstItem, AstItem } from "./ast-viewer";
import { Uri } from "vscode";
import { QueryOutputDir } from "../../run-queries-shared";
import { fileRangeFromURI } from "../contextual/file-range-from-uri";
import { mapUrlValue } from "../../common/bqrs-raw-results-mapper";
/**
* A class that wraps a tree of QL results from a query that
@@ -55,8 +56,8 @@ export class AstBuilder {
// Build up the parent-child relationships
edgeTuples.tuples.forEach((tuple) => {
const [source, target, tupleType, value] = tuple as [
EntityValue,
EntityValue,
BqrsEntityValue,
BqrsEntityValue,
string,
string,
];
@@ -90,7 +91,11 @@ export class AstBuilder {
// populate parents and children
nodeTuples.tuples.forEach((tuple) => {
const [entity, tupleType, value] = tuple as [EntityValue, string, string];
const [entity, tupleType, value] = tuple as [
BqrsEntityValue,
string,
string,
];
const id = entity.id!;
switch (tupleType) {
@@ -106,7 +111,7 @@ export class AstBuilder {
const item = {
id,
label,
location: entity.url,
location: entity.url ? mapUrlValue(entity.url) : undefined,
fileLocation: fileRangeFromURI(entity.url, this.db),
children: [] as ChildAstItem[],
order: Number.MAX_SAFE_INTEGER,

View File

@@ -16,20 +16,20 @@ import {
import { basename } from "path";
import { DatabaseItem } from "../../databases/local-databases";
import { UrlValue, BqrsId } from "../../common/bqrs-cli-types";
import { BqrsId } from "../../common/bqrs-cli-types";
import { showLocation } from "../../databases/local-databases/locations";
import {
isStringLoc,
isWholeFileLoc,
isLineColumnLoc,
} from "../../common/bqrs-utils";
import { DisposableObject } from "../../common/disposable-object";
import { asError, getErrorMessage } from "../../common/helpers-pure";
import {
asError,
assertNever,
getErrorMessage,
} from "../../common/helpers-pure";
import { redactableError } from "../../common/errors";
import { AstViewerCommands } from "../../common/commands";
import { extLogger } from "../../common/logging/vscode";
import { showAndLogExceptionWithTelemetry } from "../../common/logging";
import { telemetryListener } from "../../common/vscode/telemetry";
import { UrlValue } from "../../common/raw-result-types";
export interface AstItem {
id: BqrsId;
@@ -90,15 +90,18 @@ class AstViewerDataProvider
private extractLineInfo(loc?: UrlValue) {
if (!loc) {
return "";
} else if (isStringLoc(loc)) {
return loc;
} else if (isWholeFileLoc(loc)) {
return loc.uri;
} else if (isLineColumnLoc(loc)) {
return loc.startLine;
} else {
return "";
return;
}
switch (loc.type) {
case "string":
return loc.value;
case "wholeFileLocation":
return loc.uri;
case "lineColumnLocation":
return loc.startLine;
default:
assertNever(loc);
}
}
}

View File

@@ -1,11 +1,14 @@
import * as vscode from "vscode";
import { UrlValue, LineColumnLocation } from "../../common/bqrs-cli-types";
import {
BqrsUrlValue,
BqrsLineColumnLocation,
} from "../../common/bqrs-cli-types";
import { isEmptyPath } from "../../common/bqrs-utils";
import { DatabaseItem } from "../../databases/local-databases";
export function fileRangeFromURI(
uri: UrlValue | undefined,
uri: BqrsUrlValue | undefined,
db: DatabaseItem,
): vscode.Location | undefined {
if (!uri || typeof uri === "string") {
@@ -13,7 +16,7 @@ export function fileRangeFromURI(
} else if ("startOffset" in uri) {
return undefined;
} else {
const loc = uri as LineColumnLocation;
const loc = uri as BqrsLineColumnLocation;
if (isEmptyPath(loc.uri)) {
return undefined;
}

View File

@@ -3,10 +3,9 @@ import {
encodeArchiveBasePath,
} from "../../common/vscode/archive-filesystem-provider";
import {
ColumnKindCode,
EntityValue,
getResultSetSchema,
ResultSetSchema,
BqrsColumnKindCode,
BqrsEntityValue,
BqrsResultSetSchema,
} from "../../common/bqrs-cli-types";
import { CodeQLCliServer } from "../../codeql-cli/cli";
import { DatabaseItem, DatabaseManager } from "../../databases/local-databases";
@@ -99,12 +98,14 @@ async function getLinksFromResults(
const localLinks: FullLocationLink[] = [];
const bqrsPath = outputDir.bqrsPath;
const info = await cli.bqrsInfo(bqrsPath);
const selectInfo = getResultSetSchema(SELECT_QUERY_NAME, info);
const selectInfo = info["result-sets"].find(
(schema) => schema.name === SELECT_QUERY_NAME,
);
if (isValidSelect(selectInfo)) {
// TODO: Page this
const allTuples = await cli.bqrsDecode(bqrsPath, SELECT_QUERY_NAME);
for (const tuple of allTuples.tuples) {
const [src, dest] = tuple as [EntityValue, EntityValue];
const [src, dest] = tuple as [BqrsEntityValue, BqrsEntityValue];
const srcFile = src.url && fileRangeFromURI(src.url, db);
const destFile = dest.url && fileRangeFromURI(dest.url, db);
if (
@@ -130,12 +131,12 @@ function createTemplates(path: string): Record<string, string> {
};
}
function isValidSelect(selectInfo: ResultSetSchema | undefined) {
function isValidSelect(selectInfo: BqrsResultSetSchema | undefined) {
return (
selectInfo &&
selectInfo.columns.length === 3 &&
selectInfo.columns[0].kind === ColumnKindCode.ENTITY &&
selectInfo.columns[1].kind === ColumnKindCode.ENTITY &&
selectInfo.columns[2].kind === ColumnKindCode.STRING
selectInfo.columns[0].kind === BqrsColumnKindCode.ENTITY &&
selectInfo.columns[1].kind === BqrsColumnKindCode.ENTITY &&
selectInfo.columns[2].kind === BqrsColumnKindCode.STRING
);
}

View File

@@ -88,25 +88,18 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
uriString: string,
token: CancellationToken,
): Promise<LocationLink[]> {
return withProgress(
async (progress, tokenInner) => {
const multiToken = new MultiCancellationToken(token, tokenInner);
return getLocationsForUriString(
this.cli,
this.qs,
this.dbm,
uriString,
KeyType.DefinitionQuery,
this.queryStorageDir,
progress,
multiToken,
(src, _dest) => src === uriString,
);
},
{
cancellable: true,
title: "Finding definitions",
},
// Do not create a multitoken here. There will be no popup and users cannot click on anything to cancel this operation.
// This is because finding definitions can be triggered by a hover, which should not have a popup.
return getLocationsForUriString(
this.cli,
this.qs,
this.dbm,
uriString,
KeyType.DefinitionQuery,
this.queryStorageDir,
() => {}, // noop
token,
(src, _dest) => src === uriString,
);
}
}
@@ -161,6 +154,7 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
uriString: string,
token: CancellationToken,
): Promise<FullLocationLink[]> {
// Create a multitoken here. There will be a popup and users can click on it to cancel this operation.
return withProgress(
async (progress, tokenInner) => {
const multiToken = new MultiCancellationToken(token, tokenInner);
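The definition provider above now runs without a progress popup (hover-triggered lookups should stay silent), while the reference provider still combines the caller's token with the progress token in a MultiCancellationToken so either side can cancel. A minimal sketch of such a combinator, assuming only the vscode API and not the extension's actual MultiCancellationToken implementation:

import { CancellationToken, Event, EventEmitter } from "vscode";

// Illustrative only: reports cancellation as soon as any wrapped token cancels.
class CombinedCancellationToken implements CancellationToken {
  private readonly emitter = new EventEmitter<unknown>();

  constructor(private readonly tokens: CancellationToken[]) {
    for (const token of tokens) {
      token.onCancellationRequested((e) => this.emitter.fire(e));
    }
  }

  get isCancellationRequested(): boolean {
    return this.tokens.some((token) => token.isCancellationRequested);
  }

  get onCancellationRequested(): Event<unknown> {
    return this.emitter.event;
  }
}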

View File

@@ -60,11 +60,7 @@ import {
shownLocationLineDecoration,
jumpToLocation,
} from "../databases/local-databases/locations";
import {
RawResultSet,
transformBqrsResultSet,
ResultSetSchema,
} from "../common/bqrs-cli-types";
import { bqrsToResultSet } from "../common/bqrs-raw-results-mapper";
import {
AbstractWebview,
WebviewPanelConfig,
@@ -76,6 +72,8 @@ import { redactableError } from "../common/errors";
import { ResultsViewCommands } from "../common/commands";
import { App } from "../common/app";
import { Disposable } from "../common/disposable-object";
import { RawResultSet } from "../common/raw-result-types";
import { BqrsResultSetSchema } from "../common/bqrs-cli-types";
/**
* results-view.ts
@@ -106,9 +104,9 @@ function sortInterpretedResults(
a.message.text === undefined
? 0
: b.message.text === undefined
? 0
: multiplier *
a.message.text?.localeCompare(b.message.text, env.language),
? 0
: multiplier *
a.message.text?.localeCompare(b.message.text, env.language),
);
break;
default:
@@ -136,7 +134,7 @@ function numPagesOfResultSet(
const n =
interpretation?.data.t === "GraphInterpretationData"
? interpretation.data.dot.length
: resultSet.schema.rows;
: resultSet.totalRowCount;
return Math.ceil(n / pageSize);
}
@@ -524,16 +522,16 @@ export class ResultsView extends AbstractWebview<
offset: schema.pagination?.offsets[0],
pageSize,
});
const resultSet = transformBqrsResultSet(schema, chunk);
const resultSet = bqrsToResultSet(schema, chunk);
fullQuery.completedQuery.setResultCount(
interpretationPage?.numTotalResults || resultSet.schema.rows,
interpretationPage?.numTotalResults || resultSet.totalRowCount,
);
const parsedResultSets: ParsedResultSets = {
pageNumber: 0,
pageSize,
numPages: numPagesOfResultSet(resultSet, this._interpretation),
numInterpretedPages: numInterpretedPages(this._interpretation),
resultSet: { ...resultSet, t: "RawResultSet" },
resultSet: { t: "RawResultSet", resultSet },
selectedTable: undefined,
resultSetNames,
};
@@ -601,7 +599,7 @@ export class ResultsView extends AbstractWebview<
private async getResultSetSchemas(
completedQuery: CompletedQueryInfo,
selectedTable = "",
): Promise<ResultSetSchema[]> {
): Promise<BqrsResultSetSchema[]> {
const resultsPath = completedQuery.getResultsPath(selectedTable);
const schemas = await this.cliServer.bqrsInfo(
resultsPath,
@@ -668,12 +666,12 @@ export class ResultsView extends AbstractWebview<
pageSize,
},
);
const resultSet = transformBqrsResultSet(schema, chunk);
const resultSet = bqrsToResultSet(schema, chunk);
const parsedResultSets: ParsedResultSets = {
pageNumber,
pageSize,
resultSet: { t: "RawResultSet", ...resultSet },
resultSet: { t: "RawResultSet", resultSet },
numPages: numPagesOfResultSet(resultSet),
numInterpretedPages: numInterpretedPages(this._interpretation),
selectedTable,
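numPagesOfResultSet now reads the row count from resultSet.totalRowCount rather than the BQRS schema, but the arithmetic is unchanged: a ceiling division of rows by page size. For example (illustrative numbers only):

// 2501 raw rows with a page size of 200 yields 13 pages;
// the last page holds the remaining 101 rows.
const pageSize = 200;
const totalRowCount = 2501;
const numPages = Math.ceil(totalRowCount / pageSize); // 13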

View File

@@ -507,9 +507,8 @@ export class SkeletonQueryWizard {
): Promise<DatabaseItem | undefined> {
const defaultDatabaseNwo = QUERY_LANGUAGE_TO_DATABASE_REPO[language];
const dbItems = await SkeletonQueryWizard.sortDatabaseItemsByDateAdded(
databaseItems,
);
const dbItems =
await SkeletonQueryWizard.sortDatabaseItemsByDateAdded(databaseItems);
const defaultDatabaseItem = await SkeletonQueryWizard.findDatabaseItemByNwo(
language,

View File

@@ -1,4 +1,3 @@
import * as I from "immutable";
import {
EvaluationLogProblemReporter,
EvaluationLogScanner,
@@ -37,7 +36,7 @@ function makeKey(
return `${queryCausingWork}:${predicate}${suffix ? ` ${suffix}` : ""}`;
}
function getDependentPredicates(operations: string[]): I.List<string> {
function getDependentPredicates(operations: string[]): string[] {
const id = String.raw`[0-9a-zA-Z:#_\./]+`;
const idWithAngleBrackets = String.raw`[0-9a-zA-Z:#_<>\./]+`;
const quotedId = String.raw`\`[^\`\r\n]*\``;
@@ -68,10 +67,10 @@ function getDependentPredicates(operations: string[]): I.List<string> {
String.raw`\{[0-9]+\}\s+(?:[0-9a-zA-Z]+\s=|\|)\s(?:` + regexps.join("|")
})`,
);
return I.List(operations).flatMap((operation) => {
return operations.flatMap((operation) => {
const matches = r.exec(operation.trim()) || [];
return I.List(matches)
.rest() // Skip the first group as it's just the entire string
return matches
.slice(1) // Skip the first group as it's just the entire string
.filter((x) => !!x)
.flatMap((x) => x.split(",")) // Group 2 in the INVOKE HIGHER_ORDER RELATION case is a comma-separated list of identifiers.
.flatMap((x) => x.split(" UNION ")) // Split n-ary unions into individual arguments.
@@ -152,7 +151,7 @@ function computeJoinOrderBadness(
interface Bucket {
tupleCounts: Int32Array;
resultSize: number;
dependentPredicateSizes: I.Map<string, number>;
dependentPredicateSizes: Map<string, number>;
}
class JoinOrderScanner implements EvaluationLogScanner {
@@ -407,12 +406,12 @@ class JoinOrderScanner implements EvaluationLogScanner {
const dependentPredicates = getDependentPredicates(
inLayerEvent.ra[raReference],
);
let dependentPredicateSizes: I.Map<string, number>;
let dependentPredicateSizes: Map<string, number>;
// We treat the base case as a non-recursive pipeline. In that case, the dependent predicates are
// the dependencies of the base case and the cur_deltas.
if (raReference === "base") {
dependentPredicateSizes = I.Map(
dependentPredicates.map((pred): [string, number] => {
dependentPredicateSizes = dependentPredicates
.map((pred): [string, number] => {
// A base case cannot contain a `prev_delta`, but it can contain a `cur_delta`.
let size = 0;
if (pred.endsWith("#cur_delta")) {
@@ -426,28 +425,27 @@ class JoinOrderScanner implements EvaluationLogScanner {
size = this.predicateSizes.get(hash)!;
}
return [pred, size];
}),
);
})
.reduce((acc, [pred, size]) => acc.set(pred, size), new Map());
} else {
// It's a non-base case in a recursive pipeline. In that case, the dependent predicates are
// only the prev_deltas.
dependentPredicateSizes = I.Map(
dependentPredicates
.flatMap((pred) => {
// If it's actually a prev_delta
if (pred.endsWith("#prev_delta")) {
// Return the predicate without the #prev_delta suffix.
return [pred.slice(0, -"#prev_delta".length)];
} else {
// Not a recursive delta. Skip it.
return [];
}
})
.map((prev): [string, number] => {
const size = this.prevDeltaSizes(event, prev, iteration);
return [prev, size];
}),
);
dependentPredicateSizes = dependentPredicates
.flatMap((pred) => {
// If it's actually a prev_delta
if (pred.endsWith("#prev_delta")) {
// Return the predicate without the #prev_delta suffix.
return [pred.slice(0, -"#prev_delta".length)];
} else {
// Not a recursive delta. Skip it.
return [];
}
})
.map((prev): [string, number] => {
const size = this.prevDeltaSizes(event, prev, iteration);
return [prev, size];
})
.reduce((acc, [pred, size]) => acc.set(pred, size), new Map());
}
const deltaSize = inLayerEvent.deltaSizes[iteration];
@@ -475,7 +473,7 @@ class JoinOrderScanner implements EvaluationLogScanner {
orderTobucket.set(raReference, {
tupleCounts: new Int32Array(0),
resultSize: 0,
dependentPredicateSizes: I.Map(),
dependentPredicateSizes: new Map(),
});
}
@@ -494,12 +492,18 @@ class JoinOrderScanner implements EvaluationLogScanner {
this.problemReporter,
);
const resultSize = bucket.resultSize + deltaSize;
// Pointwise sum the deltas.
const newDependentPredicateSizes =
bucket.dependentPredicateSizes.mergeWith(
(oldSize, newSize) => oldSize + newSize,
dependentPredicateSizes,
const newDependentPredicateSizes = new Map<string, number>(
bucket.dependentPredicateSizes,
);
for (const [pred, size] of dependentPredicateSizes) {
newDependentPredicateSizes.set(
pred,
(newDependentPredicateSizes.get(pred) ?? 0) + size,
);
}
orderTobucket.set(raReference, {
tupleCounts: newTupleCounts,
resultSize,
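With the immutable dependency gone, the pointwise sum of dependent-predicate sizes is done by copying the existing Map and adding each incoming entry, as shown above. Extracted as a standalone helper (a hypothetical name, not part of the scanner), the same merge looks like:

// Pointwise-sums two maps of predicate sizes; a key missing from one map
// contributes 0. Purely illustrative.
function mergePredicateSizes(
  base: ReadonlyMap<string, number>,
  deltas: ReadonlyMap<string, number>,
): Map<string, number> {
  const merged = new Map<string, number>(base);
  for (const [pred, size] of deltas) {
    merged.set(pred, (merged.get(pred) ?? 0) + size);
  }
  return merged;
}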

View File

@@ -85,9 +85,8 @@ export async function runAutoModelQueries({
// CodeQL needs to have access to the database to be able to retrieve the
// snippets from it. The source location prefix is used to determine the
// base path of the database.
const sourceLocationPrefix = await databaseItem.getSourceLocationPrefix(
cliServer,
);
const sourceLocationPrefix =
await databaseItem.getSourceLocationPrefix(cliServer);
const sourceArchiveUri = databaseItem.sourceArchive;
const sourceInfo =
sourceArchiveUri === undefined

View File

@@ -1,11 +1,13 @@
import { DecodedBqrsChunk } from "../common/bqrs-cli-types";
import { Call, CallClassification, Method } from "./method";
import { DecodedBqrsChunk, BqrsEntityValue } from "../common/bqrs-cli-types";
import { CallClassification, Method, Usage } from "./method";
import { ModeledMethodType } from "./modeled-method";
import { parseLibraryFilename } from "./library";
import { Mode } from "./shared/mode";
import { ApplicationModeTuple, FrameworkModeTuple } from "./queries/query";
import { QueryLanguage } from "../common/query-language";
import { getModelsAsDataLanguage } from "./languages";
import { mapUrlValue } from "../common/bqrs-raw-results-mapper";
import { isUrlValueResolvable } from "../common/raw-result-types";
export function decodeBqrsToMethods(
chunk: DecodedBqrsChunk,
@@ -17,7 +19,7 @@ export function decodeBqrsToMethods(
const definition = getModelsAsDataLanguage(language);
chunk?.tuples.forEach((tuple) => {
let usage: Call;
let usageEntityValue: BqrsEntityValue;
let packageName: string;
let typeName: string;
let methodName: string;
@@ -30,7 +32,7 @@ export function decodeBqrsToMethods(
if (mode === Mode.Application) {
[
usage,
usageEntityValue,
packageName,
typeName,
methodName,
@@ -43,7 +45,7 @@ export function decodeBqrsToMethods(
] = tuple as ApplicationModeTuple;
} else {
[
usage,
usageEntityValue,
packageName,
typeName,
methodName,
@@ -97,11 +99,25 @@ export function decodeBqrsToMethods(
});
}
if (usageEntityValue.url === undefined) {
return;
}
const usageUrl = mapUrlValue(usageEntityValue.url);
if (!usageUrl || !isUrlValueResolvable(usageUrl)) {
return;
}
if (!usageEntityValue.label) {
return;
}
const method = methodsByApiName.get(signature)!;
const usages = [
const usages: Usage[] = [
...method.usages,
{
...usage,
label: usageEntityValue.label,
url: usageUrl,
classification,
},
];
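A usage is only recorded when the entity value carries a label and a URL that maps to a resolvable location; everything else is skipped. A hedged sketch of that guard as a reusable predicate, with deliberately simplified shapes (the real types live in common/bqrs-cli-types and common/raw-result-types):

// Simplified, assumed shapes for illustration only.
type MappedUrl = { type: string };

function hasUsableLocation(
  entity: { label?: string; url?: unknown },
  mapUrl: (url: unknown) => MappedUrl | undefined,
  isResolvable: (url: MappedUrl) => boolean,
): boolean {
  if (entity.url === undefined || !entity.label) {
    return false;
  }
  const mapped = mapUrl(entity.url);
  return mapped !== undefined && isResolvable(mapped);
}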

View File

@@ -0,0 +1,69 @@
import { Method } from "./method";
import { ModeledMethod } from "./modeled-method";
import { BaseLogger } from "../common/logging";
interface Notifier {
missingMethod(signature: string): void;
inconsistentSupported(signature: string, expectedSupported: boolean): void;
}
export function checkConsistency(
methods: readonly Method[],
modeledMethods: Readonly<Record<string, readonly ModeledMethod[]>>,
notifier: Notifier,
) {
const methodsBySignature = methods.reduce(
(acc, method) => {
acc[method.signature] = method;
return acc;
},
{} as Record<string, Method>,
);
for (const signature in modeledMethods) {
const method = methodsBySignature[signature];
if (!method) {
notifier.missingMethod(signature);
continue;
}
const modeledMethodsForSignature = modeledMethods[signature];
checkMethodConsistency(method, modeledMethodsForSignature, notifier);
}
}
function checkMethodConsistency(
method: Method,
modeledMethods: readonly ModeledMethod[],
notifier: Notifier,
) {
// Type models are currently not shown as `supported` since they do not give any model information.
const expectSupported = modeledMethods.some(
(m) => m.type !== "none" && m.type !== "type",
);
if (method.supported !== expectSupported) {
notifier.inconsistentSupported(method.signature, expectSupported);
}
}
export class DefaultNotifier implements Notifier {
constructor(private readonly logger: BaseLogger) {}
missingMethod(signature: string) {
void this.logger.log(
`Model editor query consistency check: Missing method ${signature} for method that is modeled.`,
);
}
inconsistentSupported(signature: string, expectedSupported: boolean) {
const expectedMessage = expectedSupported
? `Expected method to be supported, but it is not.`
: `Expected method to not be supported, but it is.`;
void this.logger.log(
`Model editor query consistency check: Inconsistent supported flag for method ${signature}. ${expectedMessage}`,
);
}
}
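checkConsistency is wired up on method-change events further down (in ModelEditorModule) with the logging DefaultNotifier, but any object satisfying the Notifier shape works. A minimal usage sketch with a collecting notifier; the inputs are placeholders, not real data:

import { checkConsistency } from "./consistency-check";
import type { Method } from "./method";
import type { ModeledMethod } from "./modeled-method";

// Placeholder inputs; in the extension these come from the modeling store.
declare const methods: readonly Method[];
declare const modeledMethods: Readonly<
  Record<string, readonly ModeledMethod[]>
>;

const problems: string[] = [];

checkConsistency(methods, modeledMethods, {
  missingMethod(signature) {
    problems.push(`missing method for modeled signature ${signature}`);
  },
  inconsistentSupported(signature, expectedSupported) {
    problems.push(
      `${signature}: expected supported=${String(expectedSupported)}`,
    );
  },
});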

View File

@@ -48,7 +48,6 @@ export class MethodModelingViewProvider extends AbstractWebviewViewProvider<
t: "setMethodModelingPanelViewState",
viewState: {
language: this.language,
showMultipleModels: this.modelConfig.showMultipleModels,
},
});
}

View File

@@ -1,9 +1,9 @@
import { ResolvableLocationValue } from "../common/bqrs-cli-types";
import { ModeledMethod, ModeledMethodType } from "./modeled-method";
import { UrlValueResolvable } from "../common/raw-result-types";
export type Call = {
type Call = {
readonly label: string;
readonly url: Readonly<ResolvableLocationValue>;
readonly url: Readonly<UrlValueResolvable>;
};
export enum CallClassification {

View File

@@ -19,6 +19,7 @@ import { assertNever } from "../../common/helpers-pure";
import { ModeledMethod } from "../modeled-method";
import { groupMethods, sortGroupNames, sortMethods } from "../shared/sorting";
import { INITIAL_MODE, Mode } from "../shared/mode";
import { UrlValueResolvable } from "../../common/raw-result-types";
export class MethodsUsageDataProvider
extends DisposableObject
@@ -99,11 +100,16 @@ export class MethodsUsageDataProvider
} else {
const { method, usage } = item;
const description =
usage.url.type === "wholeFileLocation"
? this.relativePathWithinDatabase(usage.url.uri)
: `${this.relativePathWithinDatabase(usage.url.uri)} [${
usage.url.startLine
}, ${usage.url.endLine}]`;
return {
label: usage.label,
description: `${this.relativePathWithinDatabase(usage.url.uri)} [${
usage.url.startLine
}, ${usage.url.endLine}]`,
description,
collapsibleState: TreeItemCollapsibleState.None,
command: {
title: "Show usage",
@@ -211,14 +217,35 @@ function usagesAreEqual(u1: Usage, u2: Usage): boolean {
return (
u1.label === u2.label &&
u1.classification === u2.classification &&
u1.url.uri === u2.url.uri &&
u1.url.startLine === u2.url.startLine &&
u1.url.startColumn === u2.url.startColumn &&
u1.url.endLine === u2.url.endLine &&
u1.url.endColumn === u2.url.endColumn
urlValueResolvablesAreEqual(u1.url, u2.url)
);
}
function urlValueResolvablesAreEqual(
u1: UrlValueResolvable,
u2: UrlValueResolvable,
): boolean {
if (u1.type !== u2.type) {
return false;
}
if (u1.type === "wholeFileLocation" && u2.type === "wholeFileLocation") {
return u1.uri === u2.uri;
}
if (u1.type === "lineColumnLocation" && u2.type === "lineColumnLocation") {
return (
u1.uri === u2.uri &&
u1.startLine === u2.startLine &&
u1.startColumn === u2.startColumn &&
u1.endLine === u2.endLine &&
u1.endColumn === u2.endColumn
);
}
return false;
}
function sortMethodsInGroups(methods: readonly Method[], mode: Mode): Method[] {
const grouped = groupMethods(methods, mode);
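Both the tree item description and urlValueResolvablesAreEqual branch on the type discriminant of UrlValueResolvable. As an assumed, simplified model of that union (the real definitions live in common/raw-result-types; field names follow the usages above):

// Simplified shapes for illustration only.
type WholeFileLocation = {
  type: "wholeFileLocation";
  uri: string;
};

type LineColumnLocation = {
  type: "lineColumnLocation";
  uri: string;
  startLine: number;
  startColumn: number;
  endLine: number;
  endColumn: number;
};

type ResolvableLocation = WholeFileLocation | LineColumnLocation;

// Mirrors the description logic: whole-file locations show only the path,
// line/column locations append the start and end lines.
function describe(url: ResolvableLocation): string {
  return url.type === "wholeFileLocation"
    ? url.uri
    : `${url.uri} [${url.startLine}, ${url.endLine}]`;
}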

View File

@@ -24,6 +24,7 @@ import { ModelingEvents } from "./modeling-events";
import { getModelsAsDataLanguage } from "./languages";
import { INITIAL_MODE } from "./shared/mode";
import { isSupportedLanguage } from "./supported-languages";
import { DefaultNotifier, checkConsistency } from "./consistency-check";
export class ModelEditorModule extends DisposableObject {
private readonly queryStorageDir: string;
@@ -99,6 +100,20 @@ export class ModelEditorModule extends DisposableObject {
await this.showMethod(event.databaseItem, event.method, event.usage);
}),
);
this.push(
this.modelingEvents.onMethodsChanged((event) => {
const modeledMethods = this.modelingStore.getModeledMethods(
event.databaseItem,
);
checkConsistency(
event.methods,
modeledMethods,
new DefaultNotifier(this.app.logger),
);
}),
);
}
private async showMethod(

View File

@@ -385,7 +385,6 @@ export class ModelEditorView extends AbstractWebview<
language: this.language,
showGenerateButton,
showLlmButton,
showMultipleModels: this.modelConfig.showMultipleModels,
mode: this.modelingStore.getMode(this.databaseItem),
showModeSwitchButton,
sourceArchiveAvailable,
@@ -482,9 +481,8 @@ export class ModelEditorView extends AbstractWebview<
// In application mode, we need the database of a specific library to generate
// the modeled methods. In framework mode, we'll use the current database.
if (mode === Mode.Application) {
addedDatabase = await this.promptChooseNewOrExistingDatabase(
progress,
);
addedDatabase =
await this.promptChooseNewOrExistingDatabase(progress);
if (!addedDatabase) {
return;
}
@@ -562,9 +560,8 @@ export class ModelEditorView extends AbstractWebview<
private async modelDependency(): Promise<void> {
return withProgress(async (progress, token) => {
const addedDatabase = await this.promptChooseNewOrExistingDatabase(
progress,
);
const addedDatabase =
await this.promptChooseNewOrExistingDatabase(progress);
if (!addedDatabase || token.isCancellationRequested) {
return;
}

View File

@@ -9,6 +9,7 @@ import { Mode } from "./shared/mode";
interface MethodsChangedEvent {
readonly methods: readonly Method[];
readonly dbUri: string;
readonly databaseItem: DatabaseItem;
readonly isActiveDb: boolean;
}
@@ -166,10 +167,12 @@ export class ModelingEvents extends DisposableObject {
public fireMethodsChangedEvent(
methods: Method[],
dbUri: string,
databaseItem: DatabaseItem,
isActiveDb: boolean,
) {
this.onMethodsChangedEventEmitter.fire({
methods,
databaseItem,
dbUri,
isActiveDb,
});

View File

@@ -155,6 +155,7 @@ export class ModelingStore extends DisposableObject {
this.modelingEvents.fireMethodsChangedEvent(
methods,
dbUri,
dbItem,
dbUri === this.activeDb,
);
}

View File

@@ -1,264 +0,0 @@
import { Query } from "./query";
export const fetchExternalApisQuery: Query = {
applicationModeQuery: `/**
* @name Fetch endpoints for use in the model editor (application mode)
* @description A list of 3rd party endpoints (methods and attributes) used in the codebase. Excludes test and generated code.
* @kind table
* @id csharp/utils/modeleditor/application-mode-endpoints
* @tags modeleditor endpoints application-mode
*/
import csharp
import ApplicationModeEndpointsQuery
import ModelEditor
private Call aUsage(ExternalEndpoint api) { result.getTarget().getUnboundDeclaration() = api }
from ExternalEndpoint endpoint, boolean supported, Call usage, string type, string classification
where
supported = isSupported(endpoint) and
usage = aUsage(endpoint) and
type = supportedType(endpoint) and
classification = methodClassification(usage)
select usage, endpoint.getNamespace(), endpoint.getTypeName(), endpoint.getName(),
endpoint.getParameterTypes(), supported, endpoint.dllName(), endpoint.dllVersion(), type,
classification
`,
frameworkModeQuery: `/**
* @name Fetch endpoints for use in the model editor (framework mode)
* @description A list of endpoints accessible (methods and attributes) for consumers of the library. Excludes test and generated code.
* @kind table
* @id csharp/utils/modeleditor/framework-mode-endpoints
* @tags modeleditor endpoints framework-mode
*/
import csharp
import FrameworkModeEndpointsQuery
import ModelEditor
from PublicEndpointFromSource endpoint, boolean supported, string type
where
supported = isSupported(endpoint) and
type = supportedType(endpoint)
select endpoint, endpoint.getNamespace(), endpoint.getTypeName(), endpoint.getName(),
endpoint.getParameterTypes(), supported, endpoint.getFile().getBaseName(), type
`,
dependencies: {
"ApplicationModeEndpointsQuery.qll": `private import csharp
private import semmle.code.csharp.dataflow.ExternalFlow as ExternalFlow
private import semmle.code.csharp.dataflow.internal.DataFlowDispatch as DataFlowDispatch
private import semmle.code.csharp.dataflow.internal.DataFlowPrivate
private import semmle.code.csharp.dataflow.internal.TaintTrackingPrivate
private import semmle.code.csharp.security.dataflow.flowsources.Remote
private import ModelEditor
/**
* A class of effectively public callables in library code.
*/
class ExternalEndpoint extends Endpoint {
ExternalEndpoint() { this.fromLibrary() }
/** Gets a node that is an input to a call to this API. */
private ArgumentNode getAnInput() {
result
.getCall()
.(DataFlowDispatch::NonDelegateDataFlowCall)
.getATarget(_)
.getUnboundDeclaration() = this
}
/** Gets a node that is an output from a call to this API. */
private DataFlow::Node getAnOutput() {
exists(Call c, DataFlowDispatch::NonDelegateDataFlowCall dc |
dc.getDispatchCall().getCall() = c and
c.getTarget().getUnboundDeclaration() = this
|
result = DataFlowDispatch::getAnOutNode(dc, _)
)
}
override predicate hasSummary() {
Endpoint.super.hasSummary()
or
defaultAdditionalTaintStep(this.getAnInput(), _)
}
override predicate isSource() {
this.getAnOutput() instanceof RemoteFlowSource or ExternalFlow::sourceNode(this.getAnOutput(), _)
}
override predicate isSink() { ExternalFlow::sinkNode(this.getAnInput(), _) }
}
`,
"FrameworkModeEndpointsQuery.qll": `private import csharp
private import semmle.code.csharp.frameworks.Test
private import ModelEditor
/**
* A class of effectively public callables from source code.
*/
class PublicEndpointFromSource extends Endpoint {
PublicEndpointFromSource() { this.fromSource() and not this.getFile() instanceof TestFile }
override predicate isSource() { this instanceof SourceCallable }
override predicate isSink() { this instanceof SinkCallable }
}`,
"ModelEditor.qll": `/** Provides classes and predicates related to handling APIs for the VS Code extension. */
private import csharp
private import semmle.code.csharp.dataflow.FlowSummary
private import semmle.code.csharp.dataflow.internal.DataFlowPrivate
private import semmle.code.csharp.dataflow.internal.FlowSummaryImpl as FlowSummaryImpl
private import semmle.code.csharp.frameworks.Test
/** Holds if the given callable is not worth supporting. */
private predicate isUninteresting(Callable c) {
c.getDeclaringType() instanceof TestLibrary or
c.(Constructor).isParameterless() or
c.getDeclaringType() instanceof AnonymousClass
}
/**
* A callable method or accessor from either the C# Standard Library, a 3rd party library, or from the source.
*/
class Endpoint extends Callable {
Endpoint() {
[this.(Modifiable), this.(Accessor).getDeclaration()].isEffectivelyPublic() and
not isUninteresting(this) and
this.isUnboundDeclaration()
}
/**
* Gets the namespace of this endpoint.
*/
bindingset[this]
string getNamespace() { this.getDeclaringType().hasQualifiedName(result, _) }
/**
* Gets the unbound type name of this endpoint.
*/
bindingset[this]
string getTypeName() { result = nestedName(this.getDeclaringType().getUnboundDeclaration()) }
/**
* Gets the parameter types of this endpoint.
*/
bindingset[this]
string getParameterTypes() { result = "(" + parameterQualifiedTypeNamesToString(this) + ")" }
private string getDllName() { result = this.getLocation().(Assembly).getName() }
private string getDllVersion() { result = this.getLocation().(Assembly).getVersion().toString() }
string dllName() {
result = this.getDllName()
or
not exists(this.getDllName()) and result = this.getFile().getBaseName()
}
string dllVersion() {
result = this.getDllVersion()
or
not exists(this.getDllVersion()) and result = ""
}
/** Holds if this API has a supported summary. */
pragma[nomagic]
predicate hasSummary() { this instanceof SummarizedCallable }
/** Holds if this API is a known source. */
pragma[nomagic]
abstract predicate isSource();
/** Holds if this API is a known sink. */
pragma[nomagic]
abstract predicate isSink();
/** Holds if this API is a known neutral. */
pragma[nomagic]
predicate isNeutral() { this instanceof FlowSummaryImpl::Public::NeutralCallable }
/**
* Holds if this API is supported by existing CodeQL libraries, that is, it is either a
* recognized source, sink or neutral or it has a flow summary.
*/
predicate isSupported() {
this.hasSummary() or this.isSource() or this.isSink() or this.isNeutral()
}
}
boolean isSupported(Endpoint endpoint) {
if endpoint.isSupported() then result = true else result = false
}
string supportedType(Endpoint endpoint) {
endpoint.isSink() and result = "sink"
or
endpoint.isSource() and result = "source"
or
endpoint.hasSummary() and result = "summary"
or
endpoint.isNeutral() and result = "neutral"
or
not endpoint.isSupported() and result = ""
}
string methodClassification(Call method) {
method.getFile() instanceof TestFile and result = "test"
or
not method.getFile() instanceof TestFile and
result = "source"
}
/**
* Gets the nested name of the type \`t\`.
*
* If the type is not a nested type, the result is the same as \`getName()\`.
* Otherwise the name of the nested type is prefixed with a \`+\` and appended to
* the name of the enclosing type, which might be a nested type as well.
*/
private string nestedName(Type t) {
not exists(t.getDeclaringType().getUnboundDeclaration()) and
result = t.getName()
or
nestedName(t.getDeclaringType().getUnboundDeclaration()) + "+" + t.getName() = result
}
// Temporary copy of csharp/ql/src/Telemetry/TestLibrary.qll
pragma[nomagic]
private predicate isTestNamespace(Namespace ns) {
ns.getFullName()
.matches([
"NUnit.Framework%", "Xunit%", "Microsoft.VisualStudio.TestTools.UnitTesting%", "Moq%"
])
}
/**
* A test library.
*/
class TestLibrary extends RefType {
TestLibrary() { isTestNamespace(this.getNamespace()) }
}
// Temporary copy of csharp/ql/lib/semmle/code/csharp/dataflow/ExternalFlow.qll
private import semmle.code.csharp.dataflow.internal.FlowSummaryImplSpecific
/**
* A callable where there exists a MaD sink model that applies to it.
*/
class SinkCallable extends Callable {
SinkCallable() { sinkElement(this, _, _, _) }
}
/**
* A callable where there exists a MaD source model that applies to it.
*/
class SourceCallable extends Callable {
SourceCallable() { sourceElement(this, _, _, _) }
}
`,
},
};

View File

@@ -1,11 +1,7 @@
import { fetchExternalApisQuery as csharpFetchExternalApisQuery } from "./csharp";
import { fetchExternalApisQuery as javaFetchExternalApisQuery } from "./java";
import { fetchExternalApisQuery as rubyFetchExternalApisQuery } from "./ruby";
import { Query } from "./query";
import { QueryLanguage } from "../../common/query-language";
export const fetchExternalApiQueries: Partial<Record<QueryLanguage, Query>> = {
[QueryLanguage.CSharp]: csharpFetchExternalApisQuery,
[QueryLanguage.Java]: javaFetchExternalApisQuery,
[QueryLanguage.Ruby]: rubyFetchExternalApisQuery,
};

View File

@@ -1,234 +0,0 @@
import { Query } from "./query";
export const fetchExternalApisQuery: Query = {
applicationModeQuery: `/**
* @name Fetch endpoints for use in the model editor (application mode)
* @description A list of 3rd party endpoints (methods) used in the codebase. Excludes test and generated code.
* @kind table
* @id java/utils/modeleditor/application-mode-endpoints
* @tags modeleditor endpoints application-mode
*/
private import java
private import ApplicationModeEndpointsQuery
private import ModelEditor
private Call aUsage(ExternalEndpoint endpoint) {
result.getCallee().getSourceDeclaration() = endpoint
}
from ExternalEndpoint endpoint, boolean supported, Call usage, string type, string classification
where
supported = isSupported(endpoint) and
usage = aUsage(endpoint) and
type = supportedType(endpoint) and
classification = usageClassification(usage)
select usage, endpoint.getPackageName(), endpoint.getTypeName(), endpoint.getName(),
endpoint.getParameterTypes(), supported, endpoint.jarContainer(), endpoint.jarVersion(), type,
classification
`,
frameworkModeQuery: `/**
* @name Fetch endpoints for use in the model editor (framework mode)
* @description A list of endpoints accessible (methods) for consumers of the library. Excludes test and generated code.
* @kind table
* @id java/utils/modeleditor/framework-mode-endpoints
* @tags modeleditor endpoints framework-mode
*/
private import java
private import FrameworkModeEndpointsQuery
private import ModelEditor
from PublicEndpointFromSource endpoint, boolean supported, string type
where
supported = isSupported(endpoint) and
type = supportedType(endpoint)
select endpoint, endpoint.getPackageName(), endpoint.getTypeName(), endpoint.getName(),
endpoint.getParameterTypes(), supported,
endpoint.getCompilationUnit().getParentContainer().getBaseName(), type
`,
dependencies: {
"ApplicationModeEndpointsQuery.qll": `private import java
private import semmle.code.java.dataflow.ExternalFlow
private import semmle.code.java.dataflow.FlowSources
private import semmle.code.java.dataflow.internal.DataFlowPrivate
private import ModelEditor
/**
* A class of effectively public callables in library code.
*/
class ExternalEndpoint extends Endpoint {
ExternalEndpoint() { not this.fromSource() }
/** Gets a node that is an input to a call to this API. */
private DataFlow::Node getAnInput() {
exists(Call call | call.getCallee().getSourceDeclaration() = this |
result.asExpr().(Argument).getCall() = call or
result.(ArgumentNode).getCall().asCall() = call
)
}
/** Gets a node that is an output from a call to this API. */
private DataFlow::Node getAnOutput() {
exists(Call call | call.getCallee().getSourceDeclaration() = this |
result.asExpr() = call or
result.(DataFlow::PostUpdateNode).getPreUpdateNode().(ArgumentNode).getCall().asCall() = call
)
}
override predicate hasSummary() {
Endpoint.super.hasSummary()
or
TaintTracking::localAdditionalTaintStep(this.getAnInput(), _)
}
override predicate isSource() {
this.getAnOutput() instanceof RemoteFlowSource or sourceNode(this.getAnOutput(), _)
}
override predicate isSink() { sinkNode(this.getAnInput(), _) }
}
`,
"FrameworkModeEndpointsQuery.qll": `private import java
private import semmle.code.java.dataflow.internal.DataFlowPrivate
private import semmle.code.java.dataflow.internal.FlowSummaryImplSpecific
private import semmle.code.java.dataflow.internal.ModelExclusions
private import ModelEditor
/**
* A class of effectively public callables from source code.
*/
class PublicEndpointFromSource extends Endpoint, ModelApi {
override predicate isSource() { sourceElement(this, _, _, _) }
override predicate isSink() { sinkElement(this, _, _, _) }
}
`,
"ModelEditor.qll": `/** Provides classes and predicates related to handling APIs for the VS Code extension. */
private import java
private import semmle.code.java.dataflow.ExternalFlow
private import semmle.code.java.dataflow.FlowSummary
private import semmle.code.java.dataflow.TaintTracking
private import semmle.code.java.dataflow.internal.ModelExclusions
/** Holds if the given callable/method is not worth supporting. */
private predicate isUninteresting(Callable c) {
c.getDeclaringType() instanceof TestLibrary or
c.(Constructor).isParameterless() or
c.getDeclaringType() instanceof AnonymousClass
}
/**
* A callable method from either the Standard Library, a 3rd party library or from the source.
*/
class Endpoint extends Callable {
Endpoint() { not isUninteresting(this) }
/**
* Gets the package name of this endpoint.
*/
string getPackageName() { result = this.getDeclaringType().getPackage().getName() }
/**
* Gets the type name of this endpoint.
*/
string getTypeName() { result = this.getDeclaringType().nestedName() }
/**
* Gets the parameter types of this endpoint.
*/
string getParameterTypes() { result = paramsString(this) }
private string getJarName() {
result = this.getCompilationUnit().getParentContainer*().(JarFile).getBaseName()
}
private string getJarVersion() {
result = this.getCompilationUnit().getParentContainer*().(JarFile).getSpecificationVersion()
}
/**
* Gets the jar file containing this API. Normalizes the Java Runtime to "rt.jar" despite the presence of modules.
*/
string jarContainer() {
result = this.getJarName()
or
not exists(this.getJarName()) and result = "rt.jar"
}
/**
* Gets the version of the JAR file containing this API. Empty if no version is found in the JAR.
*/
string jarVersion() {
result = this.getJarVersion()
or
not exists(this.getJarVersion()) and result = ""
}
/** Holds if this API has a supported summary. */
pragma[nomagic]
predicate hasSummary() { this = any(SummarizedCallable sc).asCallable() }
/** Holds if this API is a known source. */
pragma[nomagic]
abstract predicate isSource();
/** Holds if this API is a known sink. */
pragma[nomagic]
abstract predicate isSink();
/** Holds if this API is a known neutral. */
pragma[nomagic]
predicate isNeutral() {
exists(string namespace, string type, string name, string signature |
neutralModel(namespace, type, name, signature, _, _) and
this = interpretElement(namespace, type, false, name, signature, "")
)
}
/**
* Holds if this API is supported by existing CodeQL libraries, that is, it is either a
* recognized source, sink or neutral or it has a flow summary.
*/
predicate isSupported() {
this.hasSummary() or this.isSource() or this.isSink() or this.isNeutral()
}
}
boolean isSupported(Endpoint endpoint) {
endpoint.isSupported() and result = true
or
not endpoint.isSupported() and result = false
}
string supportedType(Endpoint endpoint) {
endpoint.isSink() and result = "sink"
or
endpoint.isSource() and result = "source"
or
endpoint.hasSummary() and result = "summary"
or
endpoint.isNeutral() and result = "neutral"
or
not endpoint.isSupported() and result = ""
}
string usageClassification(Call usage) {
isInTestFile(usage.getLocation().getFile()) and result = "test"
or
usage.getFile() instanceof GeneratedFile and result = "generated"
or
not isInTestFile(usage.getLocation().getFile()) and
not usage.getFile() instanceof GeneratedFile and
result = "source"
}
// Temporarily copied from java/ql/lib/semmle/code/java/dataflow/internal/ModelExclusions.qll
predicate isInTestFile(File file) {
file.getAbsolutePath().matches(["%/test/%", "%/guava-tests/%", "%/guava-testlib/%"]) and
not file.getAbsolutePath().matches(["%/ql/test/%", "%/ql/automodel/test/%"]) // allows our test cases to work
}
`,
},
};

View File

@@ -1,5 +1,6 @@
import { Call, CallClassification } from "../method";
import { CallClassification } from "../method";
import { ModeledMethodType } from "../modeled-method";
import { BqrsEntityValue } from "../../common/bqrs-cli-types";
export type Query = {
/**
@@ -39,7 +40,7 @@ export type Query = {
};
export type ApplicationModeTuple = [
Call,
BqrsEntityValue,
string,
string,
string,
@@ -52,7 +53,7 @@ export type ApplicationModeTuple = [
];
export type FrameworkModeTuple = [
Call,
BqrsEntityValue,
string,
string,
string,

View File

@@ -1,16 +0,0 @@
import { ModeledMethod } from "../modeled-method";
/**
* Converts a ModeledMethod[] to a single ModeledMethod for legacy usage. This function should always be used instead
* of the trivial conversion to track usages of this conversion.
*
* This method should only be called inside a `postMessage` call. If it's used anywhere else, consider whether the
* boundary is correct: the boundary should be as close as possible to the extension host -> webview boundary.
*
* @param modeledMethods The ModeledMethod[]
*/
export function convertToLegacyModeledMethod(
modeledMethods: ModeledMethod[],
): ModeledMethod | undefined {
return modeledMethods[0];
}

View File

@@ -7,7 +7,6 @@ export interface ModelEditorViewState {
language: QueryLanguage;
showGenerateButton: boolean;
showLlmButton: boolean;
showMultipleModels: boolean;
mode: Mode;
showModeSwitchButton: boolean;
sourceArchiveAvailable: boolean;
@@ -15,5 +14,4 @@ export interface ModelEditorViewState {
export interface MethodModelingPanelViewState {
language: QueryLanguage | undefined;
showMultipleModels: boolean;
}

View File

@@ -181,14 +181,23 @@ function createDataExtensionYamlsByGrouping(
>,
createFilename: (method: Method) => string,
): Record<string, string> {
const methodsByFilename: Record<string, Record<string, ModeledMethod[]>> = {};
const actualFilenameByCanonicalFilename: Record<string, string> = {};
const methodsByCanonicalFilename: Record<
string,
Record<string, ModeledMethod[]>
> = {};
// We only want to generate a YAML file when it's a known external API usage
// and there are new modeled methods for it. This avoids overwriting other
// files that may contain data we don't know about.
for (const method of methods) {
if (method.signature in newModeledMethods) {
methodsByFilename[createFilename(method)] = {};
const filename = createFilename(method);
const canonicalFilename = canonicalizeFilename(filename);
methodsByCanonicalFilename[canonicalFilename] = {};
actualFilenameByCanonicalFilename[canonicalFilename] = filename;
}
}
@@ -196,10 +205,16 @@ function createDataExtensionYamlsByGrouping(
for (const [filename, methodsBySignature] of Object.entries(
existingModeledMethods,
)) {
if (filename in methodsByFilename) {
const canonicalFilename = canonicalizeFilename(filename);
if (canonicalFilename in methodsByCanonicalFilename) {
for (const [signature, methods] of Object.entries(methodsBySignature)) {
methodsByFilename[filename][signature] = [...methods];
methodsByCanonicalFilename[canonicalFilename][signature] = [...methods];
}
// Ensure that if a file exists on disk, we use the same capitalization
// as the original file.
actualFilenameByCanonicalFilename[canonicalFilename] = filename;
}
}
@@ -209,19 +224,25 @@ function createDataExtensionYamlsByGrouping(
const newMethods = newModeledMethods[method.signature];
if (newMethods) {
const filename = createFilename(method);
const canonicalFilename = canonicalizeFilename(filename);
// Override any existing modeled methods with the new ones.
methodsByFilename[filename][method.signature] = [...newMethods];
methodsByCanonicalFilename[canonicalFilename][method.signature] = [
...newMethods,
];
}
}
const result: Record<string, string> = {};
for (const [filename, methods] of Object.entries(methodsByFilename)) {
result[filename] = createDataExtensionYaml(
language,
Object.values(methods).flatMap((methods) => methods),
);
for (const [canonicalFilename, methods] of Object.entries(
methodsByCanonicalFilename,
)) {
result[actualFilenameByCanonicalFilename[canonicalFilename]] =
createDataExtensionYaml(
language,
Object.values(methods).flatMap((methods) => methods),
);
}
return result;
@@ -299,6 +320,13 @@ export function createFilenameForPackage(
return `${prefix}${packageName}${suffix}.yml`;
}
function canonicalizeFilename(filename: string) {
// We want to canonicalize filenames so that they are always in the same format
// for comparison purposes. This is important because we want to avoid overwriting
// data extension YAML files on case-insensitive file systems.
return filename.toLowerCase();
}
function validateModelExtensionFile(data: unknown): data is ModelExtensionFile {
modelExtensionFileSchemaValidate(data);
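Keying the intermediate maps by a lowercased filename while remembering an actual on-disk spelling is what prevents two YAML files that differ only in case from clobbering each other on case-insensitive file systems. A small self-contained sketch of the same idea as a generic helper (hypothetical, not the extension's code):

// Groups values under a case-insensitive key but emits results under the
// spelling that was seen first. Illustrative only.
function groupCaseInsensitive<T>(
  entries: Array<[filename: string, value: T]>,
): Map<string, T[]> {
  const actualNameByCanonical = new Map<string, string>();
  const grouped = new Map<string, T[]>();

  for (const [filename, value] of entries) {
    const canonical = filename.toLowerCase();
    if (!actualNameByCanonical.has(canonical)) {
      actualNameByCanonical.set(canonical, filename);
    }
    const key = actualNameByCanonical.get(canonical)!;
    const bucket = grouped.get(key) ?? [];
    bucket.push(value);
    grouped.set(key, bucket);
  }

  return grouped;
}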

View File

@@ -239,8 +239,8 @@ export class QLTestAdapter extends DisposableObject implements TestAdapter {
const state = event.pass
? "passed"
: event.messages?.length
? "errored"
: "failed";
? "errored"
: "failed";
let message: string | undefined;
if (event.failureDescription || event.diff?.length) {
message =

View File

@@ -19,7 +19,7 @@ import { nanoid } from "nanoid";
import { CodeQLCliServer } from "./codeql-cli/cli";
import { SELECT_QUERY_NAME } from "./language-support";
import { DatabaseManager } from "./databases/local-databases";
import { DecodedBqrsChunk, EntityValue } from "./common/bqrs-cli-types";
import { DecodedBqrsChunk, BqrsEntityValue } from "./common/bqrs-cli-types";
import { BaseLogger, showAndLogWarningMessage } from "./common/logging";
import { extLogger } from "./common/logging/vscode";
import { generateSummarySymbolsFile } from "./log-insights/summary-parser";
@@ -287,7 +287,7 @@ export class QueryEvaluationInfo extends QueryOutputDir {
typeof v === "string" ? v.replaceAll('"', '""') : v
}"`;
} else if (chunk.columns[i].kind === "Entity") {
return (v as EntityValue).label;
return (v as BqrsEntityValue).label;
} else {
return v;
}

View File

@@ -45,18 +45,18 @@ select the **Dark+** theme. You can use **Preferences: Color Theme** in the *Com
4. Select **Developer: Open WebView Developer Tools**
5. Now, you will need to find the `<html>` element in the lowest-level `<iframe>`. See the image below:
<img src={iframeImage} />
<img src={iframeImage} alt="The iframe element showing in the VS Code webview developer tools element inspector" />
6. Once you have selected the `<html>` element as in the image above, click on **Show All Properties (... more)** (see image below). This will
expand all CSS variables.
<img src={stylesImage} />
<img src={stylesImage} alt="The styles tab of the VS Code webview developer tools element inspector" />
7. Copy all variables to the `src/stories/vscode-theme-dark.css` file.
8. Now, select the `<body>` element which is a direct child of the `<html>` element.
9. This time, you do not need to copy the variables. Instead, copy the styles on the `<body>` element to the `src/stories/vscode-theme-dark.css` file.
See the image below for which styles need to be copied.
<img src={bodyImage} />
<img src={bodyImage} alt="The styles on the body element showing in the VS Code webview developer tools element inspector" />
The same process can also be followed for updating the `src/stories/vscode-theme-light.css` file, but make sure to select the **Light+** theme.

View File

@@ -5,6 +5,7 @@ import { Meta, StoryFn } from "@storybook/react";
import CompareTableComponent from "../../view/compare/CompareTable";
import "../../view/results/resultsView.css";
import { ColumnKind } from "../../common/raw-result-types";
export default {
title: "Compare/Compare Table",
@@ -40,30 +41,38 @@ CompareTable.args = {
result: {
kind: "raw",
columns: [
{ name: "a", kind: "Entity" },
{ name: "b", kind: "Entity" },
{ name: "a", kind: ColumnKind.Entity },
{ name: "b", kind: ColumnKind.Entity },
],
from: [],
to: [
[
{
label: "url : String",
url: {
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
startLine: 22,
startColumn: 27,
endLine: 22,
endColumn: 57,
type: "entity",
value: {
label: "url : String",
url: {
type: "lineColumnLocation",
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
startLine: 22,
startColumn: 27,
endLine: 22,
endColumn: 57,
},
},
},
{
label: "url",
url: {
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
startLine: 23,
startColumn: 33,
endLine: 23,
endColumn: 35,
type: "entity",
value: {
label: "url",
url: {
type: "lineColumnLocation",
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
startLine: 23,
startColumn: 33,
endLine: 23,
endColumn: 35,
},
},
},
],

View File

@@ -1,24 +1,20 @@
{
"schema": {
"resultSet": {
"name": "#select",
"rows": 1,
"totalRowCount": 1,
"columns": [
{
"kind": "i"
"kind": "integer"
}
]
},
"resultSet": {
"schema": {
"name": "#select",
"rows": 1,
"columns": [
],
"rows": [
[
{
"kind": "i"
"type": "number",
"value": 60688
}
]
},
"rows": [[60688]]
]
},
"fileLinkPrefix": "https://github.com/facebook/create-react-app/blob/d960b9e38c062584ff6cfb1a70e1512509a966e7",
"sourceLocationPrefix": "/home/runner/work/bulk-builder/bulk-builder",

View File

@@ -47,26 +47,16 @@ MethodSaved.args = {
modelingStatus: "saved",
};
export const MultipleModelingsUnmodeled = Template.bind({});
MultipleModelingsUnmodeled.args = {
language,
method,
modeledMethods: [],
modelingStatus: "saved",
showMultipleModels: true,
};
export const MultipleModelingsModeledSingle = Template.bind({});
MultipleModelingsModeledSingle.args = {
export const ModeledSingle = Template.bind({});
ModeledSingle.args = {
language,
method,
modeledMethods: [createSinkModeledMethod(method)],
modelingStatus: "saved",
showMultipleModels: true,
};
export const MultipleModelingsModeledMultiple = Template.bind({});
MultipleModelingsModeledMultiple.args = {
export const ModeledMultiple = Template.bind({});
ModeledMultiple.args = {
language,
method,
modeledMethods: [
@@ -79,11 +69,10 @@ MultipleModelingsModeledMultiple.args = {
}),
],
modelingStatus: "saved",
showMultipleModels: true,
};
export const MultipleModelingsValidationFailedNeutral = Template.bind({});
MultipleModelingsValidationFailedNeutral.args = {
export const ValidationFailedNeutral = Template.bind({});
ValidationFailedNeutral.args = {
language,
method,
modeledMethods: [
@@ -91,11 +80,10 @@ MultipleModelingsValidationFailedNeutral.args = {
createNeutralModeledMethod(method),
],
modelingStatus: "unsaved",
showMultipleModels: true,
};
export const MultipleModelingsValidationFailedDuplicate = Template.bind({});
MultipleModelingsValidationFailedDuplicate.args = {
export const ValidationFailedDuplicate = Template.bind({});
ValidationFailedDuplicate.args = {
language,
method,
modeledMethods: [
@@ -108,5 +96,4 @@ MultipleModelingsValidationFailedDuplicate.args = {
createSinkModeledMethod(method),
],
modelingStatus: "unsaved",
showMultipleModels: true,
};

View File

@@ -216,7 +216,6 @@ LibraryRow.args = {
viewState: createMockModelEditorViewState({
showGenerateButton: true,
showLlmButton: true,
showMultipleModels: true,
}),
hideModeledMethods: false,
};

View File

@@ -6,10 +6,7 @@ import { Meta, StoryFn } from "@storybook/react";
import { MethodRow as MethodRowComponent } from "../../view/model-editor/MethodRow";
import { CallClassification, Method } from "../../model-editor/method";
import { ModeledMethod } from "../../model-editor/modeled-method";
import {
MULTIPLE_MODELS_GRID_TEMPLATE_COLUMNS,
SINGLE_MODEL_GRID_TEMPLATE_COLUMNS,
} from "../../view/model-editor/ModeledMethodDataGrid";
import { MULTIPLE_MODELS_GRID_TEMPLATE_COLUMNS } from "../../view/model-editor/ModeledMethodDataGrid";
import { DataGrid } from "../../view/common/DataGrid";
import { createMockModelEditorViewState } from "../../../test/factories/model-editor/view-state";
@@ -35,12 +32,8 @@ const Template: StoryFn<typeof MethodRowComponent> = (args) => {
[args],
);
const gridTemplateColumns = args.viewState?.showMultipleModels
? MULTIPLE_MODELS_GRID_TEMPLATE_COLUMNS
: SINGLE_MODEL_GRID_TEMPLATE_COLUMNS;
return (
<DataGrid gridTemplateColumns={gridTemplateColumns}>
<DataGrid gridTemplateColumns={MULTIPLE_MODELS_GRID_TEMPLATE_COLUMNS}>
<MethodRowComponent
{...args}
modeledMethods={modeledMethods}
@@ -63,6 +56,7 @@ const method: Method = {
{
label: "open(...)",
url: {
type: "lineColumnLocation",
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
startLine: 14,
startColumn: 24,
@@ -74,6 +68,7 @@ const method: Method = {
{
label: "open(...)",
url: {
type: "lineColumnLocation",
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
startLine: 25,
startColumn: 24,
@@ -100,7 +95,6 @@ const modeledMethod: ModeledMethod = {
const viewState = createMockModelEditorViewState({
showGenerateButton: true,
showLlmButton: true,
showMultipleModels: true,
});
export const Unmodeled = Template.bind({});

View File

@@ -30,7 +30,6 @@ ModelEditor.args = {
},
showGenerateButton: true,
showLlmButton: true,
showMultipleModels: true,
}),
initialMethods: [
{
@@ -112,6 +111,7 @@ ModelEditor.args = {
{
label: "println(...)",
url: {
type: "lineColumnLocation",
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
startLine: 29,
startColumn: 9,
@@ -123,6 +123,7 @@ ModelEditor.args = {
{
label: "println(...)",
url: {
type: "lineColumnLocation",
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/test/java/org/example/HelloControllerTest.java",
startLine: 29,
startColumn: 9,

View File

@@ -1,10 +1,13 @@
import * as React from "react";
import { Meta, StoryFn } from "@storybook/react";
import { action } from "@storybook/addon-actions";
import { AlertTable as AlertTableComponent } from "../../view/results/AlertTable";
import "../../view/results/resultsView.css";
import { AlertTableHeader } from "../../view/results/AlertTableHeader";
import { AlertTableNoResults } from "../../view/results/AlertTableNoResults";
export default {
title: "Results/Alert Table",
@@ -17,443 +20,417 @@ const Template: StoryFn<typeof AlertTableComponent> = (args) => (
export const WithoutCodeFlows = Template.bind({});
WithoutCodeFlows.args = {
resultSet: {
t: "InterpretedResultSet",
schema: { name: "alerts", rows: 1, columns: [] },
name: "alerts",
interpretation: {
data: {
version: "2.1.0",
runs: [
{
tool: { driver: { name: "" } },
results: [
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/Streams.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: { startLine: 98, startColumn: 35, endColumn: 37 },
},
},
],
partialFingerprints: {
primaryLocationLineHash: "1d25c2fbd979cbb:1",
primaryLocationStartColumnFingerprint: "30",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/Streams.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: { startLine: 99, startColumn: 35, endColumn: 37 },
},
},
],
partialFingerprints: {
primaryLocationLineHash: "5c5ed8d70236498a:1",
primaryLocationStartColumnFingerprint: "30",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/UnsafeAllocator.java",
uriBaseId: "%SRCROOT%",
index: 1,
},
region: {
startLine: 66,
startColumn: 33,
endLine: 68,
endColumn: 6,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "bd306a1ab438981d:1",
primaryLocationStartColumnFingerprint: "28",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/UnsafeAllocator.java",
uriBaseId: "%SRCROOT%",
index: 1,
},
region: {
startLine: 91,
startColumn: 33,
endLine: 93,
endColumn: 6,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "b91980e3f3ee2a16:1",
primaryLocationStartColumnFingerprint: "28",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/ReflectionAccessFilterHelper.java",
uriBaseId: "%SRCROOT%",
index: 2,
},
region: {
startLine: 100,
startColumn: 49,
endLine: 102,
endColumn: 10,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "e4d69f1851f45b95:1",
primaryLocationStartColumnFingerprint: "40",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/UnsafeAllocator.java",
uriBaseId: "%SRCROOT%",
index: 1,
},
region: {
startLine: 112,
startColumn: 33,
endLine: 114,
endColumn: 6,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "f3fb11daf511ebdb:1",
primaryLocationStartColumnFingerprint: "28",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/bind/DateTypeAdapter.java",
uriBaseId: "%SRCROOT%",
index: 3,
},
region: {
startLine: 84,
startColumn: 42,
endLine: 86,
endColumn: 10,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "65a5e0f08a26f7fd:1",
primaryLocationStartColumnFingerprint: "33",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/bind/DefaultDateTypeAdapter.java",
uriBaseId: "%SRCROOT%",
index: 4,
},
region: {
startLine: 157,
startColumn: 42,
endLine: 159,
endColumn: 10,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "c7647299ca3416a7:1",
primaryLocationStartColumnFingerprint: "33",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/bind/JsonTreeWriter.java",
uriBaseId: "%SRCROOT%",
index: 5,
},
region: {
startLine: 227,
startColumn: 52,
endLine: 228,
endColumn: 4,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "d86e48478bd5f82f:1",
primaryLocationStartColumnFingerprint: "49",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/stream/JsonReader.java",
uriBaseId: "%SRCROOT%",
index: 6,
},
region: {
startLine: 969,
startColumn: 47,
endLine: 971,
endColumn: 8,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "3bc8c477478d1d94:1",
primaryLocationStartColumnFingerprint: "40",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/stream/JsonReader.java",
uriBaseId: "%SRCROOT%",
index: 6,
},
region: {
startLine: 1207,
startColumn: 47,
endLine: 1209,
endColumn: 8,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "3bc8c477478d1d94:2",
primaryLocationStartColumnFingerprint: "40",
},
},
],
results: [
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/Streams.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: { startLine: 98, startColumn: 35, endColumn: 37 },
},
],
t: "SarifInterpretationData",
},
],
partialFingerprints: {
primaryLocationLineHash: "1d25c2fbd979cbb:1",
primaryLocationStartColumnFingerprint: "30",
},
sourceLocationPrefix: "/home/runner/work/gson/gson",
numTruncatedResults: 0,
numTotalResults: 11,
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/Streams.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: { startLine: 99, startColumn: 35, endColumn: 37 },
},
},
],
partialFingerprints: {
primaryLocationLineHash: "5c5ed8d70236498a:1",
primaryLocationStartColumnFingerprint: "30",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/UnsafeAllocator.java",
uriBaseId: "%SRCROOT%",
index: 1,
},
region: {
startLine: 66,
startColumn: 33,
endLine: 68,
endColumn: 6,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "bd306a1ab438981d:1",
primaryLocationStartColumnFingerprint: "28",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/UnsafeAllocator.java",
uriBaseId: "%SRCROOT%",
index: 1,
},
region: {
startLine: 91,
startColumn: 33,
endLine: 93,
endColumn: 6,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "b91980e3f3ee2a16:1",
primaryLocationStartColumnFingerprint: "28",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/ReflectionAccessFilterHelper.java",
uriBaseId: "%SRCROOT%",
index: 2,
},
region: {
startLine: 100,
startColumn: 49,
endLine: 102,
endColumn: 10,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "e4d69f1851f45b95:1",
primaryLocationStartColumnFingerprint: "40",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/UnsafeAllocator.java",
uriBaseId: "%SRCROOT%",
index: 1,
},
region: {
startLine: 112,
startColumn: 33,
endLine: 114,
endColumn: 6,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "f3fb11daf511ebdb:1",
primaryLocationStartColumnFingerprint: "28",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/bind/DateTypeAdapter.java",
uriBaseId: "%SRCROOT%",
index: 3,
},
region: {
startLine: 84,
startColumn: 42,
endLine: 86,
endColumn: 10,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "65a5e0f08a26f7fd:1",
primaryLocationStartColumnFingerprint: "33",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/bind/DefaultDateTypeAdapter.java",
uriBaseId: "%SRCROOT%",
index: 4,
},
region: {
startLine: 157,
startColumn: 42,
endLine: 159,
endColumn: 10,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "c7647299ca3416a7:1",
primaryLocationStartColumnFingerprint: "33",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/internal/bind/JsonTreeWriter.java",
uriBaseId: "%SRCROOT%",
index: 5,
},
region: {
startLine: 227,
startColumn: 52,
endLine: 228,
endColumn: 4,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "d86e48478bd5f82f:1",
primaryLocationStartColumnFingerprint: "49",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/stream/JsonReader.java",
uriBaseId: "%SRCROOT%",
index: 6,
},
region: {
startLine: 969,
startColumn: 47,
endLine: 971,
endColumn: 8,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "3bc8c477478d1d94:1",
primaryLocationStartColumnFingerprint: "40",
},
},
{
ruleId: "java/example/empty-block",
ruleIndex: 0,
rule: { id: "java/example/empty-block", index: 0 },
message: { text: "This is a empty block." },
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "gson/src/main/java/com/google/gson/stream/JsonReader.java",
uriBaseId: "%SRCROOT%",
index: 6,
},
region: {
startLine: 1207,
startColumn: 47,
endLine: 1209,
endColumn: 8,
},
},
},
],
partialFingerprints: {
primaryLocationLineHash: "3bc8c477478d1d94:2",
primaryLocationStartColumnFingerprint: "40",
},
},
],
sourceLocationPrefix: "/home/runner/work/gson/gson",
numTruncatedResults: 0,
databaseUri: "file:///a/b/c/java",
resultsPath: "file:///a/b/c/results.sarif",
nonemptyRawResults: true,
offset: 0,
header: <AlertTableHeader sortState={undefined} />,
noResults: (
<AlertTableNoResults
nonemptyRawResults={true}
showRawResults={() => action("show-raw-results")}
/>
),
};
export const WithCodeFlows = Template.bind({});
WithCodeFlows.args = {
resultSet: {
t: "InterpretedResultSet",
schema: { name: "alerts", rows: 1, columns: [] },
name: "alerts",
interpretation: {
data: {
version: "2.1.0",
runs: [
{
tool: { driver: { name: "" } },
results: [
{
ruleId: "java/sql-injection",
ruleIndex: 0,
rule: { id: "java/sql-injection", index: 0 },
message: {
text: "This query depends on a [user-provided value](1).",
},
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "src/main/java/org/example/HelloController.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: { startLine: 15, startColumn: 29, endColumn: 56 },
},
},
],
partialFingerprints: {
primaryLocationLineHash: "87e2d3cc5b365094:1",
primaryLocationStartColumnFingerprint: "16",
},
codeFlows: [
{
threadFlows: [
{
locations: [
{
location: {
physicalLocation: {
artifactLocation: {
uri: "src/main/java/org/example/HelloController.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: {
startLine: 13,
startColumn: 25,
endColumn: 54,
},
},
message: { text: "id : String" },
},
},
{
location: {
physicalLocation: {
artifactLocation: {
uri: "file:/",
index: 5,
},
region: {
startLine: 13,
startColumn: 25,
endColumn: 54,
},
},
message: { text: "id : String" },
},
},
{
location: {
physicalLocation: {
artifactLocation: {
uri: "src/main/java/org/example/HelloController.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: {
startLine: 15,
startColumn: 29,
endColumn: 56,
},
},
message: { text: "... + ..." },
},
},
],
},
],
},
],
relatedLocations: [
{
id: 1,
physicalLocation: {
artifactLocation: {
uri: "src/main/java/org/example/HelloController.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: { startLine: 13, startColumn: 25, endColumn: 54 },
},
message: { text: "user-provided value" },
},
],
},
],
},
],
t: "SarifInterpretationData",
results: [
{
ruleId: "java/sql-injection",
ruleIndex: 0,
rule: { id: "java/sql-injection", index: 0 },
message: {
text: "This query depends on a [user-provided value](1).",
},
sourceLocationPrefix: "/home/runner/work/sql2o-example/sql2o-example",
numTruncatedResults: 0,
numTotalResults: 1,
locations: [
{
physicalLocation: {
artifactLocation: {
uri: "src/main/java/org/example/HelloController.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: { startLine: 15, startColumn: 29, endColumn: 56 },
},
},
],
partialFingerprints: {
primaryLocationLineHash: "87e2d3cc5b365094:1",
primaryLocationStartColumnFingerprint: "16",
},
codeFlows: [
{
threadFlows: [
{
locations: [
{
location: {
physicalLocation: {
artifactLocation: {
uri: "src/main/java/org/example/HelloController.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: {
startLine: 13,
startColumn: 25,
endColumn: 54,
},
},
message: { text: "id : String" },
},
},
{
location: {
physicalLocation: {
artifactLocation: {
uri: "file:/",
index: 5,
},
region: {
startLine: 13,
startColumn: 25,
endColumn: 54,
},
},
message: { text: "id : String" },
},
},
{
location: {
physicalLocation: {
artifactLocation: {
uri: "src/main/java/org/example/HelloController.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: {
startLine: 15,
startColumn: 29,
endColumn: 56,
},
},
message: { text: "... + ..." },
},
},
],
},
],
},
],
relatedLocations: [
{
id: 1,
physicalLocation: {
artifactLocation: {
uri: "src/main/java/org/example/HelloController.java",
uriBaseId: "%SRCROOT%",
index: 0,
},
region: { startLine: 13, startColumn: 25, endColumn: 54 },
},
message: { text: "user-provided value" },
},
],
},
},
],
sourceLocationPrefix: "/home/runner/work/sql2o-example/sql2o-example",
numTruncatedResults: 0,
databaseUri: "file:///a/b/c/java",
resultsPath: "file:///a/b/c/results.sarif",
nonemptyRawResults: true,
offset: 0,
header: <AlertTableHeader sortState={undefined} />,
noResults: (
<AlertTableNoResults
nonemptyRawResults={true}
showRawResults={() => action("show-raw-results")}
/>
),
};
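The WithCodeFlows fixture above nests SARIF code flows three levels deep (codeFlows, then threadFlows, then locations). As a reading aid, here is a minimal walker typed loosely against the shape visible in the fixture; it is a sketch, not the extension's own SARIF handling.

```ts
// Loose structural type covering only the fields used below; the extension's
// real SARIF types are assumed to be richer than this.
interface SarifResultLike {
  codeFlows?: Array<{
    threadFlows: Array<{
      locations: Array<{ location?: { message?: { text?: string } } }>;
    }>;
  }>;
}

// Collects every thread-flow step message in order, e.g.
// ["id : String", "id : String", "... + ..."] for the fixture above.
function listFlowStepMessages(result: SarifResultLike): string[] {
  return (result.codeFlows ?? []).flatMap((codeFlow) =>
    codeFlow.threadFlows.flatMap((threadFlow) =>
      threadFlow.locations.map((step) => step.location?.message?.text ?? ""),
    ),
  );
}
```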

View File

@@ -28,16 +28,6 @@ ResultTablesHeader.args = {
resultSetNames: ["#select", "alerts"],
resultSet: {
t: "InterpretedResultSet",
schema: {
name: "#select",
rows: 15,
columns: [
{
name: "x",
kind: "s",
},
],
},
name: "#select",
interpretation: {
sourceLocationPrefix: "/home/bulk-builder/bulk-builder",

View File

@@ -18,6 +18,7 @@ const Template: StoryFn<typeof ClickableLocationComponent> = (args) => (
export const ClickableLocation = Template.bind({});
ClickableLocation.args = {
loc: {
type: "lineColumnLocation",
uri: "file:/home/runner/work/sql2o-example/sql2o-example/src/main/java/org/example/HelloController.java",
startLine: 22,
startColumn: 27,

View File

@@ -14,6 +14,7 @@ import {
} from "../../variant-analysis/shared/variant-analysis";
import { createMockVariantAnalysis } from "../../../test/factories/variant-analysis/shared/variant-analysis";
import { createMockRepositoryWithMetadata } from "../../../test/factories/variant-analysis/shared/repository";
import { ColumnKind } from "../../common/raw-result-types";
export default {
title: "Variant Analysis/Variant Analysis",
@@ -207,26 +208,22 @@ const repoResults: VariantAnalysisScannedRepositoryResult[] = [
variantAnalysisId: 1,
repositoryId: 1,
rawResults: {
schema: {
resultSet: {
name: "#select",
rows: 1,
totalRowCount: 1,
columns: [
{
kind: "i",
kind: ColumnKind.Integer,
},
],
},
resultSet: {
schema: {
name: "#select",
rows: 1,
columns: [
rows: [
[
{
kind: "i",
type: "number",
value: 60688,
},
],
},
rows: [[60688]],
],
},
fileLinkPrefix:
"https://github.com/octodemo/hello-world-1/blob/59a2a6c7d9dde7a6ecb77c2f7e8197d6925c143b",

View File

@@ -1,9 +1,9 @@
import { CodeQLCliServer } from "../codeql-cli/cli";
import { Logger } from "../common/logging";
import { transformBqrsResultSet } from "../common/bqrs-cli-types";
import { AnalysisRawResults } from "./shared/analysis-result";
import { MAX_RAW_RESULTS } from "./shared/result-limits";
import { SELECT_TABLE_NAME } from "../common/interface-types";
import { bqrsToResultSet } from "../common/bqrs-raw-results-mapper";
export async function extractRawResults(
cliServer: CodeQLCliServer,
@@ -34,9 +34,9 @@ export async function extractRawResults(
pageSize: MAX_RAW_RESULTS,
});
const resultSet = transformBqrsResultSet(schema, chunk);
const resultSet = bqrsToResultSet(schema, chunk);
const capped = !!chunk.next;
return { schema, resultSet, fileLinkPrefix, sourceLocationPrefix, capped };
return { resultSet, fileLinkPrefix, sourceLocationPrefix, capped };
}

View File

@@ -45,9 +45,8 @@ export async function exportVariantAnalysisResults(
): Promise<void> {
await withProgress(
async (progress: ProgressCallback, token: CancellationToken) => {
const variantAnalysis = await variantAnalysisManager.getVariantAnalysis(
variantAnalysisId,
);
const variantAnalysis =
await variantAnalysisManager.getVariantAnalysis(variantAnalysisId);
if (!variantAnalysis) {
void extLogger.log(
`Could not find variant analysis with id ${variantAnalysisId}`,
@@ -61,9 +60,8 @@ export async function exportVariantAnalysisResults(
throw new UserCancellationException("Cancelled");
}
const repoStates = await variantAnalysisManager.getRepoStates(
variantAnalysisId,
);
const repoStates =
await variantAnalysisManager.getRepoStates(variantAnalysisId);
void extLogger.log(
`Exporting variant analysis results for variant analysis with id ${variantAnalysis.id}`,

View File

@@ -1,4 +1,3 @@
import { CellValue } from "../common/bqrs-cli-types";
import { tryGetRemoteLocation } from "../common/bqrs-utils";
import { createRemoteFileRef } from "../common/location-link-utils";
import {
@@ -19,6 +18,7 @@ import type {
VariantAnalysisScannedRepositoryResult,
} from "./shared/variant-analysis";
import type { RepositoryWithMetadata } from "./shared/repository";
import { CellValue } from "../common/raw-result-types";
type MarkdownLinkType = "local" | "gist";
@@ -298,9 +298,9 @@ function generateMarkdownForRawResults(
analysisRawResults: AnalysisRawResults,
): string[] {
const tableRows: string[] = [];
const columnCount = analysisRawResults.schema.columns.length;
const columnCount = analysisRawResults.resultSet.columns.length;
// Table headers are the column names if they exist, and empty otherwise
const headers = analysisRawResults.schema.columns.map(
const headers = analysisRawResults.resultSet.columns.map(
(column) => column.name || "",
);
const tableHeader = `| ${headers.join(" | ")} |`;
@@ -327,23 +327,25 @@ function generateMarkdownForRawTableCell(
sourceLocationPrefix: string,
) {
let cellValue: string;
switch (typeof value) {
switch (value.type) {
case "string":
case "number":
case "boolean":
cellValue = `\`${convertNonPrintableChars(value.toString())}\``;
cellValue = `\`${convertNonPrintableChars(value.value.toString())}\``;
break;
case "object":
case "entity":
{
const url = tryGetRemoteLocation(
value.url,
value.value.url,
fileLinkPrefix,
sourceLocationPrefix,
);
if (url) {
cellValue = `[\`${convertNonPrintableChars(value.label)}\`](${url})`;
cellValue = `[\`${convertNonPrintableChars(
value.value.label,
)}\`](${url})`;
} else {
cellValue = `\`${convertNonPrintableChars(value.label)}\``;
cellValue = `\`${convertNonPrintableChars(value.value.label)}\``;
}
}
break;
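The rewritten switch above discriminates on value.type instead of typeof value. Read together with its branches, the new CellValue behaves roughly like the union below; this is an approximation inferred from the hunk, not the actual definition in common/raw-result-types.

```ts
// Approximate shape only; the real CellValue may differ in naming and in how
// entity URLs are represented.
type CellValueSketch =
  | { type: "string"; value: string }
  | { type: "number"; value: number }
  | { type: "boolean"; value: boolean }
  | { type: "entity"; value: { label: string; url?: string } };
```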

View File

@@ -18,10 +18,6 @@ export async function getRepositorySelection(
const selectedDbItem = dbManager.getSelectedDbItem();
if (selectedDbItem) {
switch (selectedDbItem.kind) {
case DbItemKind.LocalDatabase || DbItemKind.LocalList:
throw new UserCancellationException(
"Local databases and lists are not supported yet.",
);
case DbItemKind.RemoteSystemDefinedList:
return { repositoryLists: [selectedDbItem.listName] };
case DbItemKind.RemoteUserDefinedList:
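The branch removed above used `case DbItemKind.LocalDatabase || DbItemKind.LocalList:`, which compares the switch value against the single result of the `||` expression, so it could only ever match one of the two kinds. Matching both kinds requires fall-through cases, as in this self-contained sketch (the enum and function names are illustrative stand-ins, not the extension's own):

```ts
// Illustrative stand-ins, not the extension's real types.
enum ItemKind {
  LocalDatabase,
  LocalList,
  RemoteList,
}

function rejectLocalItems(kind: ItemKind): void {
  switch (kind) {
    // Fall-through: both local kinds share the branch below.
    case ItemKind.LocalDatabase:
    case ItemKind.LocalList:
      throw new Error("Local databases and lists are not supported yet.");
    default:
      return;
  }
}
```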

View File

@@ -183,9 +183,8 @@ async function copyExistingQueryPack(
if (
await cliServer.cliConstraints.supportsGenerateExtensiblePredicateMetadata()
) {
const metadata = await cliServer.generateExtensiblePredicateMetadata(
originalPackRoot,
);
const metadata =
await cliServer.generateExtensiblePredicateMetadata(originalPackRoot);
metadata.extensible_predicates.forEach((predicate) => {
if (predicate.path.endsWith(".ql")) {
toCopy.push(join(originalPackRoot, predicate.path));

View File

@@ -1,7 +1,6 @@
import { RawResultSet, ResultSetSchema } from "../../common/bqrs-cli-types";
import { RawResultSet } from "../../common/raw-result-types";
export interface AnalysisRawResults {
schema: ResultSetSchema;
resultSet: RawResultSet;
fileLinkPrefix: string;
sourceLocationPrefix: string;
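With the schema field dropped from AnalysisRawResults, consumers read column metadata from the result set itself, as the markdown-generation hunk earlier in this diff already does. A small consumer sketch under that assumption:

```ts
// Mirrors the header-building change shown earlier: columns come from
// resultSet.columns, with unnamed columns falling back to an empty string.
// Assumes the AnalysisRawResults interface defined in the hunk above.
function columnHeaders(rawResults: AnalysisRawResults): string[] {
  return rawResults.resultSet.columns.map((column) => column.name || "");
}
```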

View File

@@ -21,9 +21,8 @@ export const createVariantAnalysisContentProvider = (
}
const variantAnalysisId = parseInt(variantAnalysisIdString);
const variantAnalysis = await variantAnalysisManager.getVariantAnalysis(
variantAnalysisId,
);
const variantAnalysis =
await variantAnalysisManager.getVariantAnalysis(variantAnalysisId);
if (!variantAnalysis) {
void showAndLogWarningMessage(
extLogger,

View File

@@ -16,7 +16,7 @@ import {
} from "./shared/variant-analysis";
import { DisposableObject, DisposeHandler } from "../common/disposable-object";
import { EventEmitter } from "vscode";
import { unzipFile } from "../common/zip";
import { unzipToDirectory } from "../common/unzip";
import { readRepoTask, writeRepoTask } from "./repo-tasks-store";
type CacheKey = `${number}/${string}`;
@@ -106,7 +106,7 @@ export class VariantAnalysisResultsManager extends DisposableObject {
VariantAnalysisResultsManager.RESULTS_DIRECTORY,
);
await unzipFile(zipFilePath, unzippedFilesDirectory);
await unzipToDirectory(zipFilePath, unzippedFilesDirectory);
this._onResultDownloaded.fire({
variantAnalysisId,
@@ -180,9 +180,8 @@ export class VariantAnalysisResultsManager extends DisposableObject {
repositoryFullName,
);
const repoTask: VariantAnalysisRepositoryTask = await readRepoTask(
storageDirectory,
);
const repoTask: VariantAnalysisRepositoryTask =
await readRepoTask(storageDirectory);
if (!repoTask.databaseCommitSha || !repoTask.sourceLocationPrefix) {
throw new Error("Missing database commit SHA");

View File

@@ -1,6 +1,6 @@
import * as React from "react";
import { render as reactRender, screen } from "@testing-library/react";
import userEvent from "@testing-library/user-event";
import { userEvent } from "@testing-library/user-event";
import { CodePaths, CodePathsProps } from "../CodePaths";
import { createMockCodeFlows } from "../../../../../test/factories/variant-analysis/shared/CodeFlow";

View File

@@ -39,7 +39,7 @@ export const DeterminateProgressRing = ({ percent }: Props) => (
aria-valuemax={100}
aria-valuenow={percent}
>
<svg className="progress" viewBox="0 0 16 16">
<svg className="progress" viewBox="0 0 16 16" role="presentation">
<Background cx="8px" cy="8px" r="7px" />
<Determinate
style={{

View File

@@ -1,6 +1,6 @@
import * as React from "react";
import { styled } from "styled-components";
import classNames from "classnames";
import * as classNames from "classnames";
type Props = {
name: string;

View File

@@ -1,6 +1,6 @@
import * as React from "react";
import { Codicon } from "./Codicon";
import classNames from "classnames";
import * as classNames from "classnames";
type Props = {
label?: string;

View File

@@ -9,6 +9,7 @@ import { vscode } from "../vscode-api";
import TextButton from "../common/TextButton";
import { styled } from "styled-components";
import { RawCompareResultTable } from "./RawCompareResultTable";
import { InterpretedCompareResultTable } from "./InterpretedCompareResultTable";
interface Props {
queryInfo: SetComparisonQueryInfoMessage;
@@ -76,6 +77,13 @@ export default function CompareTable({ queryInfo, comparison }: Props) {
className={className}
/>
)}
{result.kind === "interpreted" && (
<InterpretedCompareResultTable
results={result.from}
databaseUri={queryInfo.databaseUri}
sourceLocationPrefix={result.sourceLocationPrefix}
/>
)}
</td>
<td>
{result.kind === "raw" && (
@@ -87,6 +95,13 @@ export default function CompareTable({ queryInfo, comparison }: Props) {
className={className}
/>
)}
{result.kind === "interpreted" && (
<InterpretedCompareResultTable
results={result.to}
databaseUri={queryInfo.databaseUri}
sourceLocationPrefix={result.sourceLocationPrefix}
/>
)}
</td>
</tr>
</tbody>

Some files were not shown because too many files have changed in this diff.