Compare commits
368 Commits
| SHA1 |
|---|
| 4499773f6f |
| 1d3b0e0ca9 |
| 98e503c768 |
| 62c3974d35 |
| ab1c2e0a0d |
| d918c41197 |
| 2cf5b39cfe |
| 13921bf8a2 |
| 12a97ecba2 |
| 26529232f4 |
| 1b425fc261 |
| 9c598c2f06 |
| 99a784f072 |
| 030488a459 |
| 377f7965b1 |
| 651a6fbda8 |
| 55ffdf7963 |
| cc907d2f31 |
| c4df9dbec8 |
| c384a631dc |
| b079690f0e |
| 4e863e995b |
| f992679e94 |
| ffe1704ac0 |
| bd2dd04ac6 |
| bbf4a03b03 |
| f38eb4895d |
| f559b59ee5 |
| c9d895ea42 |
| e57bbcb711 |
| b311991644 |
| 825054a271 |
| f7aa0a5ae5 |
| f486ccfac6 |
| 70f74d3baf |
| ebad1844df |
| a40a2edaf2 |
| 5f3d525ff8 |
| 8f5d88156f |
| 7c941fe8a8 |
| e9835cb376 |
| 7651a960b1 |
| 5b17a84733 |
| 22873a2f3c |
| 2debadd3bf |
| 6808d7dcaf |
| 3480aa5495 |
| a4d1ad57c7 |
| 628e0e924d |
| 16077f4124 |
| e6a68b3223 |
| 539a494914 |
| 9c29c5c9c6 |
| fd4b6022a9 |
| 58bbb59e39 |
| 5cc55530e1 |
| 3d74dbf48a |
| b7489d8f66 |
| e0b2aa9b45 |
| 10b4c15053 |
| 8bc83a336a |
| c84b858205 |
| e5f3a973a0 |
| 3682f05a42 |
| eb5ce029ba |
| 0ebff2d6e6 |
| d061634fe3 |
| 6b9410c67e |
| 8245e54e9c |
| 8ee744ef0c |
| da179b2580 |
| 0714f06adc |
| b2906257a1 |
| 18097e4676 |
| efcade84c6 |
| 7f27375d17 |
| 01e1f134be |
| 0695b0557f |
| c63f0c0833 |
| 3264ffaaa4 |
| 40959c8876 |
| ecea7f4638 |
| 0b15a166fa |
| c368424a15 |
| 5df1f80307 |
| 4b59045149 |
| a3a05131c7 |
| a9922b86fe |
| 431350ac0e |
| 5f8802fe7f |
| 5f21594d23 |
| 8964ec1a4d |
| aa270e57ec |
| fe7eb07f39 |
| c10da7f960 |
| 0c8390c094 |
| d41c63bf7d |
| a3bbdafabb |
| a78eef464b |
| e8348ac12a |
| 5efc3835db |
| c4ed6e88de |
| 51e6559145 |
| db8b419885 |
| 475d7cc535 |
| 1858de5ed0 |
| 642f4788fb |
| 7e70f8b758 |
| e417bea948 |
| 6b4be93169 |
| 061eaad743 |
| 8ff21d6c89 |
| 0d9f4e8c0f |
| 02288718dc |
| 615cf86fc0 |
| d63a209674 |
| 9d26304f7a |
| f73bda438a |
| 19b65a654e |
| 770127e67a |
| f373e6467a |
| e43b4e66a1 |
| 90ec003386 |
| 2f9aca785e |
| 405a6c9901 |
| 3611b1fe61 |
| 7b33441519 |
| 2a8f61dfbe |
| dcfd6d43c0 |
| 4e4d8b2f04 |
| 50197ba7b7 |
| 6c376d8721 |
| 82ada54103 |
| 0fdfeb3cd3 |
| 096d7719c6 |
| 619c485224 |
| 9367d5fb45 |
| 50ec97ad91 |
| fa5fcde987 |
| 5b33333404 |
| cf50624e4e |
| ccc9ed8b49 |
| 141f5381e7 |
| be054ca4f8 |
| 0a06452450 |
| b840d3f9bf |
| c829c30688 |
| 7947afb1b4 |
| c32b53613d |
| c058e7a128 |
| 1dc663339d |
| 351db4efc8 |
| 12d6ea3966 |
| e1adc7b428 |
| dc34adadcd |
| 6e06381640 |
| f55389cd26 |
| 6d930f53ba |
| f7616cf685 |
| f55d9820bd |
| befc2cddd2 |
| ef268e043f |
| e10d2aef8e |
| a97c5fe836 |
| 9b6eddddae |
| ed84825e65 |
| cb84003c31 |
| a1cd87aa3a |
| 7d3b015e20 |
| 7d0d11f526 |
| eb2520e7ca |
| 2675bf464e |
| b638449498 |
| e12bf63f9a |
| ffcc1f82f1 |
| 04d7b12dd8 |
| 3e33b00a75 |
| 12dc378fc1 |
| bbe99f4451 |
| 91b17f8fa6 |
| 69f1778309 |
| c55e801d00 |
| b363f77a83 |
| f55f46f95b |
| 5ee2f0efe1 |
| 1314a36ba4 |
| 2b8b621298 |
| aed4c9fc58 |
| 604001dfb1 |
| 1a03c0e4ac |
| a8c54b7640 |
| 9bb60c9474 |
| 0b2ce7a071 |
| 44145baca7 |
| dac7881ca3 |
| 31bd927959 |
| 46922de3c0 |
| 908a862dd1 |
| 6676ba99d0 |
| 6d3c6e598f |
| e1a10fc827 |
| 2ebdbaafa3 |
| a74dfea08b |
| 44ff380c86 |
| 0a41713253 |
| f5a5675da4 |
| 7a8cf55090 |
| 7932de3b7d |
| c8ba967a54 |
| f5d2f0e0ca |
| 2c7e2f4b7f |
| ee3ebe687b |
| 77024f0757 |
| c0e39886eb |
| 6339e7897d |
| 783a8a8772 |
| 8f2d865999 |
| d6d0825926 |
| 37de2e7f52 |
| 800c9e0c93 |
| a1bc7eb4d5 |
| 8ff45d2aee |
| 8ec19777b5 |
| 3e388fedeb |
| 83ffba2f08 |
| f1c4fef8ba |
| eec506a209 |
| 2ca0060c6a |
| 8b2d79a7f7 |
| c4db8b6d4b |
| 61d4305593 |
| 542e1d24aa |
| 47ec074cfb |
| e44835e795 |
| 2e28146a58 |
| 85e051a76d |
| 7027a61e63 |
| e8c5b27d92 |
| a3deec7875 |
| 6282a462c8 |
| dac5952e96 |
| ada6fcb908 |
| 8d2f902420 |
| fc3fe7a81e |
| 426cc95e9f |
| 9e40043fe0 |
| 14608fe5f7 |
| 22ed090685 |
| 2ca4097daf |
| f1d16015bf |
| 9a81ad05ed |
| 76e983d19c |
| a3015c0fa3 |
| 88d0bda049 |
| d2ec54e89e |
| 4559c5a38d |
| 16bd106abc |
| e5dcec8d8e |
| ad3565d3ad |
| 5fe12ecd74 |
| 318214642f |
| 227fe3ee6b |
| 978a82dd1a |
| 04f72a7da9 |
| a0954a1dc0 |
| cc1bf74370 |
| 2f7908773a |
| 0efd02979e |
| bd9776c4b7 |
| 35e9da83ec |
| 4f5ca0bca9 |
| 43f314b2b5 |
| 4bdf579ce2 |
| aba3039eef |
| bbff791c65 |
| 1ed50b3081 |
| 67336a24e7 |
| 48174c327d |
| 43f2539b42 |
| 462a7a722a |
| 4101bb252e |
| 4ff4e4827e |
| 8daa92ad49 |
| 371e83bff9 |
| 6fa0227a1e |
| c38e4ce265 |
| de06ed148d |
| 21bcd62ba8 |
| 76c034f79a |
| d8d394ce40 |
| 213f4ce92f |
| 2d1726763f |
| abfd9b3cbd |
| 6114f6a7fd |
| 61e674e9f6 |
| 006cc8c52a |
| ffe7fdcb46 |
| 49cceffe1b |
| 011782395a |
| 558009543f |
| aaef5bde2c |
| f52f595d56 |
| 50196d8430 |
| 2ecfbfbb42 |
| 9508dffe6d |
| b4a72bbcab |
| 4ceaaf92cc |
| ef28c9531b |
| c86c602e39 |
| 3bee2905e5 |
| 9ac8a15cd5 |
| 81b8104064 |
| 65f58b1f98 |
| 7e872aa6d6 |
| 0383a91a68 |
| bb6ebe5750 |
| 71aa3d145f |
| 2f1f80029b |
| ad18cfa284 |
| 92ed1c6ac9 |
| e71e04a8f1 |
| ef127c279c |
| 4afac5fa4d |
| 29ae97aa82 |
| 9319d7e8ef |
| 689db3713b |
| 0b9fcb884b |
| 23e29a1fdc |
| 90d636a026 |
| 3e3e12afb9 |
| 421f5d23ec |
| 0fa91f32cb |
| 3d21b203be |
| 3972b8f4c1 |
| 2d1707db00 |
| 72aa4f0561 |
| fd57cc95e9 |
| 04c392be7e |
| 38da598214 |
| 3f2c9b647c |
| 7d5b4369c1 |
| aade33fa88 |
| 2a8a90bdfc |
| f36048cc95 |
| 517feeca21 |
| 9436a49118 |
| 0e02cb08fd |
| 26244efc50 |
| 6339eeffe5 |
| 8cc2f598eb |
| 46a1dd57f4 |
| 9d99fc521e |
| bcf79354ee |
| 27a8636bac |
| 92a99938c9 |
| ed61eb0a95 |
| 50d495b522 |
| 526d5c2c44 |
| 1720f9201e |
| e62de1ca22 |
| d052ddb742 |
| af53a02ea5 |
| 8e2d18da8c |
| 2c5004387d |
| 3fc3b259ba |
| cd95f68692 |
| 59c3b1ba2f |
| fa85865fe5 |
.github/dependabot.yml (vendored, new file, 22 lines)

@@ -0,0 +1,22 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "extensions/ql-vscode"
    schedule:
      interval: "weekly"
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
  - package-ecosystem: "github-actions"
    directory: ".github"
    schedule:
      interval: "weekly"
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
.github/workflows/dependency-review.yml (vendored, new file, 16 lines)

@@ -0,0 +1,16 @@
name: 'Dependency Review'
on:
  - pull_request
  - workflow_dispatch

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v3
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v1
.github/workflows/main.yml (vendored, 9 changed lines)

@@ -118,6 +118,8 @@ jobs:
      - name: Run integration tests (Linux)
        if: matrix.os == 'ubuntu-latest'
        working-directory: extensions/ql-vscode
+       env:
+         VSCODE_CODEQL_GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
        run: |
          sudo apt-get install xvfb
          /usr/bin/xvfb-run npm run integration
@@ -125,6 +127,8 @@ jobs:
      - name: Run integration tests (Windows)
        if: matrix.os == 'windows-latest'
        working-directory: extensions/ql-vscode
+       env:
+         VSCODE_CODEQL_GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
        run: |
          npm run integration

@@ -135,7 +139,7 @@ jobs:
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest]
-       version: ['v2.3.3', 'v2.4.6', 'v2.5.9', 'v2.6.3', 'v2.7.6', 'v2.8.2', 'nightly']
+       version: ['v2.6.3', 'v2.7.6', 'v2.8.5', 'v2.9.4', 'v2.10.0', 'nightly']
    env:
      CLI_VERSION: ${{ matrix.version }}
      NIGHTLY_URL: ${{ needs.find-nightly.outputs.url }}
@@ -168,9 +172,6 @@
          if [[ "${{ matrix.version }}" == "nightly" ]]
          then
            REF="codeql-cli/latest"
-         elif [[ "${{ matrix.version }}" == "v2.2.6" || "${{ matrix.version }}" == "v2.3.3" ]]
-         then
-           REF="codeql-cli/v2.4.5"
          else
            REF="codeql-cli/${{ matrix.version }}"
          fi
.vscode/launch.json (vendored, 8 changed lines)
@@ -12,7 +12,6 @@
|
||||
// Add a reference to a workspace to open. Eg-
|
||||
// "${workspaceRoot}/../vscode-codeql-starter/vscode-codeql-starter.code-workspace"
|
||||
],
|
||||
"stopOnEntry": false,
|
||||
"sourceMaps": true,
|
||||
"outFiles": [
|
||||
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
|
||||
@@ -46,7 +45,6 @@
|
||||
"ts-node/register",
|
||||
"test/pure-tests/**/*.ts"
|
||||
],
|
||||
"port": 9229,
|
||||
"stopOnEntry": false,
|
||||
"sourceMaps": true,
|
||||
"console": "integratedTerminal",
|
||||
@@ -60,10 +58,10 @@
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
|
||||
"--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/no-workspace/index",
|
||||
"--disable-workspace-trust",
|
||||
"--disable-extensions",
|
||||
"--disable-gpu"
|
||||
],
|
||||
"stopOnEntry": false,
|
||||
"sourceMaps": true,
|
||||
"outFiles": [
|
||||
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
|
||||
@@ -77,11 +75,11 @@
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
|
||||
"--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/minimal-workspace/index",
|
||||
"--disable-workspace-trust",
|
||||
"--disable-extensions",
|
||||
"--disable-gpu",
|
||||
"${workspaceRoot}/extensions/ql-vscode/test/data"
|
||||
],
|
||||
"stopOnEntry": false,
|
||||
"sourceMaps": true,
|
||||
"outFiles": [
|
||||
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
|
||||
@@ -95,6 +93,7 @@
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
|
||||
"--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/cli-integration/index",
|
||||
"--disable-workspace-trust",
|
||||
"--disable-gpu",
|
||||
"--disable-extension",
|
||||
"eamodio.gitlens",
|
||||
@@ -121,7 +120,6 @@
|
||||
// This option overrides the CLI_VERSION option.
|
||||
// "CLI_PATH": "${workspaceRoot}/../semmle-code/target/intree/codeql/codeql",
|
||||
},
|
||||
"stopOnEntry": false,
|
||||
"sourceMaps": true,
|
||||
"outFiles": [
|
||||
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
|
||||
|
||||
@@ -29,7 +29,9 @@ Here are a few things you can do that will increase the likelihood of your pull
|
||||
|
||||
## Setting up a local build
|
||||
|
||||
Make sure you have installed recent versions of vscode (>= v1.52), node (>=12.16), and npm (>= 7.5.2). Earlier versions will probably work, but we no longer test against them.
|
||||
Make sure you have installed recent versions of vscode, node, and npm. Check the `engines` block in [`package.json`](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/package.json) file for compatible versions. Earlier versions may work, but we no longer test against them.
|
||||
|
||||
To automatically switch to the correct version of node, we recommend using [nvm](https://github.com/nvm-sh/nvm), which will pick-up the node version from `.nvmrc`.
|
||||
|
||||
### Installing all packages
|
||||
|
||||
@@ -56,8 +58,6 @@ We recommend that you keep `npm run watch` running in the backgound and you only
|
||||
|
||||
1. on first checkout
|
||||
2. whenever any of the non-TypeScript resources have changed
|
||||
3. on any change to files included in one of the webviews
|
||||
- **Important**: This is easy to forget. You must explicitly run `npm run build` whenever one of the files in the webview is changed. These are the files in the `src/view` and `src/compare/view` folders.
|
||||
|
||||
### Installing the extension
|
||||
|
||||
@@ -95,15 +95,21 @@ Running from a terminal, you _must_ set the `TEST_CODEQL_PATH` variable to point
|
||||
|
||||
### Running the integration tests
|
||||
|
||||
The _Launch Integration Tests - With CLI_ tests require a CLI instance in order to run. There are several environment variables you can use to configure this.
|
||||
You will need to run CLI tests using a task from inside of VS Code called _Launch Integration Tests - With CLI_.
|
||||
|
||||
From inside of VSCode, open the `launch.json` file and in the _Launch Integration Tests - With CLI_ uncomment and change the environment variables appropriate for your purpose.
|
||||
The CLI integration tests require the CodeQL standard libraries in order to run so you will need to clone a local copy of the `github/codeql` repository.
|
||||
|
||||
From inside of VSCode, open the `launch.json` file and in the _Launch Integration Tests - With CLI_ task, uncomment the `"${workspaceRoot}/../codeql"` line. If necessary, replace value with a path to your checkout, and then run the task.
|
||||
|
||||
## Releasing (write access required)
|
||||
|
||||
1. Double-check the `CHANGELOG.md` contains all desired change comments and has the version to be released with date at the top.
|
||||
* Go through all recent PRs and make sure they are properly accounted for.
|
||||
* Make sure all changelog entries have links back to their PR(s) if appropriate.
|
||||
1. Double-check that the node version we're using matches the one used for VS Code. If it doesn't, you will then need to update the node version in the following files:
|
||||
* `.nvmrc` - this will enable `nvm` to automatically switch to the correct node version when you're in the project folder
|
||||
* `.github/workflows/main.yml` - all the "node-version: <version>" settings
|
||||
* `.github/workflows/release.yml` - the "node-version: <version>" setting
|
||||
1. Double-check that the extension `package.json` and `package-lock.json` have the version you intend to release. If you are doing a patch release (as opposed to minor or major version) this should already be correct.
|
||||
1. Create a PR for this release:
|
||||
* This PR will contain any missing bits from steps 1 and 2. Most of the time, this will just be updating `CHANGELOG.md` with today's date.
|
||||
@@ -111,19 +117,39 @@ From inside of VSCode, open the `launch.json` file and in the _Launch Integratio
|
||||
* Create a new commit with a message the same as the branch name.
|
||||
* Create a PR for this branch.
|
||||
* Wait for the PR to be merged into `main`
|
||||
1. Trigger a release build on Actions by adding a new tag on branch `main` named after the release, as above. Note that when you push to upstream, you will need to fully qualify the ref. A command like this will work:
|
||||
1. Switch to `main` and add a new tag on the `main` branch with your new version (named after the release), e.g.
|
||||
```bash
|
||||
git checkout main
|
||||
git tag v1.3.6
|
||||
```
|
||||
|
||||
If you've accidentally created a badly named tag, you can delete it via
|
||||
```bash
|
||||
git tag -d badly-named-tag
|
||||
```
|
||||
1. Push the new tag up:
|
||||
|
||||
a. If you're using a fork of the repo:
|
||||
|
||||
```bash
|
||||
git push upstream refs/tags/v1.3.6
|
||||
```
|
||||
|
||||
b. If you're working straight in this repo:
|
||||
|
||||
```bash
|
||||
git push origin refs/tags/v1.3.6
|
||||
```
|
||||
|
||||
This will trigger [a release build](https://github.com/github/vscode-codeql/releases) on Actions.
|
||||
|
||||
* **IMPORTANT** Make sure you are on the `main` branch and your local checkout is fully updated when you add the tag.
|
||||
* If you accidentally add the tag to the wrong ref, you can just force push it to the right one later.
|
||||
|
||||
1. Monitor the status of the release build in the `Release` workflow in the Actions tab.
|
||||
1. Download the VSIX from the draft GitHub release at the top of [the releases page](https://github.com/github/vscode-codeql/releases) that is created when the release build finishes.
|
||||
1. Unzip the `.vsix` and inspect its `package.json` to make sure the version is what you expect,
|
||||
or look at the source if there's any doubt the right code is being shipped.
|
||||
1. Install the `.vsix` file into your vscode IDE and ensure the extension can load properly. Run a single command (like run query, or add database).
|
||||
1. Go to the actions tab of the vscode-codeql repository and select the [Release workflow](https://github.com/github/vscode-codeql/actions?query=workflow%3ARelease).
|
||||
- If there is an authentication failure when publishing, be sure to check that the authentication keys haven't expired. See below.
|
||||
1. Approve the deployments of the correct Release workflow. This will automatically publish to Open VSX and VS Code Marketplace.
|
||||
@@ -143,12 +169,7 @@ To regenerate the Open VSX token:
|
||||
1. Go to the [Access Tokens](https://open-vsx.org/user-settings/tokens) page and generate a new token.
|
||||
1. Update the secret in the `publish-open-vsx` environment in the project settings.
|
||||
|
||||
To regenerate the VSCode Marketplace token:
|
||||
|
||||
1. Follow the instructions on [getting a PAT for Azure DevOps](https://code.visualstudio.com/api/working-with-extensions/publishing-extension#get-a-personal-access-token).
|
||||
1. Update the secret in the `publish-vscode-marketplace` environment in the project settings.
|
||||
|
||||
Not that Azure DevOps PATs expire yearly and must be regenerated.
|
||||
To regenerate the VSCode Marketplace token, please see our internal documentation. Note that Azure DevOps PATs expire every 90 days and must be regenerated.
|
||||
|
||||
## Resources
|
||||
|
||||
|
||||
extensions/ql-vscode/.nvmrc (new file, 1 line)

@@ -0,0 +1 @@
v16.13.0
@@ -1,5 +1,44 @@
|
||||
# CodeQL for Visual Studio Code: Changelog
|
||||
|
||||
## 1.6.9 - 20 July 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.6.8 - 29 June 2022
|
||||
|
||||
- Fix a bug where quick queries cannot be compiled if the core libraries are not in the workspace. [#1411](https://github.com/github/vscode-codeql/pull/1411)
|
||||
- Fix a bug where quick evaluation of library files would display an error message when using CodeQL CLI v2.10.0. [#1412](https://github.com/github/vscode-codeql/pull/1412)
|
||||
|
||||
## 1.6.7 - 15 June 2022
|
||||
|
||||
- Prints end-of-query evaluator log summaries to the Query Log. [#1349](https://github.com/github/vscode-codeql/pull/1349)
|
||||
- Be consistent about casing in Query History menu. [#1369](https://github.com/github/vscode-codeql/pull/1369)
|
||||
- Fix quoting string columns in exported CSV results. [#1379](https://github.com/github/vscode-codeql/pull/1379)
|
||||
|
||||
## 1.6.6 - 17 May 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.6.5 - 25 April 2022
|
||||
|
||||
- Re-enable publishing to open-vsx. [#1285](https://github.com/github/vscode-codeql/pull/1285)
|
||||
|
||||
## 1.6.4 - 6 April 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.6.3 - 4 April 2022
|
||||
|
||||
- Fix a bug where the AST viewer was not synchronizing its selected node when the editor selection changes. [#1230](https://github.com/github/vscode-codeql/pull/1230)
|
||||
- Avoid synchronizing the `codeQL.cli.executablePath` setting. [#1252](https://github.com/github/vscode-codeql/pull/1252)
|
||||
- Open the directory in the finder/explorer (instead of just highlighting it) when running the "Open query directory" command from the query history view. [#1235](https://github.com/github/vscode-codeql/pull/1235)
|
||||
- Ensure query label in the query history view changes are persisted across restarts. [#1235](https://github.com/github/vscode-codeql/pull/1235)
|
||||
- Prints end-of-query evaluator log summaries to the Query Server Console. [#1264](https://github.com/github/vscode-codeql/pull/1264)
|
||||
|
||||
## 1.6.1 - 17 March 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.6.0 - 7 March 2022
|
||||
|
||||
- Fix a bug where database upgrades could not be resolved if some of the target pack's dependencies are outside of the workspace. [#1138](https://github.com/github/vscode-codeql/pull/1138)
|
||||
@@ -7,6 +46,7 @@
|
||||
- Fix a bug where queries took a long time to run if there are no folders in the workspace. [#1157](https://github.com/github/vscode-codeql/pull/1157)
|
||||
- [BREAKING CHANGE] The `codeQL.runningQueries.customLogDirectory` setting is deprecated and no longer has any function. Instead, all query log files will be stored in the query history directory, next to the query results. [#1178](https://github.com/github/vscode-codeql/pull/1178)
|
||||
- Add a _Open query directory_ command for query items. This command opens the directory containing all artifacts for a query. [#1179](https://github.com/github/vscode-codeql/pull/1179)
|
||||
- Add options to display evaluator logs for a given query run. Some information that was previously found in the query server output may now be found here. [#1186](https://github.com/github/vscode-codeql/pull/1186)
|
||||
|
||||
## 1.5.11 - 10 February 2022
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
 import * as gulp from 'gulp';
-import * as replace from 'gulp-replace';
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const replace = require('gulp-replace');

 /** Inject the application insights key into the telemetry file */
 export function injectAppInsightsKey() {
@@ -1,5 +1,4 @@
 import * as fs from 'fs-extra';
-import * as jsonc from 'jsonc-parser';
 import * as path from 'path';

 export interface DeployedPackage {
@@ -28,7 +27,7 @@ async function copyPackage(sourcePath: string, destPath: string): Promise<void>

 export async function deployPackage(packageJsonPath: string): Promise<DeployedPackage> {
   try {
-    const packageJson: any = jsonc.parse(await fs.readFile(packageJsonPath, 'utf8'));
+    const packageJson: any = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));

     // Default to development build; use flag --release to indicate release build.
     const isDevBuild = !process.argv.includes('--release');
@@ -1,8 +1,8 @@
|
||||
import * as gulp from 'gulp';
|
||||
import { compileTypeScript, watchTypeScript, copyViewCss, cleanOutput } from './typescript';
|
||||
import { compileTypeScript, watchTypeScript, copyViewCss, cleanOutput, watchCss } from './typescript';
|
||||
import { compileTextMateGrammar } from './textmate';
|
||||
import { copyTestData } from './tests';
|
||||
import { compileView } from './webpack';
|
||||
import { compileView, watchView } from './webpack';
|
||||
import { packageExtension } from './package';
|
||||
import { injectAppInsightsKey } from './appInsights';
|
||||
|
||||
@@ -14,5 +14,15 @@ export const buildWithoutPackage =
|
||||
)
|
||||
);
|
||||
|
||||
export { cleanOutput, compileTextMateGrammar, watchTypeScript, compileTypeScript, copyTestData, injectAppInsightsKey };
|
||||
export {
|
||||
cleanOutput,
|
||||
compileTextMateGrammar,
|
||||
watchTypeScript,
|
||||
watchView,
|
||||
compileTypeScript,
|
||||
copyTestData,
|
||||
injectAppInsightsKey,
|
||||
compileView,
|
||||
watchCss
|
||||
};
|
||||
export default gulp.series(buildWithoutPackage, injectAppInsightsKey, packageExtension);
|
||||
|
||||
@@ -219,14 +219,14 @@ function transformFile(yaml: any) {
 }

 export function transpileTextMateGrammar() {
-  return through.obj((file: Vinyl, _encoding: string, callback: Function): void => {
+  return through.obj((file: Vinyl, _encoding: string, callback: (err: string | null, file: Vinyl | PluginError) => void): void => {
     if (file.isNull()) {
       callback(null, file);
     }
     else if (file.isBuffer()) {
       const buf: Buffer = file.contents;
       const yamlText: string = buf.toString('utf8');
-      const jsonData: any = jsYaml.safeLoad(yamlText);
+      const jsonData: any = jsYaml.load(yamlText);
       transformFile(jsonData);

       file.contents = Buffer.from(JSON.stringify(jsonData, null, 2), 'utf8');
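For context on the change from `safeLoad` to `load` above: the dependency list later in this comparison bumps `js-yaml` from `^3.14.0` to `^4.1.0`, and in js-yaml 4 the old `safeLoad` entry point was removed because `load` itself no longer instantiates arbitrary types. A minimal, self-contained sketch of the v4-style call (not taken from this repository):

```typescript
import * as jsYaml from 'js-yaml';

// js-yaml 4.x: load() is safe by default, so safeLoad() no longer exists.
// It returns `unknown`, so narrow or cast the result before using it.
const grammar = jsYaml.load('name: codeql\nscopeName: source.ql') as {
  name: string;
  scopeName: string;
};

console.log(grammar.name, grammar.scopeName); // "codeql source.ql"
```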
@@ -16,7 +16,8 @@
   "noImplicitReturns": true,
   "experimentalDecorators": true,
   "noUnusedLocals": true,
-  "noUnusedParameters": true
+  "noUnusedParameters": true,
+  "esModuleInterop": true
  },
  "include": ["*.ts"]
 }
@@ -40,6 +40,10 @@ export function watchTypeScript() {
   gulp.watch('src/**/*.ts', compileTypeScript);
 }

+export function watchCss() {
+  gulp.watch('src/**/*.css', copyViewCss);
+}
+
 /** Copy CSS files for the results view into the output directory. */
 export function copyViewCss() {
   return gulp.src('src/**/view/*.css')
@@ -2,7 +2,23 @@ import * as webpack from 'webpack';
|
||||
import { config } from './webpack.config';
|
||||
|
||||
export function compileView(cb: (err?: Error) => void) {
|
||||
webpack(config).run((error, stats) => {
|
||||
doWebpack(config, true, cb);
|
||||
}
|
||||
|
||||
export function watchView(cb: (err?: Error) => void) {
|
||||
const watchConfig = {
|
||||
...config,
|
||||
watch: true,
|
||||
watchOptions: {
|
||||
aggregateTimeout: 200,
|
||||
poll: 1000,
|
||||
}
|
||||
};
|
||||
doWebpack(watchConfig, false, cb);
|
||||
}
|
||||
|
||||
function doWebpack(internalConfig: webpack.Configuration, failOnError: boolean, cb: (err?: Error) => void) {
|
||||
const resultCb = (error: Error | undefined, stats?: webpack.Stats) => {
|
||||
if (error) {
|
||||
cb(error);
|
||||
}
|
||||
@@ -20,11 +36,16 @@ export function compileView(cb: (err?: Error) => void) {
|
||||
errors: true
|
||||
}));
|
||||
if (stats.hasErrors()) {
|
||||
cb(new Error('Compilation errors detected.'));
|
||||
return;
|
||||
if (failOnError) {
|
||||
cb(new Error('Compilation errors detected.'));
|
||||
return;
|
||||
} else {
|
||||
console.error('Compilation errors detected.');
|
||||
}
|
||||
}
|
||||
cb();
|
||||
}
|
||||
};
|
||||
|
||||
cb();
|
||||
});
|
||||
webpack(internalConfig, resultCb);
|
||||
}
|
||||
|
||||
extensions/ql-vscode/media/dark/github.svg (new file, 4 lines, 1.3 KiB)

@@ -0,0 +1,4 @@
<!-- From https://github.com/microsoft/vscode-icons -->
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97553 0C3.57186 0 0 3.57186 0 7.97553C0 11.4985 2.29969 14.4832 5.43119 15.5596C5.82263 15.6086 5.96942 15.3639 5.96942 15.1682C5.96942 14.9725 5.96942 14.4832 5.96942 13.7982C3.76758 14.2875 3.27829 12.7217 3.27829 12.7217C2.93578 11.792 2.39755 11.5474 2.39755 11.5474C1.66361 11.0581 2.44648 11.0581 2.44648 11.0581C3.22936 11.107 3.66972 11.8899 3.66972 11.8899C4.40367 13.1131 5.52905 12.7706 5.96942 12.5749C6.01835 12.0367 6.263 11.6942 6.45872 11.4985C4.69725 11.3028 2.83792 10.6177 2.83792 7.53517C2.83792 6.65443 3.1315 5.96942 3.66972 5.38226C3.62079 5.23547 3.32722 4.40367 3.76758 3.32722C3.76758 3.32722 4.4526 3.1315 5.96942 4.15902C6.6055 3.9633 7.29052 3.91437 7.97553 3.91437C8.66055 3.91437 9.34557 4.01223 9.98165 4.15902C11.4985 3.1315 12.1835 3.32722 12.1835 3.32722C12.6239 4.40367 12.3303 5.23547 12.2813 5.43119C12.7706 5.96942 13.1131 6.70336 13.1131 7.5841C13.1131 10.6667 11.2538 11.3028 9.49235 11.4985C9.78593 11.7431 10.0306 12.2324 10.0306 12.9664C10.0306 14.0428 10.0306 14.8746 10.0306 15.1682C10.0306 15.3639 10.1774 15.6086 10.5688 15.5596C13.7492 14.4832 16 11.4985 16 7.97553C15.9511 3.57186 12.3792 0 7.97553 0Z" fill="#C5C5C5"/>
|
||||
</svg>
extensions/ql-vscode/media/light/github.svg (new file, 11 lines, 1.5 KiB)

@@ -0,0 +1,11 @@
<!-- From https://github.com/microsoft/vscode-icons -->
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0)">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97578 0C3.57211 0 0.000244141 3.57186 0.000244141 7.97553C0.000244141 11.4985 2.29994 14.4832 5.43144 15.5596C5.82287 15.6086 5.96966 15.3639 5.96966 15.1682C5.96966 14.9725 5.96966 14.4832 5.96966 13.7982C3.76783 14.2875 3.27853 12.7217 3.27853 12.7217C2.93602 11.792 2.3978 11.5474 2.3978 11.5474C1.66385 11.0581 2.44673 11.0581 2.44673 11.0581C3.2296 11.107 3.66997 11.8899 3.66997 11.8899C4.40391 13.1131 5.5293 12.7706 5.96966 12.5749C6.01859 12.0367 6.26324 11.6942 6.45896 11.4985C4.69749 11.3028 2.83816 10.6177 2.83816 7.53517C2.83816 6.65443 3.13174 5.96942 3.66997 5.38226C3.62104 5.23547 3.32746 4.40367 3.76783 3.32722C3.76783 3.32722 4.45284 3.1315 5.96966 4.15902C6.60575 3.9633 7.29076 3.91437 7.97578 3.91437C8.66079 3.91437 9.34581 4.01223 9.98189 4.15902C11.4987 3.1315 12.1837 3.32722 12.1837 3.32722C12.6241 4.40367 12.3305 5.23547 12.2816 5.43119C12.7709 5.96942 13.1134 6.70336 13.1134 7.5841C13.1134 10.6667 11.2541 11.3028 9.4926 11.4985C9.78618 11.7431 10.0308 12.2324 10.0308 12.9664C10.0308 14.0428 10.0308 14.8746 10.0308 15.1682C10.0308 15.3639 10.1776 15.6086 10.5691 15.5596C13.7495 14.4832 16.0002 11.4985 16.0002 7.97553C15.9513 3.57186 12.3794 0 7.97578 0Z" fill="#424242"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0">
|
||||
<rect width="16" height="16" fill="white" transform="translate(0.000244141)"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
extensions/ql-vscode/package-lock.json (generated, 4615 changed lines): file diff suppressed because it is too large.
@@ -4,7 +4,7 @@
|
||||
"description": "CodeQL for Visual Studio Code",
|
||||
"author": "GitHub",
|
||||
"private": true,
|
||||
"version": "1.6.0",
|
||||
"version": "1.6.9",
|
||||
"publisher": "GitHub",
|
||||
"license": "MIT",
|
||||
"icon": "media/VS-marketplace-CodeQL-icon.png",
|
||||
@@ -14,15 +14,14 @@
|
||||
},
|
||||
"engines": {
|
||||
"vscode": "^1.59.0",
|
||||
"node": "^14.17.1",
|
||||
"npm": "^7.20.6"
|
||||
"node": "^16.13.0",
|
||||
"npm": ">=7.20.6"
|
||||
},
|
||||
"categories": [
|
||||
"Programming Languages"
|
||||
],
|
||||
"extensionDependencies": [
|
||||
"hbenl.vscode-test-explorer",
|
||||
"ms-vscode.test-adapter-converter"
|
||||
"hbenl.vscode-test-explorer"
|
||||
],
|
||||
"capabilities": {
|
||||
"untrustedWorkspaces": {
|
||||
@@ -45,6 +44,7 @@
|
||||
"onCommand:codeQLDatabases.chooseDatabaseFolder",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseArchive",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseInternet",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseGithub",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseLgtm",
|
||||
"onCommand:codeQL.setCurrentDatabase",
|
||||
"onCommand:codeQL.viewAst",
|
||||
@@ -54,6 +54,7 @@
|
||||
"onCommand:codeQL.chooseDatabaseFolder",
|
||||
"onCommand:codeQL.chooseDatabaseArchive",
|
||||
"onCommand:codeQL.chooseDatabaseInternet",
|
||||
"onCommand:codeQL.chooseDatabaseGithub",
|
||||
"onCommand:codeQL.chooseDatabaseLgtm",
|
||||
"onCommand:codeQLDatabases.chooseDatabase",
|
||||
"onCommand:codeQLDatabases.setCurrentDatabase",
|
||||
@@ -134,7 +135,7 @@
|
||||
"title": "CodeQL",
|
||||
"properties": {
|
||||
"codeQL.cli.executablePath": {
|
||||
"scope": "window",
|
||||
"scope": "machine-overridable",
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"markdownDescription": "Path to the CodeQL executable that should be used by the CodeQL extension. The executable is named `codeql` on Linux/Mac and `codeql.exe` on Windows. If empty, the extension will look for a CodeQL executable on your shell PATH, or if CodeQL is not on your PATH, download and manage its own CodeQL executable."
|
||||
@@ -223,7 +224,7 @@
|
||||
},
|
||||
"codeQL.queryHistory.format": {
|
||||
"type": "string",
|
||||
"default": "%q on %d - %s, %r result count [%t]",
|
||||
"default": "%q on %d - %s %r [%t]",
|
||||
"markdownDescription": "Default string for how to label query history items.\n* %t is the time of the query\n* %q is the human-readable query name\n* %f is the query file name\n* %d is the database name\n* %r is the number of results\n* %s is a status string"
|
||||
},
|
||||
"codeQL.queryHistory.ttl": {
|
||||
@@ -258,7 +259,7 @@
|
||||
"scope": "application",
|
||||
"description": "Specifies whether or not to write telemetry events to the extension log."
|
||||
},
|
||||
"codeQL.remoteQueries.repositoryLists": {
|
||||
"codeQL.variantAnalysis.repositoryLists": {
|
||||
"type": [
|
||||
"object",
|
||||
null
|
||||
@@ -272,14 +273,14 @@
|
||||
}
|
||||
},
|
||||
"default": null,
|
||||
"markdownDescription": "[For internal use only] Lists of GitHub repositories that you want to query remotely. This should be a JSON object where each key is a user-specified name for this repository list, and the value is an array of GitHub repositories (of the form `<owner>/<repo>`)."
|
||||
"markdownDescription": "[For internal use only] Lists of GitHub repositories that you want to run variant analysis against. This should be a JSON object where each key is a user-specified name for this repository list, and the value is an array of GitHub repositories (of the form `<owner>/<repo>`)."
|
||||
},
|
||||
"codeQL.remoteQueries.controllerRepo": {
|
||||
"codeQL.variantAnalysis.controllerRepo": {
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"pattern": "^$|^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+/[a-zA-Z0-9-_]+$",
|
||||
"patternErrorMessage": "Please enter a valid GitHub repository",
|
||||
"markdownDescription": "[For internal use only] The name of the GitHub repository where you can view the progress and results of the \"Run Remote query\" command. The repository should be of the form `<owner>/<repo>`)."
|
||||
"markdownDescription": "[For internal use only] The name of the GitHub repository where you can view the progress and results of the \"Run Variant Analysis\" command. The repository should be of the form `<owner>/<repo>`)."
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -297,12 +298,12 @@
|
||||
"title": "CodeQL: Run Query on Multiple Databases"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runRemoteQuery",
|
||||
"title": "CodeQL: Run Remote Query"
|
||||
"command": "codeQL.runVariantAnalysis",
|
||||
"title": "CodeQL: Run Variant Analysis"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.showFakeRemoteQueryResults",
|
||||
"title": "CodeQL: [Internal] Show fake remote query results"
|
||||
"command": "codeQL.exportVariantAnalysisResults",
|
||||
"title": "CodeQL: Export Variant Analysis Results"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQueries",
|
||||
@@ -360,6 +361,14 @@
|
||||
"dark": "media/dark/cloud-download.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"title": "Download Database from GitHub",
|
||||
"icon": {
|
||||
"light": "media/light/github.svg",
|
||||
"dark": "media/dark/github.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"title": "Download from LGTM",
|
||||
@@ -432,6 +441,10 @@
|
||||
"command": "codeQL.chooseDatabaseInternet",
|
||||
"title": "CodeQL: Download Database"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseGithub",
|
||||
"title": "CodeQL: Download Database from GitHub"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseLgtm",
|
||||
"title": "CodeQL: Download Database from LGTM"
|
||||
@@ -458,7 +471,7 @@
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQuery",
|
||||
"title": "Open the query that produced these results",
|
||||
"title": "Open the Query that Produced these Results",
|
||||
"icon": {
|
||||
"light": "media/light/edit.svg",
|
||||
"dark": "media/dark/edit.svg"
|
||||
@@ -510,7 +523,15 @@
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"title": "Open query directory"
|
||||
"title": "Open Query Directory"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"title": "Show Evaluator Log (Raw)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"title": "Show Evaluator Log (Summary)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.cancel",
|
||||
@@ -520,6 +541,10 @@
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
"title": "Show Query Text"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.exportResults",
|
||||
"title": "Export Results"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvResults",
|
||||
"title": "View Results (CSV)"
|
||||
@@ -546,7 +571,11 @@
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openOnGithub",
|
||||
"title": "Open Remote Query on GitHub"
|
||||
"title": "Open Variant Analysis on GitHub"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.copyRepoList",
|
||||
"title": "Copy Repository List"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryResults.nextPathStep",
|
||||
@@ -608,6 +637,11 @@
|
||||
"when": "view == codeQLDatabases",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"when": "config.codeQL.canary && view == codeQLDatabases",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"when": "view == codeQLDatabases",
|
||||
@@ -710,11 +744,26 @@
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && !hasRemoteServer"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"group": "9_qlCommands",
|
||||
"when": "codeql.supportsEvalLog && viewItem == rawResultsItem || codeql.supportsEvalLog && viewItem == interpretedResultsItem || codeql.supportsEvalLog && viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"group": "9_qlCommands",
|
||||
"when": "codeql.supportsEvalLog && viewItem == rawResultsItem || codeql.supportsEvalLog && viewItem == interpretedResultsItem || codeql.supportsEvalLog && viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.exportResults",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && viewItem == remoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvResults",
|
||||
"group": "9_qlCommands",
|
||||
@@ -743,7 +792,12 @@
|
||||
{
|
||||
"command": "codeQLQueryHistory.openOnGithub",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == remoteResultsItem || viewItem == inProgressRemoteResultsItem"
|
||||
"when": "viewItem == remoteResultsItem || viewItem == inProgressRemoteResultsItem || viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.copyRepoList",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == remoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLTests.showOutputDifferences",
|
||||
@@ -802,11 +856,11 @@
|
||||
"when": "resourceLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runRemoteQuery",
|
||||
"command": "codeQL.runVariantAnalysis",
|
||||
"when": "config.codeQL.canary && editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.showFakeRemoteQueryResults",
|
||||
"command": "codeQL.exportVariantAnalysisResults",
|
||||
"when": "config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
@@ -837,6 +891,10 @@
|
||||
"command": "codeQL.viewCfg",
|
||||
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseGithub",
|
||||
"when": "config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.setCurrentDatabase",
|
||||
"when": "false"
|
||||
@@ -881,6 +939,10 @@
|
||||
"command": "codeQLDatabases.chooseDatabaseInternet",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"when": "false"
|
||||
@@ -905,6 +967,14 @@
|
||||
"command": "codeQLQueryHistory.showQueryLog",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"when": "false"
|
||||
@@ -917,10 +987,18 @@
|
||||
"command": "codeQLQueryHistory.openOnGithub",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.copyRepoList",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.exportResults",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvResults",
|
||||
"when": "false"
|
||||
@@ -984,7 +1062,7 @@
|
||||
"when": "editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runRemoteQuery",
|
||||
"command": "codeQL.runVariantAnalysis",
|
||||
"when": "config.codeQL.canary && editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
@@ -1053,6 +1131,8 @@
|
||||
"build": "gulp",
|
||||
"watch": "npm-run-all -p watch:*",
|
||||
"watch:extension": "tsc --watch",
|
||||
"watch:webpack": "gulp watchView",
|
||||
"watch:css": "gulp watchCss",
|
||||
"test": "mocha --exit -r ts-node/register test/pure-tests/**/*.ts",
|
||||
"preintegration": "rm -rf ./out/vscode-tests && gulp",
|
||||
"integration": "node ./out/vscode-tests/run-integration-tests.js no-workspace,minimal-workspace",
|
||||
@@ -1064,22 +1144,27 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@octokit/rest": "^18.5.6",
|
||||
"@octokit/plugin-retry": "^3.0.9",
|
||||
"@primer/octicons-react": "^16.3.0",
|
||||
"@primer/react": "^34.3.0",
|
||||
"@primer/react": "^35.0.0",
|
||||
"@vscode/codicons": "^0.0.31",
|
||||
"@vscode/webview-ui-toolkit": "^1.0.0",
|
||||
"child-process-promise": "^2.2.1",
|
||||
"classnames": "~2.2.6",
|
||||
"d3": "^6.3.1",
|
||||
"d3-graphviz": "^2.6.1",
|
||||
"fs-extra": "^9.0.1",
|
||||
"glob-promise": "^3.4.0",
|
||||
"js-yaml": "^3.14.0",
|
||||
"minimist": "~1.2.5",
|
||||
"fs-extra": "^10.0.1",
|
||||
"glob-promise": "^4.2.2",
|
||||
"js-yaml": "^4.1.0",
|
||||
"minimist": "~1.2.6",
|
||||
"nanoid": "^3.2.0",
|
||||
"node-fetch": "~2.6.7",
|
||||
"path-browserify": "^1.0.1",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"semver": "~7.3.2",
|
||||
"source-map": "^0.7.4",
|
||||
"source-map-support": "^0.5.21",
|
||||
"stream": "^0.0.2",
|
||||
"stream-chain": "~2.2.4",
|
||||
"stream-json": "~1.7.3",
|
||||
@@ -1093,21 +1178,21 @@
|
||||
"vscode-languageclient": "^6.1.3",
|
||||
"vscode-test-adapter-api": "~1.7.0",
|
||||
"vscode-test-adapter-util": "~0.7.0",
|
||||
"zip-a-folder": "~0.0.12"
|
||||
"zip-a-folder": "~1.1.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/chai": "^4.1.7",
|
||||
"@types/chai-as-promised": "~7.1.2",
|
||||
"@types/child-process-promise": "^2.2.1",
|
||||
"@types/classnames": "~2.2.9",
|
||||
"@types/del": "^4.0.0",
|
||||
"@types/d3": "^6.2.0",
|
||||
"@types/d3-graphviz": "^2.6.6",
|
||||
"@types/del": "^4.0.0",
|
||||
"@types/fs-extra": "^9.0.6",
|
||||
"@types/glob": "^7.1.1",
|
||||
"@types/google-protobuf": "^3.2.7",
|
||||
"@types/gulp": "^4.0.9",
|
||||
"@types/gulp-replace": "0.0.31",
|
||||
"@types/gulp-replace": "^1.1.0",
|
||||
"@types/gulp-sourcemaps": "0.0.32",
|
||||
"@types/js-yaml": "^3.12.5",
|
||||
"@types/jszip": "~3.1.6",
|
||||
@@ -1128,7 +1213,7 @@
|
||||
"@types/tmp": "^0.1.0",
|
||||
"@types/unzipper": "~0.10.1",
|
||||
"@types/vscode": "^1.59.0",
|
||||
"@types/webpack": "^4.32.1",
|
||||
"@types/webpack": "^5.28.0",
|
||||
"@types/xml2js": "~0.4.4",
|
||||
"@typescript-eslint/eslint-plugin": "^4.26.0",
|
||||
"@typescript-eslint/parser": "^4.26.0",
|
||||
@@ -1142,39 +1227,38 @@
|
||||
"eslint-plugin-react": "~7.19.0",
|
||||
"glob": "^7.1.4",
|
||||
"gulp": "^4.0.2",
|
||||
"gulp-replace": "^1.0.0",
|
||||
"gulp-sourcemaps": "^2.6.5",
|
||||
"gulp-replace": "^1.1.3",
|
||||
"gulp-sourcemaps": "^3.0.0",
|
||||
"gulp-typescript": "^5.0.1",
|
||||
"husky": "~4.2.5",
|
||||
"jsonc-parser": "^2.3.0",
|
||||
"lint-staged": "~10.2.2",
|
||||
"mocha": "^9.1.3",
|
||||
"mocha": "^10.0.0",
|
||||
"mocha-sinon": "~2.1.2",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"prettier": "~2.0.5",
|
||||
"proxyquire": "~2.1.3",
|
||||
"sinon": "~9.0.0",
|
||||
"sinon": "~13.0.1",
|
||||
"sinon-chai": "~3.5.0",
|
||||
"style-loader": "~0.23.1",
|
||||
"through2": "^3.0.1",
|
||||
"style-loader": "~3.3.1",
|
||||
"through2": "^4.0.2",
|
||||
"ts-loader": "^8.1.0",
|
||||
"ts-node": "^8.3.0",
|
||||
"ts-node": "^10.7.0",
|
||||
"ts-protoc-gen": "^0.9.0",
|
||||
"typescript": "^4.3.2",
|
||||
"typescript": "^4.5.5",
|
||||
"typescript-formatter": "^7.2.2",
|
||||
"vsce": "^1.65.0",
|
||||
"vsce": "^2.7.0",
|
||||
"vscode-test": "^1.4.0",
|
||||
"webpack": "^5.28.0",
|
||||
"webpack": "^5.62.2",
|
||||
"webpack-cli": "^4.6.0"
|
||||
},
|
||||
"husky": {
|
||||
"hooks": {
|
||||
"pre-commit": "npm run format-staged",
|
||||
"pre-push": "npm run lint"
|
||||
"pre-push": "npm run lint && scripts/forbid-mocha-only"
|
||||
}
|
||||
},
|
||||
"lint-staged": {
|
||||
"./**/*.{json,css,scss,md}": [
|
||||
"./**/*.{json,css,scss}": [
|
||||
"prettier --write"
|
||||
],
|
||||
"./**/*.{ts,tsx}": [
|
||||
@@ -1183,6 +1267,6 @@
|
||||
]
|
||||
},
|
||||
"resolutions": {
|
||||
"glob-parent": "~6.0.0"
|
||||
"glob-parent": "6.0.0"
|
||||
}
|
||||
}
|
||||
|
||||
extensions/ql-vscode/scripts/forbid-mocha-only (new executable file, 6 lines)

@@ -0,0 +1,6 @@
if grep -rq --include '*.test.ts' 'it.only\|describe.only' './test' './src'; then
  echo 'There is a .only() in the tests. Please remove it.'
  exit 1;
else
  exit 0;
fi
@@ -10,7 +10,8 @@ import {
|
||||
TextEditorSelectionChangeEvent,
|
||||
TextEditorSelectionChangeKind,
|
||||
Location,
|
||||
Range
|
||||
Range,
|
||||
Uri
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
@@ -104,7 +105,7 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
|
||||
export class AstViewer extends DisposableObject {
|
||||
private treeView: TreeView<AstItem>;
|
||||
private treeDataProvider: AstViewerDataProvider;
|
||||
private currentFile: string | undefined;
|
||||
private currentFileUri: Uri | undefined;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
@@ -125,12 +126,12 @@ export class AstViewer extends DisposableObject {
|
||||
this.push(window.onDidChangeTextEditorSelection(this.updateTreeSelection, this));
|
||||
}
|
||||
|
||||
updateRoots(roots: AstItem[], db: DatabaseItem, fileName: string) {
|
||||
updateRoots(roots: AstItem[], db: DatabaseItem, fileUri: Uri) {
|
||||
this.treeDataProvider.roots = roots;
|
||||
this.treeDataProvider.db = db;
|
||||
this.treeDataProvider.refresh();
|
||||
this.treeView.message = `AST for ${path.basename(fileName)}`;
|
||||
this.currentFile = fileName;
|
||||
this.treeView.message = `AST for ${path.basename(fileUri.fsPath)}`;
|
||||
this.currentFileUri = fileUri;
|
||||
// Handle error on reveal. This could happen if
|
||||
// the tree view is disposed during the reveal.
|
||||
this.treeView.reveal(roots[0], { focus: false })?.then(
|
||||
@@ -174,7 +175,7 @@ export class AstViewer extends DisposableObject {
|
||||
|
||||
if (
|
||||
this.treeView.visible &&
|
||||
e.textEditor.document.uri.fsPath === this.currentFile &&
|
||||
e.textEditor.document.uri.fsPath === this.currentFileUri?.fsPath &&
|
||||
e.selections.length === 1
|
||||
) {
|
||||
const selection = e.selections[0];
|
||||
@@ -199,6 +200,6 @@ export class AstViewer extends DisposableObject {
|
||||
this.treeDataProvider.db = undefined;
|
||||
this.treeDataProvider.refresh();
|
||||
this.treeView.message = undefined;
|
||||
this.currentFile = undefined;
|
||||
this.currentFileUri = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import * as vscode from 'vscode';
|
||||
import * as Octokit from '@octokit/rest';
|
||||
import { retry } from '@octokit/plugin-retry';
|
||||
|
||||
const GITHUB_AUTH_PROVIDER_ID = 'github';
|
||||
|
||||
// 'repo' scope should be enough for triggering workflows. For a comprehensive list, see:
|
||||
// We need 'repo' scope for triggering workflows and 'gist' scope for exporting results to Gist.
|
||||
// For a comprehensive list of scopes, see:
|
||||
// https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps
|
||||
const SCOPES = ['repo'];
|
||||
const SCOPES = ['repo', 'gist'];
|
||||
|
||||
/**
|
||||
/**
|
||||
* Handles authentication to GitHub, using the VS Code [authentication API](https://code.visualstudio.com/api/references/vscode-api#authentication).
|
||||
*/
|
||||
export class Credentials {
|
||||
@@ -18,6 +20,15 @@ export class Credentials {
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
private constructor() { }
|
||||
|
||||
/**
|
||||
* Initializes an instance of credentials with an octokit instance.
|
||||
*
|
||||
* Do not call this method until you know you actually need an instance of credentials.
|
||||
* since calling this method will require the user to log in.
|
||||
*
|
||||
* @param context The extension context.
|
||||
* @returns An instance of credentials.
|
||||
*/
|
||||
static async initialize(context: vscode.ExtensionContext): Promise<Credentials> {
|
||||
const c = new Credentials();
|
||||
c.registerListeners(context);
|
||||
@@ -25,12 +36,31 @@ export class Credentials {
|
||||
return c;
|
||||
}
|
||||
|
||||
private async createOctokit(createIfNone: boolean): Promise<Octokit.Octokit | undefined> {
|
||||
/**
|
||||
* Initializes an instance of credentials with an octokit instance using
|
||||
* a token from the user's GitHub account. This method is meant to be
|
||||
* used non-interactive environments such as tests.
|
||||
*
|
||||
* @param overrideToken The GitHub token to use for authentication.
|
||||
* @returns An instance of credentials.
|
||||
*/
|
||||
static async initializeWithToken(overrideToken: string) {
|
||||
const c = new Credentials();
|
||||
c.octokit = await c.createOctokit(false, overrideToken);
|
||||
return c;
|
||||
}
|
||||
|
||||
private async createOctokit(createIfNone: boolean, overrideToken?: string): Promise<Octokit.Octokit | undefined> {
|
||||
if (overrideToken) {
|
||||
return new Octokit.Octokit({ auth: overrideToken, retry });
|
||||
}
|
||||
|
||||
const session = await vscode.authentication.getSession(GITHUB_AUTH_PROVIDER_ID, SCOPES, { createIfNone });
|
||||
|
||||
if (session) {
|
||||
return new Octokit.Octokit({
|
||||
auth: session.accessToken
|
||||
auth: session.accessToken,
|
||||
retry
|
||||
});
|
||||
} else {
|
||||
return undefined;
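The new `initializeWithToken` entry point above is aimed at non-interactive use. A hedged sketch of how a test might build credentials from the `VSCODE_CODEQL_GITHUB_TOKEN` variable that the CI workflow exports (the import path is an assumption; it is not shown in this diff):

```typescript
import { Credentials } from '../../src/authentication'; // assumed path, not shown in this diff

// In CI the integration tests export VSCODE_CODEQL_GITHUB_TOKEN (see the
// main.yml change earlier in this comparison), so no interactive GitHub
// login is needed.
export async function credentialsForTests(): Promise<Credentials> {
  const token = process.env.VSCODE_CODEQL_GITHUB_TOKEN;
  if (!token) {
    throw new Error('VSCODE_CODEQL_GITHUB_TOKEN is not set');
  }
  return Credentials.initializeWithToken(token);
}
```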
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import * as semver from 'semver';
|
||||
import { runCodeQlCliCommand } from './cli';
|
||||
import { Logger } from './logging';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
/**
|
||||
* Get the version of a CodeQL CLI.
|
||||
@@ -18,7 +19,7 @@ export async function getCodeQlCliVersion(codeQlPath: string, logger: Logger): P
|
||||
} catch (e) {
|
||||
// Failed to run the version command. This might happen if the cli version is _really_ old, or it is corrupted.
|
||||
// Either way, we can't determine compatibility.
|
||||
void logger.log(`Failed to run 'codeql version'. Reason: ${e.message}`);
|
||||
void logger.log(`Failed to run 'codeql version'. Reason: ${getErrorMessage(e)}`);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,12 +8,12 @@ import { Readable } from 'stream';
import { StringDecoder } from 'string_decoder';
import * as tk from 'tree-kill';
import { promisify } from 'util';
import { CancellationToken, Disposable, Uri } from 'vscode';
import { CancellationToken, commands, Disposable, Uri } from 'vscode';

import { BQRSInfo, DecodedBqrsChunk } from './pure/bqrs-cli-types';
import { CliConfig } from './config';
import { DistributionProvider, FindDistributionResultKind } from './distribution';
import { assertNever } from './pure/helpers-pure';
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
import { QueryMetadata, SortDirection } from './pure/interface-types';
import { Logger, ProgressReporter } from './logging';
import { CompilationMessage } from './pure/messages';
@@ -346,7 +346,7 @@ export class CodeQLCliServer implements Disposable {
stderrBuffers.length == 0
? new Error(`${description} failed: ${err}`)
: new Error(`${description} failed: ${Buffer.concat(stderrBuffers).toString('utf8')}`);
newError.stack += (err.stack || '');
newError.stack += getErrorStack(err);
throw newError;
} finally {
void this.logger.log(Buffer.concat(stderrBuffers).toString('utf8'));
@@ -448,7 +448,7 @@ export class CodeQLCliServer implements Disposable {
try {
yield JSON.parse(event) as EventType;
} catch (err) {
throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
}
}
}
@@ -503,7 +503,7 @@ export class CodeQLCliServer implements Disposable {
try {
return JSON.parse(result) as OutputType;
} catch (err) {
throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
}
}

@@ -604,10 +604,14 @@ export class CodeQLCliServer implements Disposable {
}

/** Resolves the ML models that should be available when evaluating a query. */
async resolveMlModels(additionalPacks: string[]): Promise<MlModelsInfo> {
async resolveMlModels(additionalPacks: string[], queryPath: string): Promise<MlModelsInfo> {
const args = await this.cliConstraints.supportsPreciseResolveMlModels()
// use the dirname of the path so that we can handle query libraries
? [...this.getAdditionalPacksArg(additionalPacks), path.dirname(queryPath)]
: this.getAdditionalPacksArg(additionalPacks);
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(
['resolve', 'ml-models'],
this.getAdditionalPacksArg(additionalPacks),
args,
'Resolving ML models',
false
);
@@ -665,6 +669,43 @@ export class CodeQLCliServer implements Disposable {
return await this.runCodeQlCliCommand(['generate', 'query-help'], subcommandArgs, `Generating qhelp in markdown format at ${outputDirectory}`);
}

/**
* Generate a summary of an evaluation log.
* @param endSummaryPath The path to write only the end of query part of the human-readable summary to.
* @param inputPath The path of an evaluation event log.
* @param outputPath The path to write a human-readable summary of it to.
*/
async generateLogSummary(
inputPath: string,
outputPath: string,
endSummaryPath: string,
): Promise<string> {
const subcommandArgs = [
'--format=text',
`--end-summary=${endSummaryPath}`,
inputPath,
outputPath
];
return await this.runCodeQlCliCommand(['generate', 'log-summary'], subcommandArgs, 'Generating log summary');
}

/**
* Generate a JSON summary of an evaluation log.
* @param inputPath The path of an evaluation event log.
* @param outputPath The path to write a JSON summary of it to.
*/
async generateJsonLogSummary(
inputPath: string,
outputPath: string,
): Promise<string> {
const subcommandArgs = [
'--format=predicates',
inputPath,
outputPath
];
return await this.runCodeQlCliCommand(['generate', 'log-summary'], subcommandArgs, 'Generating JSON log summary');
}

/**
* Gets the results from a bqrs.
* @param bqrsPath The path to the bqrs.
@@ -751,7 +792,7 @@ export class CodeQLCliServer implements Disposable {
const dot = await this.readDotFiles(interpretedResultsPath);
return dot;
} catch (err) {
throw new Error(`Reading output of interpretation failed: ${err.stderr || err}`);
throw new Error(`Reading output of interpretation failed: ${getErrorMessage(err)}`);
}
}

@@ -894,8 +935,12 @@ export class CodeQLCliServer implements Disposable {
return this.runJsonCodeQlCliCommand(['pack', 'download'], packs, 'Downloading packs');
}

async packInstall(dir: string) {
return this.runJsonCodeQlCliCommand(['pack', 'install'], [dir], 'Installing pack dependencies');
async packInstall(dir: string, forceUpdate = false) {
const args = [dir];
if (forceUpdate) {
args.push('--mode', 'update');
}
return this.runJsonCodeQlCliCommand(['pack', 'install'], args, 'Installing pack dependencies');
}

async packBundle(dir: string, workspaceFolders: string[], outputPath: string, precompile = true): Promise<void> {
@@ -940,6 +985,10 @@ export class CodeQLCliServer implements Disposable {
public async getVersion() {
if (!this._version) {
this._version = await this.refreshVersion();
// this._version is only undefined upon config change, so we reset CLI-based context key only when necessary.
await commands.executeCommand(
'setContext', 'codeql.supportsEvalLog', await this.cliConstraints.supportsPerQueryEvalLog()
);
}
return this._version;
}
@@ -1050,7 +1099,7 @@ export async function runCodeQlCliCommand(
void logger.log('CLI command succeeded.');
return result.stdout;
} catch (err) {
throw new Error(`${description} failed: ${err.stderr || err}`);
throw new Error(`${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
}
}

@@ -1106,8 +1155,8 @@ class SplitBuffer {
while (this.searchIndex <= (this.buffer.length - this.maxSeparatorLength)) {
for (const separator of this.separators) {
if (SplitBuffer.startsWith(this.buffer, separator, this.searchIndex)) {
const line = this.buffer.substr(0, this.searchIndex);
this.buffer = this.buffer.substr(this.searchIndex + separator.length);
const line = this.buffer.slice(0, this.searchIndex);
this.buffer = this.buffer.slice(this.searchIndex + separator.length);
this.searchIndex = 0;
return line;
}
@@ -1231,7 +1280,7 @@ export class CliVersionConstraint {
public static CLI_VERSION_WITH_NO_PRECOMPILE = new SemVer('2.7.1');

/**
* CLI version where remote queries are supported.
* CLI version where remote queries (variant analysis) are supported.
*/
public static CLI_VERSION_REMOTE_QUERIES = new SemVer('2.6.3');

@@ -1240,6 +1289,11 @@ export class CliVersionConstraint {
*/
public static CLI_VERSION_WITH_RESOLVE_ML_MODELS = new SemVer('2.7.3');

/**
* CLI version where the `resolve ml-models` subcommand was enhanced to work with packaging.
*/
public static CLI_VERSION_WITH_PRECISE_RESOLVE_ML_MODELS = new SemVer('2.10.0');

/**
* CLI version where the `--old-eval-stats` option to the query server was introduced.
*/
@@ -1256,6 +1310,17 @@ export class CliVersionConstraint {
*/
public static CLI_VERSION_WITH_STRUCTURED_EVAL_LOG = new SemVer('2.8.2');

/**
* CLI version that supports rotating structured logs to produce one per query.
*
* Note that 2.8.4 supports generating the evaluation logs and summaries,
* but 2.9.0 includes a new option to produce the end-of-query summary logs to
* the query server console. For simplicity we gate all features behind 2.9.0,
* but if a user is tied to the 2.8 release, we can enable evaluator logs
* and summaries for them.
*/
public static CLI_VERSION_WITH_PER_QUERY_EVAL_LOG = new SemVer('2.9.0');

constructor(private readonly cli: CodeQLCliServer) {
/**/
}
@@ -1304,6 +1369,10 @@ export class CliVersionConstraint {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_ML_MODELS);
}

async supportsPreciseResolveMlModels() {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PRECISE_RESOLVE_ML_MODELS);
}

async supportsOldEvalStats() {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_OLD_EVAL_STATS);
}
@@ -1315,4 +1384,8 @@ export class CliVersionConstraint {
async supportsStructuredEvalLog() {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_STRUCTURED_EVAL_LOG);
}

async supportsPerQueryEvalLog() {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG);
}
}

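For orientation, here is a hypothetical usage of the methods added to CodeQLCliServer above; `cliServer`, `packDir` and the file paths are placeholders, and the comments only restate the argument arrays constructed in the diff:

// Illustrative usage sketch only (names and paths are made up).
// generateLogSummary assembles: codeql generate log-summary --format=text --end-summary=<end> <input> <output>
await cliServer.generateLogSummary('evaluator-log.jsonl', 'evaluator-log.summary', 'evaluator-log.end-summary');
// generateJsonLogSummary assembles: codeql generate log-summary --format=predicates <input> <output>
await cliServer.generateJsonLogSummary('evaluator-log.jsonl', 'evaluator-log.summary.jsonl');
// packInstall with forceUpdate adds '--mode update' to: codeql pack install <dir>
await cliServer.packInstall(packDir, true);
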
@@ -8,6 +8,7 @@ import {
} from 'vscode';
import { showAndLogErrorMessage, showAndLogWarningMessage } from './helpers';
import { logger } from './logging';
import { getErrorMessage, getErrorStack } from './pure/helpers-pure';
import { telemetryListener } from './telemetry';

export class UserCancellationException extends Error {
@@ -121,8 +122,9 @@ export function commandRunner(
try {
return await task(...args);
} catch (e) {
error = e;
const errorMessage = `${e.message || e} (${commandId})`;
const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
error = e instanceof Error ? e : new Error(errorMessage);
const errorStack = getErrorStack(e);
if (e instanceof UserCancellationException) {
// User has cancelled this action manually
if (e.silent) {
@@ -132,8 +134,8 @@ export function commandRunner(
}
} else {
// Include the full stack in the error log only.
const fullMessage = e.stack
? `${errorMessage}\n${e.stack}`
const fullMessage = errorStack
? `${errorMessage}\n${errorStack}`
: errorMessage;
void showAndLogErrorMessage(errorMessage, {
fullMessage
@@ -173,8 +175,9 @@ export function commandRunnerWithProgress<R>(
try {
return await withProgress(progressOptionsWithDefaults, task, ...args);
} catch (e) {
error = e;
const errorMessage = `${e.message || e} (${commandId})`;
const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
error = e instanceof Error ? e : new Error(errorMessage);
const errorStack = getErrorStack(e);
if (e instanceof UserCancellationException) {
// User has cancelled this action manually
if (e.silent) {
@@ -184,8 +187,8 @@ export function commandRunnerWithProgress<R>(
}
} else {
// Include the full stack in the error log only.
const fullMessage = e.stack
? `${errorMessage}\n${e.stack}`
const fullMessage = errorStack
? `${errorMessage}\n${errorStack}`
: errorMessage;
void showAndLogErrorMessage(errorMessage, {
outputLogger,

@@ -21,6 +21,8 @@ import { getHtmlForWebview, jumpToLocation } from '../interface-utils';
import { transformBqrsResultSet, RawResultSet, BQRSInfo } from '../pure/bqrs-cli-types';
import resultsDiff from './resultsDiff';
import { CompletedLocalQueryInfo } from '../query-results';
import { getErrorMessage } from '../pure/helpers-pure';
import { HistoryItemLabelProvider } from '../history-item-label-provider';

interface ComparePair {
from: CompletedLocalQueryInfo;
@@ -38,6 +40,7 @@ export class CompareInterfaceManager extends DisposableObject {
private databaseManager: DatabaseManager,
private cliServer: CodeQLCliServer,
private logger: Logger,
private labelProvider: HistoryItemLabelProvider,
private showQueryResultsCallback: (
item: CompletedLocalQueryInfo
) => Promise<void>
@@ -70,7 +73,7 @@ export class CompareInterfaceManager extends DisposableObject {
try {
rows = this.compareResults(fromResultSet, toResultSet);
} catch (e) {
message = e.message;
message = getErrorMessage(e);
}

await this.postMessage({
@@ -80,12 +83,12 @@ export class CompareInterfaceManager extends DisposableObject {
// since we split the description into several rows
// only run interpolation if the label is user-defined
// otherwise we will wind up with duplicated rows
name: from.getShortLabel(),
name: this.labelProvider.getShortLabel(from),
status: from.completedQuery.statusString,
time: from.startTime,
},
toQuery: {
name: to.getShortLabel(),
name: this.labelProvider.getShortLabel(to),
status: to.completedQuery.statusString,
time: to.startTime,
},

@@ -2,7 +2,7 @@ import { DisposableObject } from './pure/disposable-object';
import { workspace, Event, EventEmitter, ConfigurationChangeEvent, ConfigurationTarget } from 'vscode';
import { DistributionManager } from './distribution';
import { logger } from './logging';
import { ONE_DAY_IN_MS } from './pure/helpers-pure';
import { ONE_DAY_IN_MS } from './pure/time';

/** Helper class to look up a labelled (and possibly nested) setting. */
export class Setting {
@@ -59,7 +59,7 @@ const PERSONAL_ACCESS_TOKEN_SETTING = new Setting('personalAccessToken', DISTRIB
// Query History configuration
const QUERY_HISTORY_SETTING = new Setting('queryHistory', ROOT_SETTING);
const QUERY_HISTORY_FORMAT_SETTING = new Setting('format', QUERY_HISTORY_SETTING);
const QUERY_HISTORY_TTL = new Setting('format', QUERY_HISTORY_SETTING);
const QUERY_HISTORY_TTL = new Setting('ttl', QUERY_HISTORY_SETTING);

/** When these settings change, the distribution should be updated. */
const DISTRIBUTION_CHANGE_SETTINGS = [CUSTOM_CODEQL_PATH_SETTING, INCLUDE_PRERELEASE_SETTING, PERSONAL_ACCESS_TOKEN_SETTING];
@@ -322,11 +322,11 @@ export function isCanary() {
*/
export const NO_CACHE_AST_VIEWER = new Setting('disableCache', AST_VIEWER_SETTING);

// Settings for remote queries
const REMOTE_QUERIES_SETTING = new Setting('remoteQueries', ROOT_SETTING);
// Settings for variant analysis
const REMOTE_QUERIES_SETTING = new Setting('variantAnalysis', ROOT_SETTING);

/**
* Lists of GitHub repositories that you want to query remotely via the "Run Remote query" command.
* Lists of GitHub repositories that you want to query remotely via the "Run Variant Analysis" command.
* Note: This command is only available for internal users.
*
* This setting should be a JSON object where each key is a user-specified name (string),
@@ -343,7 +343,22 @@ export async function setRemoteRepositoryLists(lists: Record<string, string[]> |
}

/**
* The name of the "controller" repository that you want to use with the "Run Remote query" command.
* Path to a file that contains lists of GitHub repositories that you want to query remotely via
* the "Run Variant Analysis" command.
* Note: This command is only available for internal users.
*
* This setting should be a path to a JSON file that contains a JSON object where each key is a
* user-specified name (string), and the value is an array of GitHub repositories
* (of the form `<owner>/<repo>`).
*/
const REPO_LISTS_PATH = new Setting('repositoryListsPath', REMOTE_QUERIES_SETTING);

export function getRemoteRepositoryListsPath(): string | undefined {
return REPO_LISTS_PATH.getValue<string>() || undefined;
}

/**
* The name of the "controller" repository that you want to use with the "Run Variant Analysis" command.
* Note: This command is only available for internal users.
*
* This setting should be a GitHub repository of the form `<owner>/<repo>`.
@@ -357,3 +372,18 @@ export function getRemoteControllerRepo(): string | undefined {
export async function setRemoteControllerRepo(repo: string | undefined) {
await REMOTE_CONTROLLER_REPO.updateValue(repo, ConfigurationTarget.Global);
}

/**
* The branch of "github/codeql-variant-analysis-action" to use with the "Run Variant Analysis" command.
* Default value is "main".
* Note: This command is only available for internal users.
*/
const ACTION_BRANCH = new Setting('actionBranch', REMOTE_QUERIES_SETTING);

export function getActionBranch(): string {
return ACTION_BRANCH.getValue<string>() || 'main';
}

export function isIntegrationTestMode() {
return process.env.INTEGRATION_TEST_MODE === 'true';
}

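To make the shape of the repository-list settings described above concrete, a value matching that description would look roughly like the following; the list name and repositories are invented for illustration only:

// Illustrative only - matches the Record<string, string[]> shape accepted by setRemoteRepositoryLists above.
const exampleRepositoryLists: Record<string, string[]> = {
  'my-list': ['github/codeql', 'octocat/hello-world'],
};
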
@@ -4,6 +4,7 @@ import { DecodedBqrsChunk, BqrsId, EntityValue } from '../pure/bqrs-cli-types';
import { DatabaseItem } from '../databases';
import { ChildAstItem, AstItem } from '../astViewer';
import fileRangeFromURI from './fileRangeFromURI';
import { Uri } from 'vscode';

/**
* A class that wraps a tree of QL results from a query that
@@ -17,7 +18,7 @@ export default class AstBuilder {
queryResults: QueryWithResults,
private cli: CodeQLCliServer,
public db: DatabaseItem,
public fileName: string
public fileName: Uri
) {
this.bqrsPath = queryResults.query.resultsPaths.resultsPath;
}

@@ -45,7 +45,7 @@ async function resolveQueriesFromPacks(cli: CodeQLCliServer, qlpacks: string[],
}
});
}
await fs.writeFile(suiteFile, yaml.safeDump(suiteYaml), 'utf8');
await fs.writeFile(suiteFile, yaml.dump(suiteYaml), 'utf8');

const queries = await cli.resolveQueriesInSuite(suiteFile, helpers.getOnDiskWorkspaceFolders());
return queries;

@@ -10,7 +10,6 @@ import {
TextDocument,
Uri
} from 'vscode';
import * as path from 'path';

import { decodeSourceArchiveUri, encodeArchiveBasePath, zipArchiveScheme } from '../archive-filesystem-provider';
import { CodeQLCliServer } from '../cli';
@@ -160,7 +159,7 @@ export class TemplatePrintAstProvider {
return new AstBuilder(
query, this.cli,
this.dbm.findDatabaseItem(dbUri)!,
path.basename(fileUri.fsPath),
fileUri,
);
}

@@ -21,6 +21,8 @@ import {
|
||||
} from './commandRunner';
|
||||
import { logger } from './logging';
|
||||
import { tmpDir } from './helpers';
|
||||
import { Credentials } from './authentication';
|
||||
import { REPO_REGEX, getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
/**
|
||||
* Prompts a user to fetch a database from a remote location. Database is assumed to be an archive file.
|
||||
@@ -46,8 +48,10 @@ export async function promptImportInternetDatabase(
|
||||
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{},
|
||||
databaseManager,
|
||||
storagePath,
|
||||
undefined,
|
||||
progress,
|
||||
token,
|
||||
cli
|
||||
@@ -61,6 +65,82 @@ export async function promptImportInternetDatabase(
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompts a user to fetch a database from GitHub.
|
||||
* User enters a GitHub repository and then the user is asked which language
|
||||
* to download (if there is more than one)
|
||||
*
|
||||
* @param databaseManager the DatabaseManager
|
||||
* @param storagePath where to store the unzipped database.
|
||||
*/
|
||||
export async function promptImportGithubDatabase(
|
||||
databaseManager: DatabaseManager,
|
||||
storagePath: string,
|
||||
credentials: Credentials,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
cli?: CodeQLCliServer
|
||||
): Promise<DatabaseItem | undefined> {
|
||||
progress({
|
||||
message: 'Choose repository',
|
||||
step: 1,
|
||||
maxStep: 2
|
||||
});
|
||||
const githubRepo = await window.showInputBox({
|
||||
title: 'Enter a GitHub repository URL or "name with owner" (e.g. https://github.com/github/codeql or github/codeql)',
|
||||
placeHolder: 'https://github.com/<owner>/<repo> or <owner>/<repo>',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (!githubRepo) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!looksLikeGithubRepo(githubRepo)) {
|
||||
throw new Error(`Invalid GitHub repository: ${githubRepo}`);
|
||||
}
|
||||
|
||||
const result = await convertGithubNwoToDatabaseUrl(githubRepo, credentials, progress);
|
||||
if (!result) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { databaseUrl, name, owner } = result;
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
/**
|
||||
* The 'token' property of the token object returned by `octokit.auth()`.
|
||||
* The object is undocumented, but looks something like this:
|
||||
* {
|
||||
* token: 'xxxx',
|
||||
* tokenType: 'oauth',
|
||||
* type: 'token',
|
||||
* }
|
||||
* We only need the actual token string.
|
||||
*/
|
||||
const octokitToken = (await octokit.auth() as { token: string })?.token;
|
||||
if (!octokitToken) {
|
||||
// Just print a generic error message for now. Ideally we could show more debugging info, like the
|
||||
// octokit object, but that would expose a user token.
|
||||
throw new Error('Unable to get GitHub token.');
|
||||
}
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{ 'Accept': 'application/zip', 'Authorization': `Bearer ${octokitToken}` },
|
||||
databaseManager,
|
||||
storagePath,
|
||||
`${owner}/${name}`,
|
||||
progress,
|
||||
token,
|
||||
cli
|
||||
);
|
||||
if (item) {
|
||||
await commands.executeCommand('codeQLDatabases.focus');
|
||||
void showAndLogInformationMessage('Database downloaded and imported successfully.');
|
||||
return item;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompts a user to fetch a database from lgtm.
|
||||
* User enters a project url and then the user is asked which language
|
||||
@@ -90,12 +170,14 @@ export async function promptImportLgtmDatabase(
|
||||
}
|
||||
|
||||
if (looksLikeLgtmUrl(lgtmUrl)) {
|
||||
const databaseUrl = await convertToDatabaseUrl(lgtmUrl, progress);
|
||||
const databaseUrl = await convertLgtmUrlToDatabaseUrl(lgtmUrl, progress);
|
||||
if (databaseUrl) {
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{},
|
||||
databaseManager,
|
||||
storagePath,
|
||||
undefined,
|
||||
progress,
|
||||
token,
|
||||
cli
|
||||
@@ -140,8 +222,10 @@ export async function importArchiveDatabase(
|
||||
try {
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{},
|
||||
databaseManager,
|
||||
storagePath,
|
||||
undefined,
|
||||
progress,
|
||||
token,
|
||||
cli
|
||||
@@ -152,7 +236,7 @@ export async function importArchiveDatabase(
|
||||
}
|
||||
return item;
|
||||
} catch (e) {
|
||||
if (e.message.includes('unexpected end of file')) {
|
||||
if (getErrorMessage(e).includes('unexpected end of file')) {
|
||||
throw new Error('Database is corrupt or too large. Try unzipping outside of VS Code and importing the unzipped folder instead.');
|
||||
} else {
|
||||
// delegate
|
||||
@@ -166,15 +250,19 @@ export async function importArchiveDatabase(
|
||||
* or in the local filesystem.
|
||||
*
|
||||
* @param databaseUrl URL from which to grab the database
|
||||
* @param requestHeaders Headers to send with the request
|
||||
* @param databaseManager the DatabaseManager
|
||||
* @param storagePath where to store the unzipped database.
|
||||
* @param nameOverride a name for the database that overrides the default
|
||||
* @param progress callback to send progress messages to
|
||||
* @param token cancellation token
|
||||
*/
|
||||
async function databaseArchiveFetcher(
|
||||
databaseUrl: string,
|
||||
requestHeaders: { [key: string]: string },
|
||||
databaseManager: DatabaseManager,
|
||||
storagePath: string,
|
||||
nameOverride: string | undefined,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
cli?: CodeQLCliServer,
|
||||
@@ -193,7 +281,7 @@ async function databaseArchiveFetcher(
|
||||
if (isFile(databaseUrl)) {
|
||||
await readAndUnzip(databaseUrl, unzipPath, cli, progress);
|
||||
} else {
|
||||
await fetchAndUnzip(databaseUrl, unzipPath, cli, progress);
|
||||
await fetchAndUnzip(databaseUrl, requestHeaders, unzipPath, cli, progress);
|
||||
}
|
||||
|
||||
progress({
|
||||
@@ -216,7 +304,7 @@ async function databaseArchiveFetcher(
|
||||
});
|
||||
await ensureZippedSourceLocation(dbPath);
|
||||
|
||||
const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath));
|
||||
const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath), nameOverride);
|
||||
await databaseManager.setCurrentDatabaseItem(item);
|
||||
return item;
|
||||
} else {
|
||||
@@ -292,6 +380,7 @@ async function readAndUnzip(
|
||||
|
||||
async function fetchAndUnzip(
|
||||
databaseUrl: string,
|
||||
requestHeaders: { [key: string]: string },
|
||||
unzipPath: string,
|
||||
cli?: CodeQLCliServer,
|
||||
progress?: ProgressCallback
|
||||
@@ -310,7 +399,10 @@ async function fetchAndUnzip(
|
||||
step: 1,
|
||||
});
|
||||
|
||||
const response = await checkForFailingResponse(await fetch(databaseUrl), 'Error downloading database');
|
||||
const response = await checkForFailingResponse(
|
||||
await fetch(databaseUrl, { headers: requestHeaders }),
|
||||
'Error downloading database'
|
||||
);
|
||||
const archiveFileStream = fs.createWriteStream(archivePath);
|
||||
|
||||
const contentLength = response.headers.get('content-length');
|
||||
@@ -325,7 +417,6 @@ async function fetchAndUnzip(
|
||||
|
||||
await readAndUnzip(Uri.file(archivePath).toString(true), unzipPath, cli, progress);
|
||||
|
||||
|
||||
// remove archivePath eagerly since these archives can be large.
|
||||
await fs.remove(archivePath);
|
||||
}
|
||||
@@ -381,6 +472,89 @@ export async function findDirWithFile(
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* The URL pattern is https://github.com/{owner}/{name}/{subpages}.
|
||||
*
|
||||
* This function accepts any URL that matches the pattern above. It also accepts just the
|
||||
* name with owner (NWO): `<owner>/<repo>`.
|
||||
*
|
||||
* @param githubRepo The GitHub repository URL or NWO
|
||||
*
|
||||
* @return true if this looks like a valid GitHub repository URL or NWO
|
||||
*/
|
||||
export function looksLikeGithubRepo(
|
||||
githubRepo: string | undefined
|
||||
): githubRepo is string {
|
||||
if (!githubRepo) {
|
||||
return false;
|
||||
}
|
||||
if (REPO_REGEX.test(githubRepo) || convertGitHubUrlToNwo(githubRepo)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a GitHub repository URL to the corresponding NWO.
|
||||
* @param githubUrl The GitHub repository URL
|
||||
* @return The corresponding NWO, or undefined if the URL is not valid
|
||||
*/
|
||||
function convertGitHubUrlToNwo(githubUrl: string): string | undefined {
|
||||
try {
|
||||
const uri = Uri.parse(githubUrl, true);
|
||||
if (uri.scheme !== 'https') {
|
||||
return;
|
||||
}
|
||||
if (uri.authority !== 'github.com' && uri.authority !== 'www.github.com') {
|
||||
return;
|
||||
}
|
||||
const paths = uri.path.split('/').filter((segment: string) => segment);
|
||||
const nwo = `${paths[0]}/${paths[1]}`;
|
||||
if (REPO_REGEX.test(nwo)) {
|
||||
return nwo;
|
||||
}
|
||||
return;
|
||||
} catch (e) {
|
||||
// Ignore the error here, since we catch failures at a higher level.
|
||||
// In particular: returning undefined leads to an error in 'promptImportGithubDatabase'.
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
export async function convertGithubNwoToDatabaseUrl(
|
||||
githubRepo: string,
|
||||
credentials: Credentials,
|
||||
progress: ProgressCallback): Promise<{
|
||||
databaseUrl: string,
|
||||
owner: string,
|
||||
name: string
|
||||
} | undefined> {
|
||||
try {
|
||||
const nwo = convertGitHubUrlToNwo(githubRepo) || githubRepo;
|
||||
const [owner, repo] = nwo.split('/');
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
const response = await octokit.request('GET /repos/:owner/:repo/code-scanning/codeql/databases', { owner, repo });
|
||||
|
||||
const languages = response.data.map((db: any) => db.language);
|
||||
|
||||
const language = await promptForLanguage(languages, progress);
|
||||
if (!language) {
|
||||
return;
|
||||
}
|
||||
|
||||
return {
|
||||
databaseUrl: `https://api.github.com/repos/${owner}/${repo}/code-scanning/codeql/databases/${language}`,
|
||||
owner,
|
||||
name: repo
|
||||
};
|
||||
|
||||
} catch (e) {
|
||||
void logger.log(`Error: ${getErrorMessage(e)}`);
|
||||
throw new Error(`Unable to get database for '${githubRepo}'`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The URL pattern is https://lgtm.com/projects/{provider}/{org}/{name}/{irrelevant-subpages}.
|
||||
* There are several possibilities for the provider: in addition to GitHub.com (g),
|
||||
@@ -416,7 +590,7 @@ export function looksLikeLgtmUrl(lgtmUrl: string | undefined): lgtmUrl is string
|
||||
return false;
|
||||
}
|
||||
|
||||
const paths = uri.path.split('/').filter((segment) => segment);
|
||||
const paths = uri.path.split('/').filter((segment: string) => segment);
|
||||
return paths.length >= 4 && paths[0] === 'projects';
|
||||
} catch (e) {
|
||||
return false;
|
||||
@@ -446,7 +620,7 @@ function extractProjectSlug(lgtmUrl: string): string | undefined {
|
||||
}
|
||||
|
||||
// exported for testing
|
||||
export async function convertToDatabaseUrl(
|
||||
export async function convertLgtmUrlToDatabaseUrl(
|
||||
lgtmUrl: string,
|
||||
progress: ProgressCallback) {
|
||||
try {
|
||||
@@ -467,7 +641,9 @@ export async function convertToDatabaseUrl(
|
||||
}
|
||||
}
|
||||
|
||||
const language = await promptForLanguage(projectJson, progress);
|
||||
const languages = projectJson?.languages?.map((lang: { language: string }) => lang.language) || [];
|
||||
|
||||
const language = await promptForLanguage(languages, progress);
|
||||
if (!language) {
|
||||
return;
|
||||
}
|
||||
@@ -479,7 +655,7 @@ export async function convertToDatabaseUrl(
|
||||
language,
|
||||
].join('/')}`;
|
||||
} catch (e) {
|
||||
void logger.log(`Error: ${e.message}`);
|
||||
void logger.log(`Error: ${getErrorMessage(e)}`);
|
||||
throw new Error(`Invalid LGTM URL: ${lgtmUrl}`);
|
||||
}
|
||||
}
|
||||
@@ -487,7 +663,7 @@ export async function convertToDatabaseUrl(
|
||||
async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
|
||||
const uri = Uri.parse(lgtmUrl, true);
|
||||
const paths = ['api', 'v1.0'].concat(
|
||||
uri.path.split('/').filter((segment) => segment)
|
||||
uri.path.split('/').filter((segment: string) => segment)
|
||||
).slice(0, 6);
|
||||
const projectUrl = `https://lgtm.com/${paths.join('/')}`;
|
||||
const projectResponse = await fetch(projectUrl);
|
||||
@@ -495,7 +671,7 @@ async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
|
||||
}
|
||||
|
||||
async function promptForLanguage(
|
||||
projectJson: any,
|
||||
languages: string[],
|
||||
progress: ProgressCallback
|
||||
): Promise<string | undefined> {
|
||||
progress({
|
||||
@@ -503,17 +679,19 @@ async function promptForLanguage(
|
||||
step: 2,
|
||||
maxStep: 2
|
||||
});
|
||||
if (!projectJson?.languages?.length) {
|
||||
return;
|
||||
if (!languages.length) {
|
||||
throw new Error('No databases found');
|
||||
}
|
||||
if (projectJson.languages.length === 1) {
|
||||
return projectJson.languages[0].language;
|
||||
if (languages.length === 1) {
|
||||
return languages[0];
|
||||
}
|
||||
|
||||
return await window.showQuickPick(
|
||||
projectJson.languages.map((lang: { language: string }) => lang.language), {
|
||||
placeHolder: 'Select the database language to download:'
|
||||
}
|
||||
languages,
|
||||
{
|
||||
placeHolder: 'Select the database language to download:',
|
||||
ignoreFocusOut: true,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -33,11 +33,13 @@ import * as qsClient from './queryserver-client';
import { upgradeDatabaseExplicit } from './upgrades';
import {
importArchiveDatabase,
promptImportGithubDatabase,
promptImportInternetDatabase,
promptImportLgtmDatabase,
} from './databaseFetcher';
import { CancellationToken } from 'vscode';
import { asyncFilter } from './pure/helpers-pure';
import { asyncFilter, getErrorMessage } from './pure/helpers-pure';
import { Credentials } from './authentication';

type ThemableIconPath = { light: string; dark: string } | string;

@@ -219,7 +221,8 @@ export class DatabaseUI extends DisposableObject {
private databaseManager: DatabaseManager,
private readonly queryServer: qsClient.QueryServerClient | undefined,
private readonly storagePath: string,
readonly extensionPath: string
readonly extensionPath: string,
private readonly getCredentials: () => Promise<Credentials>
) {
super();

@@ -291,6 +294,20 @@ export class DatabaseUI extends DisposableObject {
}
)
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.chooseDatabaseGithub',
async (
progress: ProgressCallback,
token: CancellationToken
) => {
const credentials = await this.getCredentials();
await this.handleChooseDatabaseGithub(credentials, progress, token);
},
{
title: 'Adding database from GitHub',
})
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.chooseDatabaseLgtm',
@@ -376,7 +393,7 @@ export class DatabaseUI extends DisposableObject {
try {
return await this.chooseAndSetDatabase(true, progress, token);
} catch (e) {
void showAndLogErrorMessage(e.message);
void showAndLogErrorMessage(getErrorMessage(e));
return undefined;
}
};
@@ -444,7 +461,7 @@ export class DatabaseUI extends DisposableObject {
try {
return await this.chooseAndSetDatabase(false, progress, token);
} catch (e) {
void showAndLogErrorMessage(e.message);
void showAndLogErrorMessage(getErrorMessage(e));
return undefined;
}
};
@@ -462,6 +479,21 @@ export class DatabaseUI extends DisposableObject {
);
};

handleChooseDatabaseGithub = async (
credentials: Credentials,
progress: ProgressCallback,
token: CancellationToken
): Promise<DatabaseItem | undefined> => {
return await promptImportGithubDatabase(
this.databaseManager,
this.storagePath,
credentials,
progress,
token,
this.queryServer?.cliServer
);
};

handleChooseDatabaseLgtm = async (
progress: ProgressCallback,
token: CancellationToken
@@ -590,8 +622,7 @@ export class DatabaseUI extends DisposableObject {
} catch (e) {
// rethrow and let this be handled by default error handling.
throw new Error(
`Could not set database to ${path.basename(uri.fsPath)}. Reason: ${e.message
}`
`Could not set database to ${path.basename(uri.fsPath)}. Reason: ${getErrorMessage(e)}`
);
}
};

@@ -19,6 +19,7 @@ import { DisposableObject } from './pure/disposable-object';
import { Logger, logger } from './logging';
import { registerDatabases, Dataset, deregisterDatabases } from './pure/messages';
import { QueryServerClient } from './queryserver-client';
import { getErrorMessage } from './pure/helpers-pure';

/**
* databases.ts
@@ -147,7 +148,7 @@ export async function findSourceArchive(
}

async function resolveDatabase(
databasePath: string
databasePath: string,
): Promise<DatabaseContents> {

const name = path.basename(databasePath);
@@ -169,7 +170,9 @@ async function getDbSchemeFiles(dbDirectory: string): Promise<string[]> {
return await glob('*.dbscheme', { cwd: dbDirectory });
}

async function resolveDatabaseContents(uri: vscode.Uri): Promise<DatabaseContents> {
async function resolveDatabaseContents(
uri: vscode.Uri,
): Promise<DatabaseContents> {
if (uri.scheme !== 'file') {
throw new Error(`Database URI scheme '${uri.scheme}' not supported; only 'file' URIs are supported.`);
}
@@ -359,7 +362,7 @@ export class DatabaseItemImpl implements DatabaseItem {
}
catch (e) {
this._contents = undefined;
this._error = e;
this._error = e instanceof Error ? e : new Error(String(e));
throw e;
}
}
@@ -568,14 +571,15 @@ export class DatabaseManager extends DisposableObject {
progress: ProgressCallback,
token: vscode.CancellationToken,
uri: vscode.Uri,
displayName?: string
): Promise<DatabaseItem> {
const contents = await resolveDatabaseContents(uri);
// Ignore the source archive for QLTest databases by default.
const isQLTestDatabase = path.extname(uri.fsPath) === '.testproj';
const fullOptions: FullDatabaseOptions = {
ignoreSourceArchive: isQLTestDatabase,
// displayName is only set if a user explicitly renames a database
displayName: undefined,
// If a displayName is not passed in, the basename of folder containing the database is used.
displayName,
dateAdded: Date.now(),
language: await this.getPrimaryLanguage(uri.fsPath)
};
@@ -726,7 +730,7 @@ export class DatabaseManager extends DisposableObject {
}
} catch (e) {
// database list had an unexpected type - nothing to be done?
void showAndLogErrorMessage(`Database list loading failed: ${e.message}`);
void showAndLogErrorMessage(`Database list loading failed: ${getErrorMessage(e)}`);
}
});
}
@@ -841,7 +845,7 @@ export class DatabaseManager extends DisposableObject {
void logger.log('Deleting database from filesystem.');
fs.remove(item.databaseUri.fsPath).then(
() => void logger.log(`Deleted '${item.databaseUri.fsPath}'`),
e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${e.message}`));
e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${getErrorMessage(e)}`));
}

// note that we use undefined as the item in order to reset the entire tree

@@ -1,3 +1,4 @@
|
||||
import 'source-map-support/register';
|
||||
import {
|
||||
CancellationToken,
|
||||
CancellationTokenSource,
|
||||
@@ -65,7 +66,7 @@ import {
|
||||
showInformationMessageWithAction,
|
||||
tmpDir
|
||||
} from './helpers';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { asError, assertNever, getErrorMessage } from './pure/helpers-pure';
|
||||
import { spawnIdeServer } from './ide-server';
|
||||
import { InterfaceManager } from './interface';
|
||||
import { WebviewReveal } from './interface-utils';
|
||||
@@ -93,11 +94,10 @@ import { Credentials } from './authentication';
|
||||
import { RemoteQueriesManager } from './remote-queries/remote-queries-manager';
|
||||
import { RemoteQueryResult } from './remote-queries/remote-query-result';
|
||||
import { URLSearchParams } from 'url';
|
||||
import { RemoteQueriesInterfaceManager } from './remote-queries/remote-queries-interface';
|
||||
import * as sampleData from './remote-queries/sample-data';
|
||||
import { handleDownloadPacks, handleInstallPackDependencies } from './packaging';
|
||||
import { AnalysesResultsManager } from './remote-queries/analyses-results-manager';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||
import { exportRemoteQueryResults } from './remote-queries/export-results';
|
||||
import { RemoteQuery } from './remote-queries/remote-query';
|
||||
|
||||
/**
|
||||
* extension.ts
|
||||
@@ -436,7 +436,8 @@ async function activateWithInstalledDistribution(
|
||||
dbm,
|
||||
qs,
|
||||
getContextStoragePath(ctx),
|
||||
ctx.extensionPath
|
||||
ctx.extensionPath,
|
||||
() => Credentials.initialize(ctx),
|
||||
);
|
||||
databaseUI.init();
|
||||
ctx.subscriptions.push(databaseUI);
|
||||
@@ -448,28 +449,35 @@ async function activateWithInstalledDistribution(
|
||||
showResultsForCompletedQuery(item, WebviewReveal.Forced);
|
||||
const queryStorageDir = path.join(ctx.globalStorageUri.fsPath, 'queries');
|
||||
await fs.ensureDir(queryStorageDir);
|
||||
const labelProvider = new HistoryItemLabelProvider(queryHistoryConfigurationListener);
|
||||
|
||||
void logger.log('Initializing results panel interface.');
|
||||
const intm = new InterfaceManager(ctx, dbm, cliServer, queryServerLogger, labelProvider);
|
||||
ctx.subscriptions.push(intm);
|
||||
|
||||
void logger.log('Initializing variant analysis manager.');
|
||||
const rqm = new RemoteQueriesManager(ctx, cliServer, queryStorageDir, logger);
|
||||
ctx.subscriptions.push(rqm);
|
||||
|
||||
void logger.log('Initializing query history.');
|
||||
const qhm = new QueryHistoryManager(
|
||||
qs,
|
||||
dbm,
|
||||
intm,
|
||||
rqm,
|
||||
queryStorageDir,
|
||||
ctx,
|
||||
queryHistoryConfigurationListener,
|
||||
labelProvider,
|
||||
async (from: CompletedLocalQueryInfo, to: CompletedLocalQueryInfo) =>
|
||||
showResultsForComparison(from, to),
|
||||
);
|
||||
|
||||
qhm.onWillOpenQueryItem(async item => {
|
||||
if (item.t === 'local' && item.completed) {
|
||||
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.Forced);
|
||||
}
|
||||
});
|
||||
|
||||
ctx.subscriptions.push(qhm);
|
||||
void logger.log('Initializing results panel interface.');
|
||||
const intm = new InterfaceManager(ctx, dbm, cliServer, queryServerLogger);
|
||||
ctx.subscriptions.push(intm);
|
||||
|
||||
void logger.log('Reading query history');
|
||||
await qhm.readQueryHistory();
|
||||
|
||||
void logger.log('Initializing compare panel interface.');
|
||||
const cmpm = new CompareInterfaceManager(
|
||||
@@ -477,6 +485,7 @@ async function activateWithInstalledDistribution(
|
||||
dbm,
|
||||
cliServer,
|
||||
queryServerLogger,
|
||||
labelProvider,
|
||||
showResults
|
||||
);
|
||||
ctx.subscriptions.push(cmpm);
|
||||
@@ -491,7 +500,7 @@ async function activateWithInstalledDistribution(
|
||||
try {
|
||||
await cmpm.showResults(from, to);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -526,7 +535,7 @@ async function activateWithInstalledDistribution(
|
||||
token.onCancellationRequested(() => source.cancel());
|
||||
|
||||
const initialInfo = await createInitialQueryInfo(selectedQuery, databaseInfo, quickEval, range);
|
||||
const item = new LocalQueryInfo(initialInfo, queryHistoryConfigurationListener, source);
|
||||
const item = new LocalQueryInfo(initialInfo, source);
|
||||
qhm.addQuery(item);
|
||||
try {
|
||||
const completedQueryInfo = await compileAndRunQueryAgainstDatabase(
|
||||
@@ -537,14 +546,17 @@ async function activateWithInstalledDistribution(
|
||||
queryStorageDir,
|
||||
progress,
|
||||
source.token,
|
||||
undefined,
|
||||
item,
|
||||
);
|
||||
item.completeThisQuery(completedQueryInfo);
|
||||
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.NotForced);
|
||||
// Note we must update the query history view after showing results as the
|
||||
// display and sorting might depend on the number of results
|
||||
} catch (e) {
|
||||
e.message = `Error running query: ${e.message}`;
|
||||
item.failureReason = e.message;
|
||||
const err = asError(e);
|
||||
err.message = `Error running query: ${err.message}`;
|
||||
item.failureReason = err.message;
|
||||
throw e;
|
||||
} finally {
|
||||
await qhm.refreshTreeView();
|
||||
@@ -569,11 +581,11 @@ async function activateWithInstalledDistribution(
|
||||
try {
|
||||
await cliServer.generateQueryHelp(pathToQhelp, absolutePathToMd);
|
||||
await commands.executeCommand('markdown.showPreviewToSide', uri);
|
||||
} catch (err) {
|
||||
const errorMessage = err.message.includes('Generating qhelp in markdown') ? (
|
||||
} catch (e) {
|
||||
const errorMessage = getErrorMessage(e).includes('Generating qhelp in markdown') ? (
|
||||
`Could not generate markdown from ${pathToQhelp}: Bad formatting in .qhelp file.`
|
||||
) : `Could not open a preview of the generated file (${absolutePathToMd}).`;
|
||||
void showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${err}` });
|
||||
void showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${e}` });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -696,9 +708,9 @@ async function activateWithInstalledDistribution(
|
||||
for (const item of quickpick) {
|
||||
try {
|
||||
await compileAndRunQuery(false, uri, progress, token, item.databaseItem);
|
||||
} catch (error) {
|
||||
} catch (e) {
|
||||
skippedDatabases.push(item.label);
|
||||
errors.push(error.message);
|
||||
errors.push(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
if (skippedDatabases.length > 0) {
|
||||
@@ -836,20 +848,12 @@ async function activateWithInstalledDistribution(
|
||||
)
|
||||
);
|
||||
|
||||
void logger.log('Initializing remote queries interface.');
|
||||
const rqm = new RemoteQueriesManager(ctx, cliServer, qhm, queryStorageDir, logger);
|
||||
ctx.subscriptions.push(rqm);
|
||||
|
||||
// wait until after the remote queries manager is initialized to read the query history
|
||||
// since the rqm is notified of queries being added.
|
||||
await qhm.readQueryHistory();
|
||||
|
||||
|
||||
registerRemoteQueryTextProvider();
|
||||
|
||||
// The "runRemoteQuery" command is internal-only.
|
||||
// The "runVariantAnalysis" command is internal-only.
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.runRemoteQuery', async (
|
||||
commandRunnerWithProgress('codeQL.runVariantAnalysis', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
uri: Uri | undefined
|
||||
@@ -866,21 +870,28 @@ async function activateWithInstalledDistribution(
|
||||
token
|
||||
);
|
||||
} else {
|
||||
throw new Error('Remote queries require the CodeQL Canary version to run.');
|
||||
throw new Error('Variant analysis requires the CodeQL Canary version to run.');
|
||||
}
|
||||
}, {
|
||||
title: 'Run Remote Query',
|
||||
title: 'Run Variant Analysis',
|
||||
cancellable: true
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.monitorRemoteQuery', async (
|
||||
queryItem: RemoteQueryHistoryItem,
|
||||
queryId: string,
|
||||
query: RemoteQuery,
|
||||
token: CancellationToken) => {
|
||||
await rqm.monitorRemoteQuery(queryItem, token);
|
||||
await rqm.monitorRemoteQuery(queryId, query, token);
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.copyRepoList', async (queryId: string) => {
|
||||
await rqm.copyRemoteQueryRepoListToClipboard(queryId);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.autoDownloadRemoteQueryResults', async (
|
||||
queryResult: RemoteQueryResult,
|
||||
@@ -889,15 +900,10 @@ async function activateWithInstalledDistribution(
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.showFakeRemoteQueryResults', async () => {
|
||||
const analysisResultsManager = new AnalysesResultsManager(ctx, queryStorageDir, logger);
|
||||
const rqim = new RemoteQueriesInterfaceManager(ctx, logger, analysisResultsManager);
|
||||
await rqim.showResults(sampleData.sampleRemoteQuery, sampleData.sampleRemoteQueryResult);
|
||||
|
||||
await rqim.setAnalysisResults(sampleData.sampleAnalysesResultsStage1);
|
||||
await rqim.setAnalysisResults(sampleData.sampleAnalysesResultsStage2);
|
||||
await rqim.setAnalysisResults(sampleData.sampleAnalysesResultsStage3);
|
||||
}));
|
||||
commandRunner('codeQL.exportVariantAnalysisResults', async () => {
|
||||
await exportRemoteQueryResults(qhm, rqm, ctx);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
@@ -945,6 +951,18 @@ async function activateWithInstalledDistribution(
|
||||
title: 'Choose a Database from an Archive'
|
||||
})
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.chooseDatabaseGithub', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
await databaseUI.handleChooseDatabaseGithub(credentials, progress, token);
|
||||
},
|
||||
{
|
||||
title: 'Adding database from GitHub',
|
||||
})
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.chooseDatabaseLgtm', (
|
||||
progress: ProgressCallback,
|
||||
|
||||
@@ -76,9 +76,10 @@ export async function showAndLogWarningMessage(message: string, {
*/
export async function showAndLogInformationMessage(message: string, {
outputLogger = logger,
items = [] as string[]
items = [] as string[],
fullMessage = ''
} = {}): Promise<string | undefined> {
return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage);
return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage, fullMessage);
}

type ShowMessageFn = (message: string, ...items: string[]) => Thenable<string | undefined>;
@@ -288,7 +289,7 @@ interface QlPackWithPath {
async function findDbschemePack(packs: QlPackWithPath[], dbschemePath: string): Promise<{ name: string; isLibraryPack: boolean; }> {
for (const { packDir, packName } of packs) {
if (packDir !== undefined) {
const qlpack = yaml.safeLoad(await fs.readFile(path.join(packDir, 'qlpack.yml'), 'utf8')) as { dbscheme?: string; library?: boolean; };
const qlpack = yaml.load(await fs.readFile(path.join(packDir, 'qlpack.yml'), 'utf8')) as { dbscheme?: string; library?: boolean; };
if (qlpack.dbscheme !== undefined && path.basename(qlpack.dbscheme) === path.basename(dbschemePath)) {
return {
name: packName,
@@ -580,3 +581,11 @@ export async function* walkDirectory(dir: string): AsyncIterableIterator<string>
}
}
}

/**
* Pluralizes a word.
* Example: Returns "N repository" if N is one, "N repositories" otherwise.
*/
export function pluralize(numItems: number | undefined, singular: string, plural: string): string {
return numItems ? `${numItems} ${numItems === 1 ? singular : plural}` : '';
}

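A few illustrative expectations for the pluralize helper added above; note that both 0 and undefined fall through to the empty string:

// Illustrative only.
pluralize(1, 'repository', 'repositories'); // => '1 repository'
pluralize(3, 'repository', 'repositories'); // => '3 repositories'
pluralize(0, 'repository', 'repositories'); // => ''
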
90 extensions/ql-vscode/src/history-item-label-provider.ts Normal file
@@ -0,0 +1,90 @@
import { env } from 'vscode';
import * as path from 'path';
import { QueryHistoryConfig } from './config';
import { LocalQueryInfo, QueryHistoryInfo } from './query-results';
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
import { pluralize } from './helpers';

interface InterpolateReplacements {
t: string; // Start time
q: string; // Query name
d: string; // Database/Controller repo name
r: string; // Result count/Empty
s: string; // Status
f: string; // Query file name
'%': '%'; // Percent sign
}

export class HistoryItemLabelProvider {
constructor(private config: QueryHistoryConfig) {
/**/
}

getLabel(item: QueryHistoryInfo) {
const replacements = item.t === 'local'
? this.getLocalInterpolateReplacements(item)
: this.getRemoteInterpolateReplacements(item);

const rawLabel = item.userSpecifiedLabel ?? (this.config.format || '%q');

return this.interpolate(rawLabel, replacements);
}

/**
* If there is a user-specified label for this query, interpolate and use that.
* Otherwise, use the raw name of this query.
*
* @returns the name of the query, unless there is a custom label for this query.
*/
getShortLabel(item: QueryHistoryInfo): string {
return item.userSpecifiedLabel
? this.getLabel(item)
: item.t === 'local'
? item.getQueryName()
: item.remoteQuery.queryName;
}

private interpolate(rawLabel: string, replacements: InterpolateReplacements): string {
return rawLabel.replace(/%(.)/g, (match, key: keyof InterpolateReplacements) => {
const replacement = replacements[key];
return replacement !== undefined ? replacement : match;
});
}

private getLocalInterpolateReplacements(item: LocalQueryInfo): InterpolateReplacements {
const { resultCount = 0, statusString = 'in progress' } = item.completedQuery || {};
return {
t: item.startTime,
q: item.getQueryName(),
d: item.initialInfo.databaseInfo.name,
r: `(${resultCount} results)`,
s: statusString,
f: item.getQueryFileName(),
'%': '%',
};
}

// Return the number of repositories queried if available. Otherwise, use the controller repository name.
private buildRepoLabel(item: RemoteQueryHistoryItem): string {
const repositoryCount = item.remoteQuery.repositoryCount;

if (repositoryCount) {
return pluralize(repositoryCount, 'repository', 'repositories');
}

return `${item.remoteQuery.controllerRepository.owner}/${item.remoteQuery.controllerRepository.name}`;
}

private getRemoteInterpolateReplacements(item: RemoteQueryHistoryItem): InterpolateReplacements {
return {
t: new Date(item.remoteQuery.executionStartTime).toLocaleString(env.language),
q: `${item.remoteQuery.queryName} (${item.remoteQuery.language})`,
d: this.buildRepoLabel(item),
r: `(${pluralize(item.resultCount, 'result', 'results')})`,
s: item.status,
f: path.basename(item.remoteQuery.queryFilePath),
'%': '%'
};
}
}
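To make the interpolation above concrete, here is a hypothetical example; the format string and replacement values are invented:

// format:       '%q on %d (%s)'
// replacements: { q: 'UnusedVariable.ql', d: 'my-database', s: 'completed', ... }
// result:       'UnusedVariable.ql on my-database (completed)'
// Unrecognised %-keys are returned unchanged, and '%%' yields a literal '%'.
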
@@ -137,6 +137,8 @@ export function getHtmlForWebview(
? `${webview.cspSource} vscode-file: 'unsafe-inline'`
: `'nonce-${nonce}'`;

const fontSrc = webview.cspSource;

/*
* Content security policy:
* default-src: allow nothing by default.
@@ -149,7 +151,7 @@ export function getHtmlForWebview(
<html>
<head>
<meta http-equiv="Content-Security-Policy"
content="default-src 'none'; script-src 'nonce-${nonce}'; style-src ${styleSrc}; connect-src ${webview.cspSource};">
content="default-src 'none'; script-src 'nonce-${nonce}'; font-src ${fontSrc}; style-src ${styleSrc}; connect-src ${webview.cspSource};">
${stylesheetsHtmlLines.join(` ${os.EOL}`)}
</head>
<body>

@@ -15,7 +15,7 @@ import * as cli from './cli';
import { CodeQLCliServer } from './cli';
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from './databases';
import { showAndLogErrorMessage, tmpDir } from './helpers';
import { assertNever } from './pure/helpers-pure';
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
import {
FromResultsViewMsg,
Interpretation,
@@ -49,6 +49,7 @@ import { getDefaultResultSetName, ParsedResultSets } from './pure/interface-type
import { RawResultSet, transformBqrsResultSet, ResultSetSchema } from './pure/bqrs-cli-types';
import { PAGE_SIZE } from './config';
import { CompletedLocalQueryInfo } from './query-results';
import { HistoryItemLabelProvider } from './history-item-label-provider';

/**
* interface.ts
@@ -136,7 +137,8 @@ export class InterfaceManager extends DisposableObject {
public ctx: vscode.ExtensionContext,
private databaseManager: DatabaseManager,
public cliServer: CodeQLCliServer,
public logger: Logger
public logger: Logger,
private labelProvider: HistoryItemLabelProvider
) {
super();
this.push(this._diagnosticCollection);
@@ -353,8 +355,8 @@ export class InterfaceManager extends DisposableObject {
assertNever(msg);
}
} catch (e) {
void showAndLogErrorMessage(e.message, {
fullMessage: e.stack
void showAndLogErrorMessage(getErrorMessage(e), {
fullMessage: getErrorStack(e)
});
}
}
@@ -416,7 +418,7 @@ export class InterfaceManager extends DisposableObject {
// more asynchronous message to not so abruptly interrupt
// user's workflow by immediately revealing the panel.
const showButton = 'View Results';
const queryName = fullQuery.getShortLabel();
const queryName = this.labelProvider.getShortLabel(fullQuery);
const resultPromise = vscode.window.showInformationMessage(
`Finished running query ${queryName.length > 0 ? ` "${queryName}"` : ''
}.`,
@@ -483,7 +485,7 @@ export class InterfaceManager extends DisposableObject {
database: fullQuery.initialInfo.databaseInfo,
shouldKeepOldResultsWhileRendering,
metadata: fullQuery.completedQuery.query.metadata,
queryName: fullQuery.label,
queryName: this.labelProvider.getLabel(fullQuery),
queryPath: fullQuery.initialInfo.queryPath
});
}
@@ -516,7 +518,7 @@ export class InterfaceManager extends DisposableObject {
resultSetNames,
pageSize: interpretedPageSize(this._interpretation),
numPages: numInterpretedPages(this._interpretation),
queryName: this._displayedQuery.label,
queryName: this.labelProvider.getLabel(this._displayedQuery),
queryPath: this._displayedQuery.initialInfo.queryPath
});
}
@@ -601,7 +603,7 @@ export class InterfaceManager extends DisposableObject {
database: results.initialInfo.databaseInfo,
shouldKeepOldResultsWhileRendering: false,
metadata: results.completedQuery.query.metadata,
queryName: results.label,
queryName: this.labelProvider.getLabel(results),
queryPath: results.initialInfo.queryPath
});
}
@@ -729,7 +731,7 @@ export class InterfaceManager extends DisposableObject {
// If interpretation fails, accept the error and continue
// trying to render uninterpreted results anyway.
void showAndLogErrorMessage(
`Showing raw results instead of interpreted ones due to an error. ${e.message}`
`Showing raw results instead of interpreted ones due to an error. ${getErrorMessage(e)}`
);
}
}
@@ -768,9 +770,8 @@ export class InterfaceManager extends DisposableObject {
try {
await this.showProblemResultsAsDiagnostics(interpretation, database);
} catch (e) {
const msg = e instanceof Error ? e.message : e.toString();
void this.logger.log(
`Exception while computing problem results as diagnostics: ${msg}`
`Exception while computing problem results as diagnostics: ${getErrorMessage(e)}`
);
this._diagnosticCollection.clear();
}

@@ -79,11 +79,11 @@ export interface WholeFileLocation {
|
||||
|
||||
export type ResolvableLocationValue = WholeFileLocation | LineColumnLocation;
|
||||
|
||||
export type UrlValue = ResolvableLocationValue | string;
|
||||
export type UrlValue = ResolvableLocationValue | string;
|
||||
|
||||
export type ColumnValue = EntityValue | number | string | boolean;
|
||||
export type CellValue = EntityValue | number | string | boolean;
|
||||
|
||||
export type ResultRow = ColumnValue[];
|
||||
export type ResultRow = CellValue[];
|
||||
|
||||
export interface RawResultSet {
|
||||
readonly schema: ResultSetSchema;
|
||||
@@ -103,7 +103,14 @@ export function transformBqrsResultSet(
|
||||
};
|
||||
}
|
||||
|
||||
export interface DecodedBqrsChunk {
|
||||
tuples: ColumnValue[][];
|
||||
next?: number;
|
||||
type BqrsKind = 'String' | 'Float' | 'Integer' | 'Boolean' | 'Date' | 'Entity';
|
||||
|
||||
interface BqrsColumn {
|
||||
name: string;
|
||||
kind: BqrsKind;
|
||||
}
|
||||
export interface DecodedBqrsChunk {
|
||||
tuples: CellValue[][];
|
||||
next?: number;
|
||||
columns: BqrsColumn[];
|
||||
}
|
||||
|
||||
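A minimal sketch of a value that satisfies the reshaped DecodedBqrsChunk interface; the data is invented, and only the field names and kinds come from the definitions above:

import { DecodedBqrsChunk } from './bqrs-cli-types';

const chunk: DecodedBqrsChunk = {
  tuples: [
    [1, 'first result', true],
    [2, 'second result', false],
  ],
  next: 2, // offset of the next page; omitted on the final chunk
  columns: [
    { name: 'id', kind: 'Integer' },
    { name: 'message', kind: 'String' },
    { name: 'flag', kind: 'Boolean' },
  ],
};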
@@ -4,6 +4,7 @@ import {
|
||||
LineColumnLocation,
|
||||
WholeFileLocation
|
||||
} from './bqrs-cli-types';
|
||||
import { createRemoteFileRef } from './location-link-utils';
|
||||
|
||||
/**
|
||||
* The CodeQL filesystem libraries use this pattern in `getURL()` predicates
|
||||
@@ -93,3 +94,45 @@ export function isWholeFileLoc(loc: UrlValue): loc is WholeFileLocation {
|
||||
export function isStringLoc(loc: UrlValue): loc is string {
|
||||
return typeof loc === 'string';
|
||||
}
|
||||
|
||||
export function tryGetRemoteLocation(
|
||||
loc: UrlValue | undefined,
|
||||
fileLinkPrefix: string,
|
||||
sourceLocationPrefix: string | undefined,
|
||||
): string | undefined {
|
||||
const resolvableLocation = tryGetResolvableLocation(loc);
|
||||
if (!resolvableLocation) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
let trimmedLocation: string;
|
||||
|
||||
// Remote locations have the following format:
|
||||
// "file:${sourceLocationPrefix}/relative/path/to/file"
|
||||
// So we need to strip off the first part to get the relative path.
|
||||
if (sourceLocationPrefix) {
|
||||
if (!resolvableLocation.uri.startsWith(`file:${sourceLocationPrefix}/`)) {
|
||||
return undefined;
|
||||
}
|
||||
trimmedLocation = resolvableLocation.uri.replace(`file:${sourceLocationPrefix}/`, '');
|
||||
} else {
|
||||
// If the source location prefix is empty (e.g. for older remote queries), we assume that the database
|
||||
// was created on a Linux actions runner and has the format:
|
||||
// "file:/home/runner/work/<repo>/<repo>/relative/path/to/file"
|
||||
// So we need to drop the first 6 parts of the path.
|
||||
if (!resolvableLocation.uri.startsWith('file:/home/runner/work/')) {
|
||||
return undefined;
|
||||
}
|
||||
const locationParts = resolvableLocation.uri.split('/');
|
||||
trimmedLocation = locationParts.slice(6, locationParts.length).join('/');
|
||||
}
|
||||
|
||||
const fileLink = {
|
||||
fileLinkPrefix,
|
||||
filePath: trimmedLocation,
|
||||
};
|
||||
return createRemoteFileRef(
|
||||
fileLink,
|
||||
resolvableLocation.startLine,
|
||||
resolvableLocation.endLine);
|
||||
}
|
||||
|
||||
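An illustrative walk-through of tryGetRemoteLocation's fallback branch, with invented values, assuming tryGetResolvableLocation passes a line/column location through unchanged (the cast sidesteps the exact location type, which is defined elsewhere):

// Hypothetical location from a database built on a Linux Actions runner.
const loc = {
  uri: 'file:/home/runner/work/my-repo/my-repo/src/app.ts',
  startLine: 10,
  startColumn: 1,
  endLine: 12,
  endColumn: 20,
};

// No sourceLocationPrefix: the first six '/'-separated parts are dropped, leaving 'src/app.ts'.
tryGetRemoteLocation(loc as any, 'https://github.com/octo/my-repo/blob/main', undefined);
// => 'https://github.com/octo/my-repo/blob/main/src/app.ts#L10-L12'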
@@ -31,7 +31,27 @@ export const asyncFilter = async function <T>(arr: T[], predicate: (arg0: T) =>
|
||||
return arr.filter((_, index) => results[index]);
|
||||
};
|
||||
|
||||
export const ONE_DAY_IN_MS = 24 * 60 * 60 * 1000;
|
||||
export const ONE_HOUR_IN_MS = 1000 * 60 * 60;
|
||||
export const TWO_HOURS_IN_MS = 1000 * 60 * 60 * 2;
|
||||
export const THREE_HOURS_IN_MS = 1000 * 60 * 60 * 3;
|
||||
/**
|
||||
* This regex matches strings of the form `owner/repo` where:
|
||||
* - `owner` is made up of alphanumeric characters, hyphens, underscores, or periods
|
||||
* - `repo` is made up of alphanumeric characters, hyphens, underscores, or periods
|
||||
*/
|
||||
export const REPO_REGEX = /^[a-zA-Z0-9-_\.]+\/[a-zA-Z0-9-_\.]+$/;
|
||||
|
||||
/**
|
||||
* This regex matches GitHub organization and user strings. These are made up of alphanumeric
|
||||
* characters, hyphens, underscores or periods.
|
||||
*/
|
||||
export const OWNER_REGEX = /^[a-zA-Z0-9-_\.]+$/;
|
||||
|
||||
export function getErrorMessage(e: any) {
|
||||
return e instanceof Error ? e.message : String(e);
|
||||
}
|
||||
|
||||
export function getErrorStack(e: any) {
|
||||
return e instanceof Error ? e.stack ?? '' : '';
|
||||
}
|
||||
|
||||
export function asError(e: any): Error {
|
||||
return e instanceof Error ? e : new Error(String(e));
|
||||
}
|
||||
|
||||
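A minimal usage sketch for the new error helpers; the failing operation is hypothetical and console.error stands in for the extension's own logging:

import { getErrorMessage, getErrorStack, asError } from './pure/helpers-pure';

try {
  throw new Error('example failure'); // hypothetical failing operation
} catch (e) {
  // `e` is untyped, so normalize it before reporting.
  console.error(getErrorMessage(e)); // 'example failure'
  console.error(getErrorStack(e));   // the stack trace, or '' for non-Error values
  const err: Error = asError(e);     // guaranteed Error instance for APIs that require one
  console.error(err.name);           // 'Error'
}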
@@ -395,7 +395,8 @@ export type FromRemoteQueriesMessage =
|
||||
| OpenVirtualFileMsg
|
||||
| RemoteQueryDownloadAnalysisResultsMessage
|
||||
| RemoteQueryDownloadAllAnalysesResultsMessage
|
||||
| RemoteQueryViewAnalysisResultsMessage;
|
||||
| RemoteQueryExportResultsMessage
|
||||
| CopyRepoListMessage;
|
||||
|
||||
export type ToRemoteQueriesMessage =
|
||||
| SetRemoteQueryResultMessage
|
||||
@@ -430,7 +431,11 @@ export interface RemoteQueryDownloadAllAnalysesResultsMessage {
|
||||
analysisSummaries: AnalysisSummary[];
|
||||
}
|
||||
|
||||
export interface RemoteQueryViewAnalysisResultsMessage {
|
||||
t: 'remoteQueryViewAnalysisResults';
|
||||
analysisSummary: AnalysisSummary
|
||||
export interface RemoteQueryExportResultsMessage {
|
||||
t: 'remoteQueryExportResults';
|
||||
}
|
||||
|
||||
export interface CopyRepoListMessage {
|
||||
t: 'copyRepoList';
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
extensions/ql-vscode/src/pure/location-link-utils.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
import { FileLink } from '../remote-queries/shared/analysis-result';
|
||||
|
||||
export function createRemoteFileRef(
|
||||
fileLink: FileLink,
|
||||
startLine?: number,
|
||||
endLine?: number
|
||||
): string {
|
||||
if (startLine && endLine) {
|
||||
return `${fileLink.fileLinkPrefix}/${fileLink.filePath}#L${startLine}-L${endLine}`;
|
||||
} else if (startLine) {
|
||||
return `${fileLink.fileLinkPrefix}/${fileLink.filePath}#L${startLine}`;
|
||||
} else {
|
||||
return `${fileLink.fileLinkPrefix}/${fileLink.filePath}`;
|
||||
}
|
||||
}
|
||||
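Illustrative calls (values invented) showing the three link shapes createRemoteFileRef produces:

import { createRemoteFileRef } from './location-link-utils';

const link = { fileLinkPrefix: 'https://github.com/octo/repo/blob/main', filePath: 'src/app.ts' };

createRemoteFileRef(link, 10, 12); // => 'https://github.com/octo/repo/blob/main/src/app.ts#L10-L12'
createRemoteFileRef(link, 10);     // => 'https://github.com/octo/repo/blob/main/src/app.ts#L10'
createRemoteFileRef(link);         // => 'https://github.com/octo/repo/blob/main/src/app.ts'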
extensions/ql-vscode/src/pure/log-summary-parser.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
import * as os from 'os';
|
||||
|
||||
// TODO(angelapwen): Only load in necessary information and
|
||||
// location in bytes for this log to save memory.
|
||||
export interface EvaluatorLogData {
|
||||
queryCausingWork: string;
|
||||
predicateName: string;
|
||||
millis: number;
|
||||
resultSize: number;
|
||||
ra: Pipelines;
|
||||
}
|
||||
|
||||
interface Pipelines {
|
||||
// Key: pipeline identifier; Value: array of pipeline steps
|
||||
pipelineNamesToSteps: Map<string, string[]>;
|
||||
}
|
||||
|
||||
/**
|
||||
* A pure method that parses a string of evaluator log summaries into
|
||||
* an array of EvaluatorLogData objects.
|
||||
*
|
||||
*/
|
||||
export function parseVisualizerData(logSummary: string): EvaluatorLogData[] {
|
||||
// Remove newline delimiters because summary is in .jsonl format.
|
||||
const jsonSummaryObjects: string[] = logSummary.split(os.EOL + os.EOL);
|
||||
const visualizerData: EvaluatorLogData[] = [];
|
||||
|
||||
for (const obj of jsonSummaryObjects) {
|
||||
const jsonObj = JSON.parse(obj);
|
||||
|
||||
// Only convert log items that have an RA and millis field
|
||||
if (jsonObj.ra !== undefined && jsonObj.millis !== undefined) {
|
||||
const newLogData: EvaluatorLogData = {
|
||||
queryCausingWork: jsonObj.queryCausingWork,
|
||||
predicateName: jsonObj.predicateName,
|
||||
millis: jsonObj.millis,
|
||||
resultSize: jsonObj.resultSize,
|
||||
ra: jsonObj.ra
|
||||
};
|
||||
visualizerData.push(newLogData);
|
||||
}
|
||||
}
|
||||
return visualizerData;
|
||||
}
|
||||
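A small, hypothetical round-trip through parseVisualizerData; the two summary objects are invented, and only the second (which has both `ra` and `millis`) survives the filter:

import * as os from 'os';
import { parseVisualizerData } from './log-summary-parser';

const summary = [
  JSON.stringify({ summaryLogVersion: '1.0', codeqlVersion: '2.7.0' }),
  JSON.stringify({
    queryCausingWork: 'Example.ql',
    predicateName: 'examplePredicate',
    millis: 12,
    resultSize: 3,
    ra: { pipelineNamesToSteps: {} },
  }),
].join(os.EOL + os.EOL);

parseVisualizerData(summary);
// => one EvaluatorLogData entry for 'examplePredicate'; the header object is skipped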
@@ -646,6 +646,35 @@ export interface ClearCacheParams {
|
||||
*/
|
||||
dryRun: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters to start a new structured log
|
||||
*/
|
||||
export interface StartLogParams {
|
||||
/**
|
||||
* The dataset for which we want to start a new structured log
|
||||
*/
|
||||
db: Dataset;
|
||||
/**
|
||||
* The path where we want to place the new structured log
|
||||
*/
|
||||
logPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters to terminate a structured log
|
||||
*/
|
||||
export interface EndLogParams {
|
||||
/**
|
||||
* The dataset for which we want to terminate the log
|
||||
*/
|
||||
db: Dataset;
|
||||
/**
|
||||
* The path of the log to terminate; this is a no-op if we aren't logging here
|
||||
*/
|
||||
logPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for trimming the cache of a dataset
|
||||
*/
|
||||
@@ -682,6 +711,26 @@ export interface ClearCacheResult {
|
||||
deletionMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of starting a new structured log.
|
||||
*/
|
||||
export interface StartLogResult {
|
||||
/**
|
||||
* A user friendly message saying what happened.
|
||||
*/
|
||||
outcomeMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of terminating a structured log.
|
||||
*/
|
||||
export interface EndLogResult {
|
||||
/**
|
||||
* A user friendly message saying what happened.
|
||||
*/
|
||||
outcomeMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for running a set of queries
|
||||
*/
|
||||
@@ -1018,6 +1067,16 @@ export const compileUpgrade = new rpc.RequestType<WithProgressId<CompileUpgradeP
|
||||
*/
|
||||
export const compileUpgradeSequence = new rpc.RequestType<WithProgressId<CompileUpgradeSequenceParams>, CompileUpgradeSequenceResult, void, void>('compilation/compileUpgradeSequence');
|
||||
|
||||
/**
|
||||
* Start a new structured log in the evaluator, terminating the previous one if it exists
|
||||
*/
|
||||
export const startLog = new rpc.RequestType<WithProgressId<StartLogParams>, StartLogResult, void, void>('evaluation/startLog');
|
||||
|
||||
/**
|
||||
* Terminate a structured log in the evaluator. This is a no-op if we aren't logging to the given location
|
||||
*/
|
||||
export const endLog = new rpc.RequestType<WithProgressId<EndLogParams>, EndLogResult, void, void>('evaluation/endLog');
|
||||
|
||||
/**
|
||||
* Clear the cache of a dataset
|
||||
*/
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import * as Sarif from 'sarif';
|
||||
import { HighlightedRegion } from '../remote-queries/shared/analysis-result';
|
||||
import { ResolvableLocationValue } from './bqrs-cli-types';
|
||||
|
||||
export interface SarifLink {
|
||||
@@ -127,35 +128,111 @@ export function parseSarifLocation(
|
||||
userVisibleFile
|
||||
} as ParsedSarifLocation;
|
||||
} else {
|
||||
const region = physicalLocation.region;
|
||||
// We assume that the SARIF we're given always has startLine
|
||||
// This is not mandated by the SARIF spec, but should be true of
|
||||
// SARIF output by our own tools.
|
||||
const startLine = region.startLine!;
|
||||
|
||||
// These defaults are from SARIF 2.1.0 spec, section 3.30.2, "Text Regions"
|
||||
// https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Ref493492556
|
||||
const endLine = region.endLine === undefined ? startLine : region.endLine;
|
||||
const startColumn = region.startColumn === undefined ? 1 : region.startColumn;
|
||||
|
||||
// We also assume that our tools will always supply `endColumn` field, which is
|
||||
// fortunate, since the SARIF spec says that it defaults to the end of the line, whose
|
||||
// length we don't know at this point in the code.
|
||||
//
|
||||
// It is off by one with respect to the way vscode counts columns in selections.
|
||||
const endColumn = region.endColumn! - 1;
|
||||
const region = parseSarifRegion(physicalLocation.region);
|
||||
|
||||
return {
|
||||
uri: effectiveLocation,
|
||||
userVisibleFile,
|
||||
startLine,
|
||||
startColumn,
|
||||
endLine,
|
||||
endColumn,
|
||||
...region
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export function parseSarifRegion(
|
||||
region: Sarif.Region
|
||||
): {
|
||||
startLine: number,
|
||||
endLine: number,
|
||||
startColumn: number,
|
||||
endColumn: number
|
||||
} {
|
||||
// The SARIF we're given should have a startLine, but we
|
||||
// fall back to 1, just in case something has gone wrong.
|
||||
const startLine = region.startLine ?? 1;
|
||||
|
||||
// These defaults are from SARIF 2.1.0 spec, section 3.30.2, "Text Regions"
|
||||
// https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Ref493492556
|
||||
const endLine = region.endLine === undefined ? startLine : region.endLine;
|
||||
const startColumn = region.startColumn === undefined ? 1 : region.startColumn;
|
||||
|
||||
// Our tools should always supply the `endColumn` field, which is fortunate, since
|
||||
// the SARIF spec says that it defaults to the end of the line, whose
|
||||
// length we don't know at this point in the code. We fall back to 1,
|
||||
// just in case something has gone wrong.
|
||||
//
|
||||
// It is off by one with respect to the way vscode counts columns in selections.
|
||||
const endColumn = (region.endColumn ?? 1) - 1;
|
||||
|
||||
return {
|
||||
startLine,
|
||||
startColumn,
|
||||
endLine,
|
||||
endColumn
|
||||
};
|
||||
}
|
||||
|
||||
export function isNoLocation(loc: ParsedSarifLocation): loc is NoLocation {
|
||||
return 'hint' in loc;
|
||||
}
|
||||
|
||||
// Some helpers for highlighting specific regions from a SARIF code snippet
|
||||
|
||||
/**
|
||||
* Checks whether a particular line (determined by its line number in the original file)
|
||||
* is part of the highlighted region of a SARIF code snippet.
|
||||
*/
|
||||
export function shouldHighlightLine(
|
||||
lineNumber: number,
|
||||
highlightedRegion: HighlightedRegion
|
||||
): boolean {
|
||||
if (lineNumber < highlightedRegion.startLine) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (highlightedRegion.endLine == undefined) {
|
||||
return lineNumber == highlightedRegion.startLine;
|
||||
}
|
||||
|
||||
return lineNumber <= highlightedRegion.endLine;
|
||||
}
|
||||
|
||||
/**
|
||||
* A line of code split into: plain text before the highlighted section, the highlighted
|
||||
* text itself, and plain text after the highlighted section.
|
||||
*/
|
||||
export interface PartiallyHighlightedLine {
|
||||
plainSection1: string;
|
||||
highlightedSection: string;
|
||||
plainSection2: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Splits a line of code into the highlighted and non-highlighted sections.
|
||||
*/
|
||||
export function parseHighlightedLine(
|
||||
line: string,
|
||||
lineNumber: number,
|
||||
highlightedRegion: HighlightedRegion
|
||||
): PartiallyHighlightedLine {
|
||||
const isSingleLineHighlight = highlightedRegion.endLine === undefined;
|
||||
const isFirstHighlightedLine = lineNumber === highlightedRegion.startLine;
|
||||
const isLastHighlightedLine = lineNumber === highlightedRegion.endLine;
|
||||
|
||||
const highlightStartColumn = isSingleLineHighlight
|
||||
? highlightedRegion.startColumn
|
||||
: isFirstHighlightedLine
|
||||
? highlightedRegion.startColumn
|
||||
: 0;
|
||||
|
||||
const highlightEndColumn = isSingleLineHighlight
|
||||
? highlightedRegion.endColumn
|
||||
: isLastHighlightedLine
|
||||
? highlightedRegion.endColumn
|
||||
: line.length + 1;
|
||||
|
||||
const plainSection1 = line.substring(0, highlightStartColumn - 1);
|
||||
const highlightedSection = line.substring(highlightStartColumn - 1, highlightEndColumn - 1);
|
||||
const plainSection2 = line.substring(highlightEndColumn - 1, line.length);
|
||||
|
||||
return { plainSection1, highlightedSection, plainSection2 };
|
||||
}
|
||||
|
||||
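Two illustrative calls (inputs invented; the module path and the HighlightedRegion shape are assumed) showing the defaulting and off-by-one handling in parseSarifRegion, and how parseHighlightedLine splits a line:

import { parseSarifRegion, parseHighlightedLine } from './sarif-utils';

// Only startLine and endColumn supplied: endLine defaults to startLine,
// startColumn defaults to 1, and endColumn is shifted to VS Code's convention.
parseSarifRegion({ startLine: 5, endColumn: 12 });
// => { startLine: 5, startColumn: 1, endLine: 5, endColumn: 11 }

// Single-line highlight of columns 7-11 of 'const x = 1;'.
parseHighlightedLine('const x = 1;', 5, { startLine: 5, startColumn: 7, endColumn: 12 } as any);
// => { plainSection1: 'const ', highlightedSection: 'x = 1', plainSection2: ';' }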
extensions/ql-vscode/src/pure/time.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
/*
|
||||
* Contains an assortment of helper constants and functions for working with time, dates, and durations.
|
||||
*/
|
||||
|
||||
export const ONE_MINUTE_IN_MS = 1000 * 60;
|
||||
export const ONE_HOUR_IN_MS = ONE_MINUTE_IN_MS * 60;
|
||||
export const TWO_HOURS_IN_MS = ONE_HOUR_IN_MS * 2;
|
||||
export const THREE_HOURS_IN_MS = ONE_HOUR_IN_MS * 3;
|
||||
export const ONE_DAY_IN_MS = ONE_HOUR_IN_MS * 24;
|
||||
|
||||
// These are approximations
|
||||
export const ONE_MONTH_IN_MS = ONE_DAY_IN_MS * 30;
|
||||
export const ONE_YEAR_IN_MS = ONE_DAY_IN_MS * 365;
|
||||
|
||||
const durationFormatter = new Intl.RelativeTimeFormat('en', {
|
||||
numeric: 'auto',
|
||||
});
|
||||
|
||||
/**
|
||||
* Converts a number of milliseconds into a human-readable string with units, indicating a relative time in the past or future.
|
||||
*
|
||||
* @param relativeTimeMillis The duration in milliseconds. A negative number indicates a duration in the past, and a positive number a duration in
|
||||
* the future.
|
||||
* @returns A humanized duration. For example, "in 2 minutes", "2 minutes ago", "yesterday", or "tomorrow".
|
||||
*/
|
||||
export function humanizeRelativeTime(relativeTimeMillis?: number) {
|
||||
if (relativeTimeMillis === undefined) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (Math.abs(relativeTimeMillis) < ONE_HOUR_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_MINUTE_IN_MS), 'minute');
|
||||
} else if (Math.abs(relativeTimeMillis) < ONE_DAY_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_HOUR_IN_MS), 'hour');
|
||||
} else if (Math.abs(relativeTimeMillis) < ONE_MONTH_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_DAY_IN_MS), 'day');
|
||||
} else if (Math.abs(relativeTimeMillis) < ONE_YEAR_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_MONTH_IN_MS), 'month');
|
||||
} else {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_YEAR_IN_MS), 'year');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a number of milliseconds into a human-readable string with units, indicating an amount of time.
|
||||
* Negative numbers have no meaning and are considered to be "Less than a minute".
|
||||
*
|
||||
* @param millis The number of milliseconds to convert.
|
||||
* @returns A humanized duration. For example, "2 minutes", "2 hours", "2 days", or "2 months".
|
||||
*/
|
||||
export function humanizeUnit(millis?: number): string {
|
||||
// assume an undefined or zero value is a zero
|
||||
// assume anything less than 0 is a zero
|
||||
if (!millis || millis < ONE_MINUTE_IN_MS) {
|
||||
return 'Less than a minute';
|
||||
}
|
||||
let unit: string;
|
||||
let unitDiff: number;
|
||||
if (millis < ONE_HOUR_IN_MS) {
|
||||
unit = 'minute';
|
||||
unitDiff = Math.floor(millis / ONE_MINUTE_IN_MS);
|
||||
} else if (millis < ONE_DAY_IN_MS) {
|
||||
unit = 'hour';
|
||||
unitDiff = Math.floor(millis / ONE_HOUR_IN_MS);
|
||||
} else if (millis < ONE_MONTH_IN_MS) {
|
||||
unit = 'day';
|
||||
unitDiff = Math.floor(millis / ONE_DAY_IN_MS);
|
||||
} else if (millis < ONE_YEAR_IN_MS) {
|
||||
unit = 'month';
|
||||
unitDiff = Math.floor(millis / ONE_MONTH_IN_MS);
|
||||
} else {
|
||||
unit = 'year';
|
||||
unitDiff = Math.floor(millis / ONE_YEAR_IN_MS);
|
||||
}
|
||||
|
||||
return createFormatter(unit).format(unitDiff);
|
||||
}
|
||||
|
||||
function createFormatter(unit: string) {
|
||||
return Intl.NumberFormat('en-US', {
|
||||
style: 'unit',
|
||||
unit,
|
||||
unitDisplay: 'long'
|
||||
});
|
||||
}
|
||||
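A few example outputs from the new time helpers (en locale; the relative path of the import depends on the caller's location):

import { humanizeRelativeTime, humanizeUnit, ONE_DAY_IN_MS, ONE_HOUR_IN_MS } from './time';

humanizeRelativeTime(-ONE_DAY_IN_MS);     // => 'yesterday'
humanizeRelativeTime(-2 * ONE_DAY_IN_MS); // => '2 days ago'
humanizeRelativeTime(3 * ONE_HOUR_IN_MS); // => 'in 3 hours'
humanizeUnit(undefined);                  // => 'Less than a minute'
humanizeUnit(90 * 60 * 1000);             // => '1 hour'
humanizeUnit(2 * ONE_DAY_IN_MS);          // => '2 days'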
@@ -3,6 +3,7 @@ import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import { Disposable, ExtensionContext } from 'vscode';
|
||||
import { logger } from './logging';
|
||||
import { QueryHistoryManager } from './query-history';
|
||||
|
||||
const LAST_SCRUB_TIME_KEY = 'lastScrubTime';
|
||||
|
||||
@@ -30,12 +31,13 @@ export function registerQueryHistoryScubber(
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
qhm: QueryHistoryManager,
|
||||
ctx: ExtensionContext,
|
||||
|
||||
// optional counter to keep track of how many times the scrubber has run
|
||||
counter?: Counter
|
||||
): Disposable {
|
||||
const deregister = setInterval(scrubQueries, wakeInterval, throttleTime, maxQueryTime, queryDirectory, ctx, counter);
|
||||
const deregister = setInterval(scrubQueries, wakeInterval, throttleTime, maxQueryTime, queryDirectory, qhm, ctx, counter);
|
||||
|
||||
return {
|
||||
dispose: () => {
|
||||
@@ -48,6 +50,7 @@ async function scrubQueries(
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
qhm: QueryHistoryManager,
|
||||
ctx: ExtensionContext,
|
||||
counter?: Counter
|
||||
) {
|
||||
@@ -89,6 +92,7 @@ async function scrubQueries(
|
||||
} finally {
|
||||
void logger.log(`Scrubbed ${scrubCount} old queries.`);
|
||||
}
|
||||
await qhm.removeDeletedQueries();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -28,12 +28,22 @@ import { URLSearchParams } from 'url';
|
||||
import { QueryServerClient } from './queryserver-client';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { commandRunner } from './commandRunner';
|
||||
import { assertNever, ONE_HOUR_IN_MS, TWO_HOURS_IN_MS } from './pure/helpers-pure';
|
||||
import { ONE_HOUR_IN_MS, TWO_HOURS_IN_MS } from './pure/time';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { CompletedLocalQueryInfo, LocalQueryInfo as LocalQueryInfo, QueryHistoryInfo } from './query-results';
|
||||
import { DatabaseManager } from './databases';
|
||||
import { registerQueryHistoryScubber } from './query-history-scrubber';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { slurpQueryHistory, splatQueryHistory } from './query-serialization';
|
||||
import * as fs from 'fs-extra';
|
||||
import { CliVersionConstraint } from './cli';
|
||||
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||
import { Credentials } from './authentication';
|
||||
import { cancelRemoteQuery } from './remote-queries/gh-actions-api-client';
|
||||
import { RemoteQueriesManager } from './remote-queries/remote-queries-manager';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
import { InterfaceManager } from './interface';
|
||||
import { WebviewReveal } from './interface-utils';
|
||||
|
||||
/**
|
||||
* query-history.ts
|
||||
@@ -119,7 +129,10 @@ export class HistoryTreeDataProvider extends DisposableObject {
|
||||
|
||||
private current: QueryHistoryInfo | undefined;
|
||||
|
||||
constructor(extensionPath: string) {
|
||||
constructor(
|
||||
extensionPath: string,
|
||||
private readonly labelProvider: HistoryItemLabelProvider,
|
||||
) {
|
||||
super();
|
||||
this.failedIconPath = path.join(
|
||||
extensionPath,
|
||||
@@ -136,13 +149,13 @@ export class HistoryTreeDataProvider extends DisposableObject {
|
||||
}
|
||||
|
||||
async getTreeItem(element: QueryHistoryInfo): Promise<TreeItem> {
|
||||
const treeItem = new TreeItem(element.label);
|
||||
const treeItem = new TreeItem(this.labelProvider.getLabel(element));
|
||||
|
||||
treeItem.command = {
|
||||
title: 'Query History Item',
|
||||
command: 'codeQLQueryHistory.itemClicked',
|
||||
arguments: [element],
|
||||
tooltip: element.failureReason || element.label
|
||||
tooltip: element.failureReason || this.labelProvider.getLabel(element)
|
||||
};
|
||||
|
||||
// Populate the icon and the context value. We use the context value to
|
||||
@@ -181,38 +194,47 @@ export class HistoryTreeDataProvider extends DisposableObject {
|
||||
): ProviderResult<QueryHistoryInfo[]> {
|
||||
return element ? [] : this.history.sort((h1, h2) => {
|
||||
|
||||
// TODO remote queries are not implemented yet.
|
||||
if (h1.t !== 'local' && h2.t !== 'local') {
|
||||
return 0;
|
||||
}
|
||||
if (h1.t !== 'local') {
|
||||
return -1;
|
||||
}
|
||||
if (h2.t !== 'local') {
|
||||
return 1;
|
||||
}
|
||||
const h1Label = this.labelProvider.getLabel(h1).toLowerCase();
|
||||
const h2Label = this.labelProvider.getLabel(h2).toLowerCase();
|
||||
|
||||
const resultCount1 = h1.completedQuery?.resultCount ?? -1;
|
||||
const resultCount2 = h2.completedQuery?.resultCount ?? -1;
|
||||
const h1Date = h1.t === 'local'
|
||||
? h1.initialInfo.start.getTime()
|
||||
: h1.remoteQuery?.executionStartTime;
|
||||
|
||||
const h2Date = h2.t === 'local'
|
||||
? h2.initialInfo.start.getTime()
|
||||
: h2.remoteQuery?.executionStartTime;
|
||||
|
||||
const resultCount1 = h1.t === 'local'
|
||||
? h1.completedQuery?.resultCount ?? -1
|
||||
: h1.resultCount ?? -1;
|
||||
const resultCount2 = h2.t === 'local'
|
||||
? h2.completedQuery?.resultCount ?? -1
|
||||
: h2.resultCount ?? -1;
|
||||
|
||||
switch (this.sortOrder) {
|
||||
case SortOrder.NameAsc:
|
||||
return h1.label.localeCompare(h2.label, env.language);
|
||||
return h1Label.localeCompare(h2Label, env.language);
|
||||
|
||||
case SortOrder.NameDesc:
|
||||
return h2.label.localeCompare(h1.label, env.language);
|
||||
return h2Label.localeCompare(h1Label, env.language);
|
||||
|
||||
case SortOrder.DateAsc:
|
||||
return h1.initialInfo.start.getTime() - h2.initialInfo.start.getTime();
|
||||
return h1Date - h2Date;
|
||||
|
||||
case SortOrder.DateDesc:
|
||||
return h2.initialInfo.start.getTime() - h1.initialInfo.start.getTime();
|
||||
return h2Date - h1Date;
|
||||
|
||||
case SortOrder.CountAsc:
|
||||
// If the result counts are equal, sort by name.
|
||||
return resultCount1 - resultCount2 === 0
|
||||
? h1.label.localeCompare(h2.label, env.language)
|
||||
? h1Label.localeCompare(h2Label, env.language)
|
||||
: resultCount1 - resultCount2;
|
||||
|
||||
case SortOrder.CountDesc:
|
||||
// If the result counts are equal, sort by name.
|
||||
return resultCount2 - resultCount1 === 0
|
||||
? h2.label.localeCompare(h1.label, env.language)
|
||||
? h2Label.localeCompare(h1Label, env.language)
|
||||
: resultCount2 - resultCount1;
|
||||
default:
|
||||
assertNever(this.sortOrder);
|
||||
@@ -288,25 +310,16 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
queryHistoryScrubber: Disposable | undefined;
|
||||
private queryMetadataStorageLocation;
|
||||
|
||||
private readonly _onDidAddQueryItem = super.push(new EventEmitter<QueryHistoryInfo>());
|
||||
readonly onDidAddQueryItem: Event<QueryHistoryInfo> = this
|
||||
._onDidAddQueryItem.event;
|
||||
|
||||
private readonly _onDidRemoveQueryItem = super.push(new EventEmitter<QueryHistoryInfo>());
|
||||
readonly onDidRemoveQueryItem: Event<QueryHistoryInfo> = this
|
||||
._onDidRemoveQueryItem.event;
|
||||
|
||||
private readonly _onWillOpenQueryItem = super.push(new EventEmitter<QueryHistoryInfo>());
|
||||
readonly onWillOpenQueryItem: Event<QueryHistoryInfo> = this
|
||||
._onWillOpenQueryItem.event;
|
||||
|
||||
constructor(
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
ctx: ExtensionContext,
|
||||
private queryHistoryConfigListener: QueryHistoryConfig,
|
||||
private doCompareCallback: (
|
||||
private readonly qs: QueryServerClient,
|
||||
private readonly dbm: DatabaseManager,
|
||||
private readonly localQueriesInterfaceManager: InterfaceManager,
|
||||
private readonly remoteQueriesManager: RemoteQueriesManager,
|
||||
private readonly queryStorageDir: string,
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly queryHistoryConfigListener: QueryHistoryConfig,
|
||||
private readonly labelProvider: HistoryItemLabelProvider,
|
||||
private readonly doCompareCallback: (
|
||||
from: CompletedLocalQueryInfo,
|
||||
to: CompletedLocalQueryInfo
|
||||
) => Promise<void>
|
||||
@@ -320,7 +333,8 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
this.queryMetadataStorageLocation = path.join((ctx.storageUri || ctx.globalStorageUri).fsPath, WORKSPACE_QUERY_HISTORY_FILE);
|
||||
|
||||
this.treeDataProvider = this.push(new HistoryTreeDataProvider(
|
||||
ctx.extensionPath
|
||||
ctx.extensionPath,
|
||||
this.labelProvider
|
||||
));
|
||||
this.treeView = this.push(window.createTreeView('codeQLQueryHistory', {
|
||||
treeDataProvider: this.treeDataProvider,
|
||||
@@ -406,6 +420,18 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
this.handleOpenQueryDirectory.bind(this)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.showEvalLog',
|
||||
this.handleShowEvalLog.bind(this)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.showEvalLogSummary',
|
||||
this.handleShowEvalLogSummary.bind(this)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.cancel',
|
||||
@@ -418,6 +444,12 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
this.handleShowQueryText.bind(this)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.exportResults',
|
||||
this.handleExportResults.bind(this)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.viewCsvResults',
|
||||
@@ -458,6 +490,12 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.copyRepoList',
|
||||
this.handleCopyRepoList.bind(this)
|
||||
)
|
||||
);
|
||||
|
||||
// There are two configuration items that affect the query history:
|
||||
// 1. The ttl for query history items.
|
||||
@@ -466,7 +504,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
this.push(
|
||||
queryHistoryConfigListener.onDidChangeConfiguration(() => {
|
||||
this.treeDataProvider.refresh();
|
||||
this.registerQueryHistoryScrubber(queryHistoryConfigListener, ctx);
|
||||
this.registerQueryHistoryScrubber(queryHistoryConfigListener, this, ctx);
|
||||
})
|
||||
);
|
||||
|
||||
@@ -485,13 +523,18 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
},
|
||||
}));
|
||||
|
||||
this.registerQueryHistoryScrubber(queryHistoryConfigListener, ctx);
|
||||
this.registerQueryHistoryScrubber(queryHistoryConfigListener, this, ctx);
|
||||
this.registerToRemoteQueriesEvents();
|
||||
}
|
||||
|
||||
private getCredentials() {
|
||||
return Credentials.initialize(this.ctx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register and create the history scrubber.
|
||||
*/
|
||||
private registerQueryHistoryScrubber(queryHistoryConfigListener: QueryHistoryConfig, ctx: ExtensionContext) {
|
||||
private registerQueryHistoryScrubber(queryHistoryConfigListener: QueryHistoryConfig, qhm: QueryHistoryManager, ctx: ExtensionContext) {
|
||||
this.queryHistoryScrubber?.dispose();
|
||||
// Every hour check if we need to re-run the query history scrubber.
|
||||
this.queryHistoryScrubber = this.push(
|
||||
@@ -500,17 +543,59 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
TWO_HOURS_IN_MS,
|
||||
queryHistoryConfigListener.ttlInMillis,
|
||||
this.queryStorageDir,
|
||||
qhm,
|
||||
ctx
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
private registerToRemoteQueriesEvents() {
|
||||
const queryAddedSubscription = this.remoteQueriesManager.onRemoteQueryAdded(event => {
|
||||
this.addQuery({
|
||||
t: 'remote',
|
||||
status: QueryStatus.InProgress,
|
||||
completed: false,
|
||||
queryId: event.queryId,
|
||||
remoteQuery: event.query,
|
||||
});
|
||||
});
|
||||
|
||||
const queryRemovedSubscription = this.remoteQueriesManager.onRemoteQueryRemoved(async (event) => {
|
||||
const item = this.treeDataProvider.allHistory.find(i => i.t === 'remote' && i.queryId === event.queryId);
|
||||
if (item) {
|
||||
await this.removeRemoteQuery(item as RemoteQueryHistoryItem);
|
||||
}
|
||||
});
|
||||
|
||||
const queryStatusUpdateSubscription = this.remoteQueriesManager.onRemoteQueryStatusUpdate(async (event) => {
|
||||
const item = this.treeDataProvider.allHistory.find(i => i.t === 'remote' && i.queryId === event.queryId);
|
||||
if (item) {
|
||||
const remoteQueryHistoryItem = item as RemoteQueryHistoryItem;
|
||||
remoteQueryHistoryItem.status = event.status;
|
||||
remoteQueryHistoryItem.failureReason = event.failureReason;
|
||||
remoteQueryHistoryItem.resultCount = event.resultCount;
|
||||
if (event.status === QueryStatus.Completed) {
|
||||
remoteQueryHistoryItem.completed = true;
|
||||
}
|
||||
await this.refreshTreeView();
|
||||
} else {
|
||||
void logger.log('Variant analysis status update event received for unknown variant analysis');
|
||||
}
|
||||
});
|
||||
|
||||
this.push(queryAddedSubscription);
|
||||
this.push(queryRemovedSubscription);
|
||||
this.push(queryStatusUpdateSubscription);
|
||||
}
|
||||
|
||||
async readQueryHistory(): Promise<void> {
|
||||
void logger.log(`Reading cached query history from '${this.queryMetadataStorageLocation}'.`);
|
||||
const history = await slurpQueryHistory(this.queryMetadataStorageLocation, this.queryHistoryConfigListener);
|
||||
const history = await slurpQueryHistory(this.queryMetadataStorageLocation);
|
||||
this.treeDataProvider.allHistory = history;
|
||||
this.treeDataProvider.allHistory.forEach((item) => {
|
||||
this._onDidAddQueryItem.fire(item);
|
||||
this.treeDataProvider.allHistory.forEach(async (item) => {
|
||||
if (item.t === 'remote') {
|
||||
await this.remoteQueriesManager.rehydrateRemoteQuery(item.queryId, item.remoteQuery, item.status);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -554,6 +639,19 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
}
|
||||
|
||||
getCurrentQueryHistoryItem(): QueryHistoryInfo | undefined {
|
||||
return this.treeDataProvider.getCurrent();
|
||||
}
|
||||
|
||||
async removeDeletedQueries() {
|
||||
await Promise.all(this.treeDataProvider.allHistory.map(async (item) => {
|
||||
if (item.t == 'local' && item.completedQuery && !(await fs.pathExists(item.completedQuery?.query.querySaveDir))) {
|
||||
this.treeDataProvider.remove(item);
|
||||
item.completedQuery?.dispose();
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
async handleRemoveHistoryItem(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[] = []
|
||||
@@ -572,26 +670,30 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
await item.completedQuery?.query.deleteQuery();
|
||||
}
|
||||
} else {
|
||||
// Remote queries can be removed locally, but not remotely.
|
||||
// The user must cancel the query on GitHub Actions explicitly.
|
||||
this.treeDataProvider.remove(item);
|
||||
void logger.log(`Deleted ${item.label}.`);
|
||||
if (item.status === QueryStatus.InProgress) {
|
||||
void logger.log('The remote query is still running on GitHub Actions. To cancel there, you must go to the query run in your browser.');
|
||||
}
|
||||
|
||||
this._onDidRemoveQueryItem.fire(item);
|
||||
await this.removeRemoteQuery(item);
|
||||
}
|
||||
|
||||
}));
|
||||
|
||||
await this.writeQueryHistory();
|
||||
const current = this.treeDataProvider.getCurrent();
|
||||
if (current !== undefined) {
|
||||
await this.treeView.reveal(current, { select: true });
|
||||
this._onWillOpenQueryItem.fire(current);
|
||||
await this.openQueryResults(current);
|
||||
}
|
||||
}
|
||||
|
||||
private async removeRemoteQuery(item: RemoteQueryHistoryItem): Promise<void> {
|
||||
// Remote queries can be removed locally, but not remotely.
|
||||
// The user must cancel the query on GitHub Actions explicitly.
|
||||
this.treeDataProvider.remove(item);
|
||||
void logger.log(`Deleted ${this.labelProvider.getLabel(item)}.`);
|
||||
if (item.status === QueryStatus.InProgress) {
|
||||
void logger.log('The variant analysis is still running on GitHub Actions. To cancel there, you must go to the workflow run in your browser.');
|
||||
}
|
||||
|
||||
await this.remoteQueriesManager.removeRemoteQuery(item.queryId);
|
||||
}
|
||||
|
||||
async handleSortByName() {
|
||||
if (this.treeDataProvider.sortOrder === SortOrder.NameAsc) {
|
||||
this.treeDataProvider.sortOrder = SortOrder.NameDesc;
|
||||
@@ -622,21 +724,21 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
): Promise<void> {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
|
||||
|
||||
// TODO will support remote queries
|
||||
if (!this.assertSingleQuery(finalMultiSelect) || finalSingleItem?.t !== 'local') {
|
||||
if (!this.assertSingleQuery(finalMultiSelect)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await window.showInputBox({
|
||||
prompt: 'Label:',
|
||||
placeHolder: '(use default)',
|
||||
value: finalSingleItem.label,
|
||||
placeHolder: `(use default: ${this.queryHistoryConfigListener.format})`,
|
||||
value: finalSingleItem.userSpecifiedLabel ?? '',
|
||||
title: 'Set query label',
|
||||
prompt: 'Set the query history item label. See the description of the codeQL.queryHistory.format setting for more information.',
|
||||
});
|
||||
// undefined response means the user cancelled the dialog; don't change anything
|
||||
if (response !== undefined) {
|
||||
// Interpret empty string response as 'go back to using default'
|
||||
finalSingleItem.initialInfo.userSpecifiedLabel = response === '' ? undefined : response;
|
||||
this.treeDataProvider.refresh();
|
||||
finalSingleItem.userSpecifiedLabel = response === '' ? undefined : response;
|
||||
await this.refreshTreeView();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -663,7 +765,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
await this.doCompareCallback(from as CompletedLocalQueryInfo, to as CompletedLocalQueryInfo);
|
||||
}
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -692,7 +794,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
} else {
|
||||
// show results on single click only if query is completed successfully.
|
||||
if (finalSingleItem.status === QueryStatus.Completed) {
|
||||
await this._onWillOpenQueryItem.fire(finalSingleItem);
|
||||
await this.openQueryResults(finalSingleItem);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -717,6 +819,18 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
}
|
||||
|
||||
async getQueryHistoryItemDirectory(queryHistoryItem: QueryHistoryInfo): Promise<string> {
|
||||
if (queryHistoryItem.t === 'local') {
|
||||
if (queryHistoryItem.completedQuery) {
|
||||
return queryHistoryItem.completedQuery.query.querySaveDir;
|
||||
}
|
||||
} else if (queryHistoryItem.t === 'remote') {
|
||||
return path.join(this.queryStorageDir, queryHistoryItem.queryId);
|
||||
}
|
||||
|
||||
throw new Error('Unable to get query directory');
|
||||
}
|
||||
|
||||
async handleOpenQueryDirectory(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[]
|
||||
@@ -727,20 +841,83 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
return;
|
||||
}
|
||||
|
||||
let p: string | undefined;
|
||||
let externalFilePath: string | undefined;
|
||||
if (finalSingleItem.t === 'local') {
|
||||
if (finalSingleItem.completedQuery) {
|
||||
p = finalSingleItem.completedQuery.query.querySaveDir;
|
||||
externalFilePath = path.join(finalSingleItem.completedQuery.query.querySaveDir, 'timestamp');
|
||||
}
|
||||
} else if (finalSingleItem.t === 'remote') {
|
||||
p = path.join(this.queryStorageDir, finalSingleItem.queryId);
|
||||
externalFilePath = path.join(this.queryStorageDir, finalSingleItem.queryId, 'timestamp');
|
||||
}
|
||||
|
||||
if (p) {
|
||||
if (externalFilePath) {
|
||||
if (!(await fs.pathExists(externalFilePath))) {
|
||||
// timestamp file is missing (manually deleted?) try selecting the parent folder.
|
||||
// It's less nice, but at least it will work.
|
||||
externalFilePath = path.dirname(externalFilePath);
|
||||
if (!(await fs.pathExists(externalFilePath))) {
|
||||
throw new Error(`Query directory does not exist: ${externalFilePath}`);
|
||||
}
|
||||
}
|
||||
try {
|
||||
await commands.executeCommand('revealFileInOS', Uri.file(p));
|
||||
await commands.executeCommand('revealFileInOS', Uri.file(externalFilePath));
|
||||
} catch (e) {
|
||||
throw new Error(`Failed to open ${p}: ${e.message}`);
|
||||
throw new Error(`Failed to open ${externalFilePath}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private warnNoEvalLog() {
|
||||
void showAndLogWarningMessage(`No evaluator log is available for this run. Perhaps it failed before evaluation, or you are running with a version of CodeQL before ${CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG}?`);
|
||||
}
|
||||
|
||||
private warnNoEvalLogSummary() {
|
||||
void showAndLogWarningMessage(`Evaluator log summary and evaluator log are not available for this run. Perhaps they failed before evaluation, or you are running with a version of CodeQL before ${CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG}?`);
|
||||
}
|
||||
|
||||
private warnInProgressEvalLogSummary() {
|
||||
void showAndLogWarningMessage('The evaluator log summary is still being generated. Please try again later. The summary generation process is tracked in the "CodeQL Extension Log" view.');
|
||||
}
|
||||
|
||||
async handleShowEvalLog(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[]
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
|
||||
|
||||
// Only applicable to an individual local query
|
||||
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem || finalSingleItem.t !== 'local') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (finalSingleItem.evalLogLocation) {
|
||||
await this.tryOpenExternalFile(finalSingleItem.evalLogLocation);
|
||||
} else {
|
||||
this.warnNoEvalLog();
|
||||
}
|
||||
}
|
||||
|
||||
async handleShowEvalLogSummary(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[]
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
|
||||
|
||||
// Only applicable to an individual local query
|
||||
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem || finalSingleItem.t !== 'local') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (finalSingleItem.evalLogSummaryLocation) {
|
||||
await this.tryOpenExternalFile(finalSingleItem.evalLogSummaryLocation);
|
||||
}
|
||||
// Summary log file doesn't exist.
|
||||
else {
|
||||
if (finalSingleItem.evalLogLocation && await fs.pathExists(finalSingleItem.evalLogLocation)) {
|
||||
// If raw log does exist, then the summary log is still being generated.
|
||||
this.warnInProgressEvalLogSummary();
|
||||
} else {
|
||||
this.warnNoEvalLogSummary();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -753,11 +930,20 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
// In the future, we may support cancelling remote queries, but this is not a short term plan.
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
|
||||
|
||||
(finalMultiSelect || [finalSingleItem]).forEach((item) => {
|
||||
if (item.status === QueryStatus.InProgress && item.t === 'local') {
|
||||
item.cancel();
|
||||
const selected = finalMultiSelect || [finalSingleItem];
|
||||
const results = selected.map(async item => {
|
||||
if (item.status === QueryStatus.InProgress) {
|
||||
if (item.t === 'local') {
|
||||
item.cancel();
|
||||
} else if (item.t === 'remote') {
|
||||
void showAndLogInformationMessage('Cancelling variant analysis. This may take a while.');
|
||||
const credentials = await this.getCredentials();
|
||||
await cancelRemoteQuery(credentials, item.remoteQuery);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
await Promise.all(results);
|
||||
}
|
||||
|
||||
async handleShowQueryText(
|
||||
@@ -803,7 +989,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
query.resultsPaths.interpretedResultsPath
|
||||
);
|
||||
} else {
|
||||
const label = finalSingleItem.label;
|
||||
const label = this.labelProvider.getLabel(finalSingleItem);
|
||||
void showAndLogInformationMessage(
|
||||
`Query ${label} has no interpreted results.`
|
||||
);
|
||||
@@ -825,11 +1011,11 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
void this.tryOpenExternalFile(query.csvPath);
|
||||
return;
|
||||
}
|
||||
await query.exportCsvResults(this.qs, query.csvPath, () => {
|
||||
if (await query.exportCsvResults(this.qs, query.csvPath)) {
|
||||
void this.tryOpenExternalFile(
|
||||
query.csvPath
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async handleViewCsvAlerts(
|
||||
@@ -883,16 +1069,33 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
);
|
||||
}
|
||||
|
||||
async handleCopyRepoList(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
|
||||
|
||||
// Remote queries only
|
||||
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem || finalSingleItem.t !== 'remote') {
|
||||
return;
|
||||
}
|
||||
|
||||
await commands.executeCommand('codeQL.copyRepoList', finalSingleItem.queryId);
|
||||
}
|
||||
|
||||
async getQueryText(item: QueryHistoryInfo): Promise<string> {
|
||||
return item.t === 'local'
|
||||
? item.initialInfo.queryText
|
||||
: item.remoteQuery.queryText;
|
||||
}
|
||||
|
||||
async handleExportResults(): Promise<void> {
|
||||
await commands.executeCommand('codeQL.exportVariantAnalysisResults');
|
||||
}
|
||||
|
||||
addQuery(item: QueryHistoryInfo) {
|
||||
this.treeDataProvider.pushQuery(item);
|
||||
this.updateTreeViewSelectionIfVisible();
|
||||
this._onDidAddQueryItem.fire(item);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -920,11 +1123,12 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
try {
|
||||
await window.showTextDocument(uri, { preview: false });
|
||||
} catch (e) {
|
||||
const msg = getErrorMessage(e);
|
||||
if (
|
||||
e.message.includes(
|
||||
msg.includes(
|
||||
'Files above 50MB cannot be synchronized with extensions'
|
||||
) ||
|
||||
e.message.includes('too large to open')
|
||||
msg.includes('too large to open')
|
||||
) {
|
||||
const res = await showBinaryChoiceDialog(
|
||||
`VS Code does not allow extensions to open files >50MB. This file
|
||||
@@ -937,13 +1141,13 @@ the file in the file explorer and dragging it into the workspace.`
|
||||
try {
|
||||
await commands.executeCommand('revealFileInOS', uri);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
void showAndLogErrorMessage(`Could not open file ${fileLocation}`);
|
||||
void logger.log(e.message);
|
||||
void logger.log(e.stack);
|
||||
void logger.log(getErrorMessage(e));
|
||||
void logger.log(getErrorStack(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -991,7 +1195,7 @@ the file in the file explorer and dragging it into the workspace.`
|
||||
otherQuery.initialInfo.databaseInfo.name === dbName
|
||||
)
|
||||
.map((item) => ({
|
||||
label: item.label,
|
||||
label: this.labelProvider.getLabel(item),
|
||||
description: (item as CompletedLocalQueryInfo).initialInfo.databaseInfo.name,
|
||||
detail: (item as CompletedLocalQueryInfo).completedQuery.statusString,
|
||||
query: item as CompletedLocalQueryInfo,
|
||||
@@ -1092,4 +1296,13 @@ the file in the file explorer and dragging it into the workspace.`
|
||||
this.treeDataProvider.refresh();
|
||||
await this.writeQueryHistory();
|
||||
}
|
||||
|
||||
private async openQueryResults(item: QueryHistoryInfo) {
|
||||
if (item.t === 'local') {
|
||||
await this.localQueriesInterfaceManager.showResults(item as CompletedLocalQueryInfo, WebviewReveal.Forced, false);
|
||||
}
|
||||
else if (item.t === 'remote') {
|
||||
await this.remoteQueriesManager.openRemoteQueryResults(item.queryId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,7 +14,6 @@ import {
|
||||
SarifInterpretationData,
|
||||
GraphInterpretationData
|
||||
} from './pure/interface-types';
|
||||
import { QueryHistoryConfig } from './config';
|
||||
import { DatabaseInfo } from './pure/interface-types';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
@@ -216,7 +215,8 @@ export class LocalQueryInfo {
|
||||
|
||||
public failureReason: string | undefined;
|
||||
public completedQuery: CompletedQueryInfo | undefined;
|
||||
private config: QueryHistoryConfig | undefined;
|
||||
public evalLogLocation: string | undefined;
|
||||
public evalLogSummaryLocation: string | undefined;
|
||||
|
||||
/**
|
||||
* Note that in the {@link slurpQueryHistory} method, we create a FullQueryInfo instance
|
||||
@@ -224,11 +224,8 @@ export class LocalQueryInfo {
|
||||
*/
|
||||
constructor(
|
||||
public readonly initialInfo: InitialQueryInfo,
|
||||
config: QueryHistoryConfig,
|
||||
private cancellationSource?: CancellationTokenSource // used to cancel in progress queries
|
||||
) {
|
||||
this.setConfig(config);
|
||||
}
|
||||
) { /**/ }
|
||||
|
||||
cancel() {
|
||||
this.cancellationSource?.cancel();
|
||||
@@ -241,43 +238,12 @@ export class LocalQueryInfo {
|
||||
return this.initialInfo.start.toLocaleString(env.language);
|
||||
}
|
||||
|
||||
interpolate(template: string): string {
|
||||
const { resultCount = 0, statusString = 'in progress' } = this.completedQuery || {};
|
||||
const replacements: { [k: string]: string } = {
|
||||
t: this.startTime,
|
||||
q: this.getQueryName(),
|
||||
d: this.initialInfo.databaseInfo.name,
|
||||
r: resultCount.toString(),
|
||||
s: statusString,
|
||||
f: this.getQueryFileName(),
|
||||
'%': '%',
|
||||
};
|
||||
return template.replace(/%(.)/g, (match, key) => {
|
||||
const replacement = replacements[key];
|
||||
return replacement !== undefined ? replacement : match;
|
||||
});
|
||||
get userSpecifiedLabel() {
|
||||
return this.initialInfo.userSpecifiedLabel;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a label for this query that includes interpolated values.
|
||||
*/
|
||||
get label(): string {
|
||||
return this.interpolate(
|
||||
this.initialInfo.userSpecifiedLabel ?? this.config?.format ?? ''
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Avoids getting the default label for the query.
|
||||
* If there is a custom label for this query, interpolate and use that.
|
||||
* Otherwise, use the name of the query.
|
||||
*
|
||||
* @returns the name of the query, unless there is a custom label for this query.
|
||||
*/
|
||||
getShortLabel(): string {
|
||||
return this.initialInfo.userSpecifiedLabel
|
||||
? this.interpolate(this.initialInfo.userSpecifiedLabel)
|
||||
: this.getQueryName();
|
||||
set userSpecifiedLabel(label: string | undefined) {
|
||||
this.initialInfo.userSpecifiedLabel = label;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -340,21 +306,4 @@ export class LocalQueryInfo {
|
||||
return QueryStatus.Failed;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The `config` property must not be serialized since it contains a listener
|
||||
* for global configuration changes. Instead, it should be set when the query
|
||||
* is deserialized.
|
||||
*
|
||||
* @param config the global query history config object
|
||||
*/
|
||||
setConfig(config: QueryHistoryConfig) {
|
||||
// avoid serializing config property
|
||||
Object.defineProperty(this, 'config', {
|
||||
enumerable: false,
|
||||
writable: false,
|
||||
configurable: true,
|
||||
value: config
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
|
||||
import { QueryHistoryConfig } from './config';
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
import { asyncFilter } from './pure/helpers-pure';
|
||||
import { asyncFilter, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { CompletedQueryInfo, LocalQueryInfo, QueryHistoryInfo } from './query-results';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { QueryEvaluationInfo } from './run-queries';
|
||||
|
||||
export async function slurpQueryHistory(fsPath: string, config: QueryHistoryConfig): Promise<QueryHistoryInfo[]> {
|
||||
export async function slurpQueryHistory(fsPath: string): Promise<QueryHistoryInfo[]> {
|
||||
try {
|
||||
if (!(await fs.pathExists(fsPath))) {
|
||||
return [];
|
||||
@@ -29,10 +29,6 @@ export async function slurpQueryHistory(fsPath: string, config: QueryHistoryConf
|
||||
if (q.t === 'local') {
|
||||
Object.setPrototypeOf(q, LocalQueryInfo.prototype);
|
||||
|
||||
// The config object is a global, so we need to set it explicitly
|
||||
// and ensure it is not serialized to JSON.
|
||||
q.setConfig(config);
|
||||
|
||||
// Date instances are serialized as strings. Need to
|
||||
// convert them back to Date instances.
|
||||
(q.initialInfo as any).start = new Date(q.initialInfo.start);
|
||||
@@ -44,7 +40,12 @@ export async function slurpQueryHistory(fsPath: string, config: QueryHistoryConf
|
||||
q.completedQuery.dispose = () => { /**/ };
|
||||
}
|
||||
} else if (q.t === 'remote') {
|
||||
// noop
|
||||
// A bug was introduced that didn't set the completed flag in query history
|
||||
// items. The following code makes sure that the flag is set in order to
|
||||
// "patch" older query history items.
|
||||
if (q.status === QueryStatus.Completed) {
|
||||
q.completed = true;
|
||||
}
|
||||
}
|
||||
return q;
|
||||
});
|
||||
@@ -64,7 +65,7 @@ export async function slurpQueryHistory(fsPath: string, config: QueryHistoryConf
|
||||
});
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage('Error loading query history.', {
|
||||
fullMessage: ['Error loading query history.', e.stack].join('\n'),
|
||||
fullMessage: ['Error loading query history.', getErrorStack(e)].join('\n'),
|
||||
});
|
||||
// since the query history is invalid, it should be deleted so this error does not happen on next startup.
|
||||
await fs.remove(fsPath);
|
||||
@@ -94,6 +95,6 @@ export async function splatQueryHistory(queries: QueryHistoryInfo[], fsPath: str
|
||||
}, null, 2);
|
||||
await fs.writeFile(fsPath, data);
|
||||
} catch (e) {
|
||||
throw new Error(`Error saving query history to ${fsPath}: ${e.message}`);
|
||||
throw new Error(`Error saving query history to ${fsPath}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -146,7 +146,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
args.push('--require-db-registration');
|
||||
}
|
||||
|
||||
if (await this.cliServer.cliConstraints.supportsOldEvalStats()) {
|
||||
if (await this.cliServer.cliConstraints.supportsOldEvalStats() && !(await this.cliServer.cliConstraints.supportsPerQueryEvalLog())) {
|
||||
args.push('--old-eval-stats');
|
||||
}
|
||||
|
||||
@@ -258,3 +258,19 @@ export class QueryServerClient extends DisposableObject {
|
||||
export function findQueryLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'query.log');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.jsonl');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.summary');
|
||||
}
|
||||
|
||||
export function findJsonQueryEvalLogSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.summary.jsonl');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogEndSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log-end.summary');
|
||||
}
|
||||
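The free functions added after the query server client give each evaluator log artifact a fixed name inside a query's result directory. Illustrative output for a hypothetical result path (POSIX-style paths shown, not taken from the diff):

// '/tmp/queries/query-123' is a made-up result path.
const resultPath = '/tmp/queries/query-123';

findQueryLogFile(resultPath);                 // /tmp/queries/query-123/query.log
findQueryEvalLogFile(resultPath);             // /tmp/queries/query-123/evaluator-log.jsonl
findQueryEvalLogSummaryFile(resultPath);      // /tmp/queries/query-123/evaluator-log.summary
findJsonQueryEvalLogSummaryFile(resultPath);  // /tmp/queries/query-123/evaluator-log.summary.jsonl
findQueryEvalLogEndSummaryFile(resultPath);   // /tmp/queries/query-123/evaluator-log-end.summary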
@@ -21,6 +21,7 @@ import {
|
||||
ProgressCallback,
|
||||
UserCancellationException
|
||||
} from './commandRunner';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
const QUICK_QUERIES_DIR_NAME = 'quick-queries';
|
||||
const QUICK_QUERY_QUERY_NAME = 'quick-query.ql';
|
||||
@@ -120,19 +121,26 @@ export async function displayQuickQuery(
|
||||
const quickQueryQlpackYaml: any = {
|
||||
name: 'vscode/quick-query',
|
||||
version: '1.0.0',
|
||||
libraryPathDependencies: [qlpack]
|
||||
dependencies: {
|
||||
[qlpack]: '*'
|
||||
}
|
||||
};
|
||||
await fs.writeFile(qlPackFile, QLPACK_FILE_HEADER + yaml.safeDump(quickQueryQlpackYaml), 'utf8');
|
||||
await fs.writeFile(qlPackFile, QLPACK_FILE_HEADER + yaml.dump(quickQueryQlpackYaml), 'utf8');
|
||||
}
|
||||
|
||||
if (shouldRewrite || !(await fs.pathExists(qlFile))) {
|
||||
await fs.writeFile(qlFile, getInitialQueryContents(dbItem.language, dbscheme), 'utf8');
|
||||
}
|
||||
|
||||
if (shouldRewrite) {
|
||||
await cliServer.clearCache();
|
||||
await cliServer.packInstall(queriesDir, true);
|
||||
}
|
||||
|
||||
await Window.showTextDocument(await workspace.openTextDocument(qlFile));
|
||||
} catch (e) {
|
||||
if (e instanceof ResponseError && e.code == ErrorCodes.RequestCancelled) {
|
||||
throw new UserCancellationException(e.message);
|
||||
throw new UserCancellationException(getErrorMessage(e));
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
@@ -143,6 +151,6 @@ async function checkShouldRewrite(qlPackFile: string, newDependency: string) {
|
||||
if (!(await fs.pathExists(qlPackFile))) {
|
||||
return true;
|
||||
}
|
||||
const qlPackContents: any = yaml.safeLoad(await fs.readFile(qlPackFile, 'utf8'));
|
||||
return qlPackContents.libraryPathDependencies?.[0] !== newDependency;
|
||||
const qlPackContents: any = yaml.load(await fs.readFile(qlPackFile, 'utf8'));
|
||||
return !qlPackContents.dependencies?.[newDependency];
|
||||
}
|
||||
|
||||
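Two things change in the quick-query pack handling: the generated `qlpack.yml` now declares its dependency in the CodeQL pack `dependencies` format instead of the legacy `libraryPathDependencies` list, and the deprecated `yaml.safeDump`/`yaml.safeLoad` calls are replaced with `yaml.dump`/`yaml.load`, their js-yaml 4.x equivalents. A hedged sketch of the object being serialized, with a made-up dependency name; the real value is whatever `qlpack` resolves to in `displayQuickQuery`:

import * as yaml from 'js-yaml';

// 'codeql/javascript-all' is a hypothetical dependency name for illustration.
const qlpack = 'codeql/javascript-all';
const quickQueryQlpackYaml = {
  name: 'vscode/quick-query',
  version: '1.0.0',
  dependencies: { [qlpack]: '*' },
};

// js-yaml 4.x: dump/load replace the removed safeDump/safeLoad.
console.log(yaml.dump(quickQueryQlpackYaml));
// name: vscode/quick-query
// version: 1.0.0
// dependencies:
//   codeql/javascript-all: '*'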
@@ -1,3 +1,4 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import { CancellationToken, ExtensionContext } from 'vscode';
|
||||
@@ -6,10 +7,14 @@ import { Credentials } from '../authentication';
|
||||
import { Logger } from '../logging';
|
||||
import { downloadArtifactFromLink } from './gh-actions-api-client';
|
||||
import { AnalysisSummary } from './shared/remote-query-result';
|
||||
import { AnalysisResults, AnalysisAlert } from './shared/analysis-result';
|
||||
import { AnalysisResults, AnalysisAlert, AnalysisRawResults } from './shared/analysis-result';
|
||||
import { UserCancellationException } from '../commandRunner';
|
||||
import { sarifParser } from '../sarif-parser';
|
||||
import { extractAnalysisAlerts } from './sarif-processing';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { extractRawResults } from './bqrs-processing';
|
||||
import { asyncFilter, getErrorMessage } from '../pure/helpers-pure';
|
||||
import { createDownloadPath } from './download-link';
|
||||
|
||||
export class AnalysesResultsManager {
|
||||
// Store for the results of various analyses for each remote query.
|
||||
@@ -18,6 +23,7 @@ export class AnalysesResultsManager {
|
||||
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly cliServer: CodeQLCliServer,
|
||||
readonly storagePath: string,
|
||||
private readonly logger: Logger,
|
||||
) {
|
||||
@@ -40,13 +46,22 @@ export class AnalysesResultsManager {
|
||||
await this.downloadSingleAnalysisResults(analysisSummary, credentials, publishResults);
|
||||
}
|
||||
|
||||
public async downloadAnalysesResults(
|
||||
allAnalysesToDownload: AnalysisSummary[],
|
||||
token: CancellationToken | undefined,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void>
|
||||
/**
|
||||
* Loads the given array of analysis results. For each analysis result, if it is not downloaded yet,
|
||||
* it will be downloaded. If it is already downloaded, it will be loaded into memory.
|
||||
* If it is already in memory, this will be a no-op.
|
||||
*
|
||||
* @param allAnalysesToLoad List of analyses to ensure are downloaded and in memory
|
||||
* @param token Optional cancellation token
|
||||
* @param publishResults Optional function to publish the results after loading
|
||||
*/
|
||||
public async loadAnalysesResults(
|
||||
allAnalysesToLoad: AnalysisSummary[],
|
||||
token?: CancellationToken,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void> = () => Promise.resolve()
|
||||
): Promise<void> {
|
||||
// Filter out analyses that we have already in memory.
|
||||
const analysesToDownload = allAnalysesToDownload.filter(x => !this.isAnalysisInMemory(x));
|
||||
const analysesToDownload = allAnalysesToLoad.filter(x => !this.isAnalysisInMemory(x));
|
||||
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
|
||||
@@ -101,7 +116,10 @@ export class AnalysesResultsManager {
|
||||
const analysisResults: AnalysisResults = {
|
||||
nwo: analysis.nwo,
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
interpretedResults: [],
|
||||
resultCount: analysis.resultCount,
|
||||
starCount: analysis.starCount,
|
||||
lastUpdated: analysis.lastUpdated,
|
||||
};
|
||||
const queryId = analysis.downloadLink.queryId;
|
||||
const resultsForQuery = this.internalGetAnalysesResults(queryId);
|
||||
@@ -115,19 +133,29 @@ export class AnalysesResultsManager {
|
||||
artifactPath = await downloadArtifactFromLink(credentials, this.storagePath, analysis.downloadLink);
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(`Could not download the analysis results for ${analysis.nwo}: ${e.message}`);
|
||||
throw new Error(`Could not download the analysis results for ${analysis.nwo}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
|
||||
const fileLinkPrefix = this.createGitHubDotcomFileLinkPrefix(analysis.nwo, analysis.databaseSha);
|
||||
|
||||
let newAnaysisResults: AnalysisResults;
|
||||
if (path.extname(artifactPath) === '.sarif') {
|
||||
const queryResults = await this.readResults(artifactPath);
|
||||
const fileExtension = path.extname(artifactPath);
|
||||
if (fileExtension === '.sarif') {
|
||||
const queryResults = await this.readSarifResults(artifactPath, fileLinkPrefix);
|
||||
newAnaysisResults = {
|
||||
...analysisResults,
|
||||
results: queryResults,
|
||||
interpretedResults: queryResults,
|
||||
status: 'Completed'
|
||||
};
|
||||
} else if (fileExtension === '.bqrs') {
|
||||
const queryResults = await this.readBqrsResults(artifactPath, fileLinkPrefix, analysis.sourceLocationPrefix);
|
||||
newAnaysisResults = {
|
||||
...analysisResults,
|
||||
rawResults: queryResults,
|
||||
status: 'Completed'
|
||||
};
|
||||
} else {
|
||||
void this.logger.log('Cannot download results. Only alert and path queries are fully supported.');
|
||||
void this.logger.log(`Cannot download results. File type '${fileExtension}' not supported.`);
|
||||
newAnaysisResults = {
|
||||
...analysisResults,
|
||||
status: 'Failed'
|
||||
@@ -137,11 +165,30 @@ export class AnalysesResultsManager {
|
||||
void publishResults([...resultsForQuery]);
|
||||
}
|
||||
|
||||
private async readResults(filePath: string): Promise<AnalysisAlert[]> {
|
||||
|
||||
public async loadDownloadedAnalyses(
|
||||
allAnalysesToCheck: AnalysisSummary[]
|
||||
) {
|
||||
|
||||
// Find all analyses that are already downloaded.
|
||||
const allDownloadedAnalyses = await asyncFilter(allAnalysesToCheck, x => this.isAnalysisDownloaded(x));
|
||||
// Now, ensure that all of these analyses are in memory. Some may already be in memory. These are ignored.
|
||||
await this.loadAnalysesResults(allDownloadedAnalyses);
|
||||
}
|
||||
|
||||
private async isAnalysisDownloaded(analysis: AnalysisSummary): Promise<boolean> {
|
||||
return await fs.pathExists(createDownloadPath(this.storagePath, analysis.downloadLink));
|
||||
}
|
||||
|
||||
private async readBqrsResults(filePath: string, fileLinkPrefix: string, sourceLocationPrefix: string): Promise<AnalysisRawResults> {
|
||||
return await extractRawResults(this.cliServer, this.logger, filePath, fileLinkPrefix, sourceLocationPrefix);
|
||||
}
|
||||
|
||||
private async readSarifResults(filePath: string, fileLinkPrefix: string): Promise<AnalysisAlert[]> {
|
||||
const sarifLog = await sarifParser(filePath);
|
||||
|
||||
const processedSarif = extractAnalysisAlerts(sarifLog);
|
||||
if (processedSarif.errors) {
|
||||
const processedSarif = extractAnalysisAlerts(sarifLog, fileLinkPrefix);
|
||||
if (processedSarif.errors.length) {
|
||||
void this.logger.log(`Error processing SARIF file: ${os.EOL}${processedSarif.errors.join(os.EOL)}`);
|
||||
}
|
||||
|
||||
@@ -151,4 +198,8 @@ export class AnalysesResultsManager {
|
||||
private isAnalysisInMemory(analysis: AnalysisSummary): boolean {
|
||||
return this.internalGetAnalysesResults(analysis.downloadLink.queryId).some(x => x.nwo === analysis.nwo);
|
||||
}
|
||||
|
||||
private createGitHubDotcomFileLinkPrefix(nwo: string, sha: string): string {
|
||||
return `https://github.com/${nwo}/blob/${sha}`;
|
||||
}
|
||||
}
|
||||
|
||||
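The manager above now distinguishes three states for an analysis artifact: not downloaded, downloaded on disk, and loaded in memory. An illustrative call sequence, assuming the sketch lives alongside the manager in `src/remote-queries` and that the `manager` and `summaries` values come from the caller:

import { AnalysesResultsManager } from './analyses-results-manager';
import { AnalysisSummary } from './shared/remote-query-result';

// Hedged sketch: bring previously downloaded artifacts back into memory first,
// then load (downloading where needed) the full set, publishing as results arrive.
async function showAllResults(
  manager: AnalysesResultsManager,
  summaries: AnalysisSummary[]
): Promise<void> {
  // No network access: only artifacts already on disk are read.
  await manager.loadDownloadedAnalyses(summaries);

  // Downloads anything missing; analyses already in memory are a no-op.
  await manager.loadAnalysesResults(summaries, undefined, async results => {
    console.log(`Have results for ${results.length} analyses so far`);
  });
}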
36  extensions/ql-vscode/src/remote-queries/bqrs-processing.ts  Normal file
@@ -0,0 +1,36 @@
import { CodeQLCliServer } from '../cli';
import { Logger } from '../logging';
import { transformBqrsResultSet } from '../pure/bqrs-cli-types';
import { AnalysisRawResults } from './shared/analysis-result';
import { MAX_RAW_RESULTS } from './shared/result-limits';

export async function extractRawResults(
  cliServer: CodeQLCliServer,
  logger: Logger,
  filePath: string,
  fileLinkPrefix: string,
  sourceLocationPrefix: string
): Promise<AnalysisRawResults> {
  const bqrsInfo = await cliServer.bqrsInfo(filePath);
  const resultSets = bqrsInfo['result-sets'];

  if (resultSets.length < 1) {
    throw new Error('No result sets found in results file.');
  }
  if (resultSets.length > 1) {
    void logger.log('Multiple result sets found in results file. Only the first one will be used.');
  }

  const schema = resultSets[0];

  const chunk = await cliServer.bqrsDecode(
    filePath,
    schema.name,
    { pageSize: MAX_RAW_RESULTS });

  const resultSet = transformBqrsResultSet(schema, chunk);

  const capped = !!chunk.next;

  return { schema, resultSet, fileLinkPrefix, sourceLocationPrefix, capped };
}
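A hedged usage sketch of the new `extractRawResults` helper: only the first page of at most `MAX_RAW_RESULTS` rows is decoded, and `capped` records whether more rows were available. The paths below are made up, and the sketch assumes it sits next to the manager so the relative imports resolve:

import { CodeQLCliServer } from '../cli';
import { Logger } from '../logging';
import { extractRawResults } from './bqrs-processing';

// Illustrative only: decode the first page of raw results for display.
async function readRawResults(cliServer: CodeQLCliServer, logger: Logger) {
  const raw = await extractRawResults(
    cliServer,
    logger,
    '/path/to/results.bqrs',                                // hypothetical artifact path
    'https://github.com/octo-org/octo-repo/blob/deadbeef',  // made-up file link prefix
    '/home/runner/work/octo-repo/octo-repo'                 // made-up source location prefix
  );

  if (raw.capped) {
    void logger.log('Raw results were truncated to the first page.');
  }
  return raw.resultSet;
}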
@@ -1,3 +1,5 @@
|
||||
import * as path from 'path';
|
||||
|
||||
/**
|
||||
* Represents a link to an artifact to be downloaded.
|
||||
*/
|
||||
@@ -23,3 +25,16 @@ export interface DownloadLink {
|
||||
*/
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a downloadLink to the path where the artifact should be stored.
|
||||
*
|
||||
* @param storagePath The base directory to store artifacts in.
|
||||
* @param downloadLink The DownloadLink
|
||||
* @param extension An optional file extension to append to the artifact (no `.`).
|
||||
*
|
||||
* @returns A full path to the download location of the artifact
|
||||
*/
|
||||
export function createDownloadPath(storagePath: string, downloadLink: DownloadLink, extension = '') {
|
||||
return path.join(storagePath, downloadLink.queryId, downloadLink.id + (extension ? `.${extension}` : ''));
|
||||
}
|
||||
|
||||
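`createDownloadPath` centralizes the `<storagePath>/<queryId>/<artifactId>` layout that was previously assembled inline in `downloadArtifactFromLink`. Illustrative values, all made up (POSIX paths shown):

import { DownloadLink, createDownloadPath } from './download-link';

const link: DownloadLink = {
  id: '12345',
  urlPath: '/repos/octo-org/controller/actions/artifacts/12345', // hypothetical
  innerFilePath: 'results.sarif',
  queryId: 'my-query-abc123',
};

createDownloadPath('/home/user/codeql-storage', link);
// => /home/user/codeql-storage/my-query-abc123/12345

createDownloadPath('/home/user/codeql-storage', link, 'zip');
// => /home/user/codeql-storage/my-query-abc123/12345.zip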
143  extensions/ql-vscode/src/remote-queries/export-results.ts  Normal file
@@ -0,0 +1,143 @@
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
import { window, commands, Uri, ExtensionContext, QuickPickItem, workspace, ViewColumn } from 'vscode';
|
||||
import { Credentials } from '../authentication';
|
||||
import { UserCancellationException } from '../commandRunner';
|
||||
import {
|
||||
showInformationMessageWithAction,
|
||||
pluralize
|
||||
} from '../helpers';
|
||||
import { logger } from '../logging';
|
||||
import { QueryHistoryManager } from '../query-history';
|
||||
import { createGist } from './gh-actions-api-client';
|
||||
import { RemoteQueriesManager } from './remote-queries-manager';
|
||||
import { generateMarkdown } from './remote-queries-markdown-generation';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { AnalysisResults, sumAnalysesResults } from './shared/analysis-result';
|
||||
|
||||
/**
|
||||
* Exports the results of the currently-selected remote query.
|
||||
* The user is prompted to select the export format.
|
||||
*/
|
||||
export async function exportRemoteQueryResults(
|
||||
queryHistoryManager: QueryHistoryManager,
|
||||
remoteQueriesManager: RemoteQueriesManager,
|
||||
ctx: ExtensionContext,
|
||||
): Promise<void> {
|
||||
const queryHistoryItem = queryHistoryManager.getCurrentQueryHistoryItem();
|
||||
if (!queryHistoryItem || queryHistoryItem.t !== 'remote') {
|
||||
throw new Error('No variant analysis results currently open. To open results, click an item in the query history view.');
|
||||
} else if (!queryHistoryItem.completed) {
|
||||
throw new Error('Variant analysis results are not yet available.');
|
||||
}
|
||||
const queryId = queryHistoryItem.queryId;
|
||||
void logger.log(`Exporting variant analysis results for query: ${queryId}`);
|
||||
const query = queryHistoryItem.remoteQuery;
|
||||
const analysesResults = remoteQueriesManager.getAnalysesResults(queryId);
|
||||
|
||||
const gistOption = {
|
||||
label: '$(ports-open-browser-icon) Create Gist (GitHub)',
|
||||
};
|
||||
const localMarkdownOption = {
|
||||
label: '$(markdown) Save as markdown',
|
||||
};
|
||||
const exportFormat = await determineExportFormat(gistOption, localMarkdownOption);
|
||||
|
||||
if (exportFormat === gistOption) {
|
||||
await exportResultsToGist(ctx, query, analysesResults);
|
||||
} else if (exportFormat === localMarkdownOption) {
|
||||
const queryDirectoryPath = await queryHistoryManager.getQueryHistoryItemDirectory(
|
||||
queryHistoryItem
|
||||
);
|
||||
await exportResultsToLocalMarkdown(queryDirectoryPath, query, analysesResults);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the format in which to export the results, from the given export options.
|
||||
*/
|
||||
async function determineExportFormat(
|
||||
...options: { label: string }[]
|
||||
): Promise<QuickPickItem> {
|
||||
const exportFormat = await window.showQuickPick(
|
||||
options,
|
||||
{
|
||||
placeHolder: 'Select export format',
|
||||
canPickMany: false,
|
||||
ignoreFocusOut: true,
|
||||
}
|
||||
);
|
||||
if (!exportFormat || !exportFormat.label) {
|
||||
throw new UserCancellationException('No export format selected', true);
|
||||
}
|
||||
return exportFormat;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the results of a remote query to markdown and uploads the files as a secret gist.
|
||||
*/
|
||||
export async function exportResultsToGist(
|
||||
ctx: ExtensionContext,
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[]
|
||||
): Promise<void> {
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
const description = buildGistDescription(query, analysesResults);
|
||||
const markdownFiles = generateMarkdown(query, analysesResults, 'gist');
|
||||
// Convert markdownFiles to the appropriate format for uploading to gist
|
||||
const gistFiles = markdownFiles.reduce((acc, cur) => {
|
||||
acc[`${cur.fileName}.md`] = { content: cur.content.join('\n') };
|
||||
return acc;
|
||||
}, {} as { [key: string]: { content: string } });
|
||||
|
||||
const gistUrl = await createGist(credentials, description, gistFiles);
|
||||
if (gistUrl) {
|
||||
const shouldOpenGist = await showInformationMessageWithAction(
|
||||
'Variant analysis results exported to gist.',
|
||||
'Open gist'
|
||||
);
|
||||
if (shouldOpenGist) {
|
||||
await commands.executeCommand('vscode.open', Uri.parse(gistUrl));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds Gist description
|
||||
* Ex: Empty Block (Go) x results (y repositories)
|
||||
*/
|
||||
const buildGistDescription = (query: RemoteQuery, analysesResults: AnalysisResults[]) => {
|
||||
const resultCount = sumAnalysesResults(analysesResults);
|
||||
const resultLabel = pluralize(resultCount, 'result', 'results');
|
||||
const repositoryLabel = query.repositoryCount ? `(${pluralize(query.repositoryCount, 'repository', 'repositories')})` : '';
|
||||
return `${query.queryName} (${query.language}) ${resultLabel} ${repositoryLabel}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts the results of a remote query to markdown and saves the files locally
|
||||
* in the query directory (where query results and metadata are also saved).
|
||||
*/
|
||||
async function exportResultsToLocalMarkdown(
|
||||
queryDirectoryPath: string,
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[]
|
||||
) {
|
||||
const markdownFiles = generateMarkdown(query, analysesResults, 'local');
|
||||
const exportedResultsPath = path.join(queryDirectoryPath, 'exported-results');
|
||||
await fs.ensureDir(exportedResultsPath);
|
||||
for (const markdownFile of markdownFiles) {
|
||||
const filePath = path.join(exportedResultsPath, `${markdownFile.fileName}.md`);
|
||||
await fs.writeFile(filePath, markdownFile.content.join('\n'), 'utf8');
|
||||
}
|
||||
const shouldOpenExportedResults = await showInformationMessageWithAction(
|
||||
`Variant analysis results exported to \"${exportedResultsPath}\".`,
|
||||
'Open exported results'
|
||||
);
|
||||
if (shouldOpenExportedResults) {
|
||||
const summaryFilePath = path.join(exportedResultsPath, '_summary.md');
|
||||
const summaryFile = await workspace.openTextDocument(summaryFilePath);
|
||||
await window.showTextDocument(summaryFile, ViewColumn.One);
|
||||
await commands.executeCommand('revealFileInOS', Uri.file(summaryFilePath));
|
||||
}
|
||||
}
|
||||
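In `exportResultsToGist`, the reduce converts the markdown generator's per-file output into the map shape the GitHub Gist API expects, keyed by file name with a `.md` suffix. An illustrative transformation; the input shapes are inferred from the code above, not taken from `generateMarkdown` itself:

// Made-up inputs mirroring what generateMarkdown appears to return.
const markdownFiles = [
  { fileName: '_summary', content: ['# Variant analysis results', '...'] },
  { fileName: 'octo-org-octo-repo', content: ['## octo-org/octo-repo', '12 results'] },
];

const gistFiles = markdownFiles.reduce((acc, cur) => {
  acc[`${cur.fileName}.md`] = { content: cur.content.join('\n') };
  return acc;
}, {} as { [key: string]: { content: string } });

// gistFiles now looks like:
// {
//   '_summary.md': { content: '# Variant analysis results\n...' },
//   'octo-org-octo-repo.md': { content: '## octo-org/octo-repo\n12 results' }
// }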
@@ -1,20 +1,23 @@
|
||||
import * as unzipper from 'unzipper';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import { showAndLogWarningMessage, tmpDir } from '../helpers';
|
||||
import { showAndLogErrorMessage, showAndLogWarningMessage, tmpDir } from '../helpers';
|
||||
import { Credentials } from '../authentication';
|
||||
import { logger } from '../logging';
|
||||
import { RemoteQueryWorkflowResult } from './remote-query-workflow-result';
|
||||
import { DownloadLink } from './download-link';
|
||||
import { DownloadLink, createDownloadPath } from './download-link';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueryFailureIndexItem, RemoteQueryResultIndex, RemoteQuerySuccessIndexItem } from './remote-query-result-index';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
|
||||
interface ApiSuccessIndexItem {
|
||||
nwo: string;
|
||||
id: string;
|
||||
sha?: string;
|
||||
results_count: number;
|
||||
bqrs_file_size: number;
|
||||
sarif_file_size?: number;
|
||||
source_location_prefix: string;
|
||||
}
|
||||
|
||||
interface ApiFailureIndexItem {
|
||||
@@ -41,7 +44,10 @@ export async function getRemoteQueryIndex(
|
||||
const artifactsUrlPath = `/repos/${owner}/${repoName}/actions/artifacts`;
|
||||
|
||||
const artifactList = await listWorkflowRunArtifacts(credentials, owner, repoName, workflowRunId);
|
||||
const resultIndexArtifactId = getArtifactIDfromName('result-index', workflowUri, artifactList);
|
||||
const resultIndexArtifactId = tryGetArtifactIDfromName('result-index', artifactList);
|
||||
if (!resultIndexArtifactId) {
|
||||
return undefined;
|
||||
}
|
||||
const resultIndex = await getResultIndex(credentials, owner, repoName, resultIndexArtifactId);
|
||||
|
||||
const successes = resultIndex?.successes.map(item => {
|
||||
@@ -51,9 +57,11 @@ export async function getRemoteQueryIndex(
|
||||
id: item.id.toString(),
|
||||
artifactId: artifactId,
|
||||
nwo: item.nwo,
|
||||
sha: item.sha,
|
||||
resultCount: item.results_count,
|
||||
bqrsFileSize: item.bqrs_file_size,
|
||||
sarifFileSize: item.sarif_file_size
|
||||
sarifFileSize: item.sarif_file_size,
|
||||
sourceLocationPrefix: item.source_location_prefix
|
||||
} as RemoteQuerySuccessIndexItem;
|
||||
});
|
||||
|
||||
@@ -72,6 +80,18 @@ export async function getRemoteQueryIndex(
|
||||
};
|
||||
}
|
||||
|
||||
export async function cancelRemoteQuery(
|
||||
credentials: Credentials,
|
||||
remoteQuery: RemoteQuery
|
||||
): Promise<void> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
const { actionsWorkflowRunId, controllerRepository: { owner, name } } = remoteQuery;
|
||||
const response = await octokit.request(`POST /repos/${owner}/${name}/actions/runs/${actionsWorkflowRunId}/cancel`);
|
||||
if (response.status >= 300) {
|
||||
throw new Error(`Error cancelling variant analysis: ${response.status} ${response?.data?.message || ''}`);
|
||||
}
|
||||
}
|
||||
|
||||
export async function downloadArtifactFromLink(
|
||||
credentials: Credentials,
|
||||
storagePath: string,
|
||||
@@ -80,14 +100,14 @@ export async function downloadArtifactFromLink(
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const extractedPath = path.join(storagePath, downloadLink.queryId, downloadLink.id);
|
||||
const extractedPath = createDownloadPath(storagePath, downloadLink);
|
||||
|
||||
// first check if we already have the artifact
|
||||
if (!(await fs.pathExists(extractedPath))) {
|
||||
// Download the zipped artifact.
|
||||
const response = await octokit.request(`GET ${downloadLink.urlPath}/zip`, {});
|
||||
|
||||
const zipFilePath = path.join(storagePath, downloadLink.queryId, `${downloadLink.id}.zip`);
|
||||
const zipFilePath = createDownloadPath(storagePath, downloadLink, 'zip');
|
||||
await saveFile(`${zipFilePath}`, response.data as ArrayBuffer);
|
||||
|
||||
// Extract the zipped artifact.
|
||||
@@ -209,15 +229,29 @@ function getArtifactIDfromName(
|
||||
workflowUri: string,
|
||||
artifacts: Array<{ id: number, name: string }>
|
||||
): number {
|
||||
const artifact = artifacts.find(a => a.name === artifactName);
|
||||
const artifactId = tryGetArtifactIDfromName(artifactName, artifacts);
|
||||
|
||||
if (!artifact) {
|
||||
if (!artifactId) {
|
||||
const errorMessage =
|
||||
`Could not find artifact with name ${artifactName} in workflow ${workflowUri}.
|
||||
Please check whether the workflow run has successfully completed.`;
|
||||
throw Error(errorMessage);
|
||||
}
|
||||
|
||||
return artifactId;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param artifactName The artifact name, as a string.
|
||||
* @param artifacts An array of artifact details (from the "list workflow run artifacts" API response).
|
||||
* @returns The artifact ID corresponding to the given artifact name, if it exists.
|
||||
*/
|
||||
function tryGetArtifactIDfromName(
|
||||
artifactName: string,
|
||||
artifacts: Array<{ id: number, name: string }>
|
||||
): number | undefined {
|
||||
const artifact = artifacts.find(a => a.name === artifactName);
|
||||
|
||||
return artifact?.id;
|
||||
}
|
||||
|
||||
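With `tryGetArtifactIDfromName`, a missing artifact no longer throws at the lookup layer, which is what lets `getRemoteQueryIndex` return `undefined` when the `result-index` artifact is absent. Illustrative lookups within the same module, against a made-up artifact list:

const artifacts = [
  { id: 101, name: 'result-index' },
  { id: 102, name: 'octo-org/octo-repo/results' }, // hypothetical names
];

tryGetArtifactIDfromName('result-index', artifacts);     // 101
tryGetArtifactIDfromName('missing-artifact', artifacts); // undefined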
@@ -265,18 +299,115 @@ function getWorkflowError(conclusion: string | null): string {
|
||||
}
|
||||
|
||||
if (conclusion === 'cancelled') {
|
||||
return 'The remote query execution was cancelled.';
|
||||
return 'Variant analysis execution was cancelled.';
|
||||
}
|
||||
|
||||
if (conclusion === 'timed_out') {
|
||||
return 'The remote query execution timed out.';
|
||||
return 'Variant analysis execution timed out.';
|
||||
}
|
||||
|
||||
if (conclusion === 'failure') {
|
||||
// TODO: Get the actual error from the workflow or potentially
|
||||
// from an artifact from the action itself.
|
||||
return 'The remote query execution has failed.';
|
||||
return 'Variant analysis execution has failed.';
|
||||
}
|
||||
|
||||
return `Unexpected query execution conclusion: ${conclusion}`;
|
||||
return `Unexpected variant analysis execution conclusion: ${conclusion}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a gist with the given description and files.
|
||||
* Returns the URL of the created gist.
|
||||
*/
|
||||
export async function createGist(
|
||||
credentials: Credentials,
|
||||
description: string,
|
||||
files: { [key: string]: { content: string } }
|
||||
): Promise<string | undefined> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
const response = await octokit.request('POST /gists', {
|
||||
description,
|
||||
files,
|
||||
public: false,
|
||||
});
|
||||
if (response.status >= 300) {
|
||||
throw new Error(`Error exporting variant analysis results: ${response.status} ${response?.data || ''}`);
|
||||
}
|
||||
return response.data.html_url;
|
||||
}
|
||||
|
||||
const repositoriesMetadataQuery = `query Stars($repos: String!, $pageSize: Int!, $cursor: String) {
|
||||
search(
|
||||
query: $repos
|
||||
type: REPOSITORY
|
||||
first: $pageSize
|
||||
after: $cursor
|
||||
) {
|
||||
edges {
|
||||
node {
|
||||
... on Repository {
|
||||
name
|
||||
owner {
|
||||
login
|
||||
}
|
||||
stargazerCount
|
||||
updatedAt
|
||||
}
|
||||
}
|
||||
cursor
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
type RepositoriesMetadataQueryResponse = {
|
||||
search: {
|
||||
edges: {
|
||||
cursor: string;
|
||||
node: {
|
||||
name: string;
|
||||
owner: {
|
||||
login: string;
|
||||
};
|
||||
stargazerCount: number;
|
||||
updatedAt: string; // Actually a ISO Date string
|
||||
}
|
||||
}[]
|
||||
}
|
||||
};
|
||||
|
||||
export type RepositoriesMetadata = Record<string, { starCount: number, lastUpdated: number }>
|
||||
|
||||
export async function getRepositoriesMetadata(credentials: Credentials, nwos: string[], pageSize = 100): Promise<RepositoriesMetadata> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
const repos = `repo:${nwos.join(' repo:')} fork:true`;
|
||||
let cursor = null;
|
||||
const metadata: RepositoriesMetadata = {};
|
||||
try {
|
||||
do {
|
||||
const response: RepositoriesMetadataQueryResponse = await octokit.graphql({
|
||||
query: repositoriesMetadataQuery,
|
||||
repos,
|
||||
pageSize,
|
||||
cursor
|
||||
});
|
||||
cursor = response.search.edges.length === pageSize ? response.search.edges[pageSize - 1].cursor : null;
|
||||
|
||||
for (const edge of response.search.edges) {
|
||||
const node = edge.node;
|
||||
const owner = node.owner.login;
|
||||
const name = node.name;
|
||||
const starCount = node.stargazerCount;
|
||||
// lastUpdated is always negative since it happened in the past.
|
||||
const lastUpdated = new Date(node.updatedAt).getTime() - Date.now();
|
||||
metadata[`${owner}/${name}`] = {
|
||||
starCount, lastUpdated
|
||||
};
|
||||
}
|
||||
|
||||
} while (cursor);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(`Error retrieving repository metadata for variant analysis: ${getErrorMessage(e)}`);
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
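`getRepositoriesMetadata` pages through the GraphQL search in batches of `pageSize`, following the last edge's cursor until a short page comes back, and swallows failures after logging so missing metadata never blocks result processing. A hedged usage sketch; credentials acquisition is elided and the repository list is invented:

import { Credentials } from '../authentication';
import { getRepositoriesMetadata } from './gh-actions-api-client';

async function annotateWithMetadata(credentials: Credentials): Promise<void> {
  // Made-up repository list; in the extension this comes from the result index.
  const metadata = await getRepositoriesMetadata(credentials, [
    'octo-org/octo-repo',
    'octo-org/another-repo',
  ]);

  // Example shape: { 'octo-org/octo-repo': { starCount: 42, lastUpdated: -86400000 } }
  // lastUpdated is milliseconds relative to now, so it is always negative.
  for (const [nwo, { starCount, lastUpdated }] of Object.entries(metadata)) {
    console.log(`${nwo}: ${starCount} stars, updated ${-lastUpdated} ms ago`);
  }
}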
@@ -5,8 +5,7 @@ import {
|
||||
ViewColumn,
|
||||
Uri,
|
||||
workspace,
|
||||
extensions,
|
||||
commands,
|
||||
commands
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
@@ -14,25 +13,32 @@ import {
|
||||
ToRemoteQueriesMessage,
|
||||
FromRemoteQueriesMessage,
|
||||
RemoteQueryDownloadAnalysisResultsMessage,
|
||||
RemoteQueryDownloadAllAnalysesResultsMessage,
|
||||
RemoteQueryViewAnalysisResultsMessage,
|
||||
RemoteQueryDownloadAllAnalysesResultsMessage
|
||||
} from '../pure/interface-types';
|
||||
import { Logger } from '../logging';
|
||||
import { getHtmlForWebview } from '../interface-utils';
|
||||
import { assertNever } from '../pure/helpers-pure';
|
||||
import { AnalysisSummary, RemoteQueryResult } from './remote-query-result';
|
||||
import {
|
||||
AnalysisSummary,
|
||||
RemoteQueryResult,
|
||||
sumAnalysisSummariesResults
|
||||
} from './remote-query-result';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueryResult as RemoteQueryResultViewModel } from './shared/remote-query-result';
|
||||
import { AnalysisSummary as AnalysisResultViewModel } from './shared/remote-query-result';
|
||||
import {
|
||||
AnalysisSummary as AnalysisResultViewModel,
|
||||
RemoteQueryResult as RemoteQueryResultViewModel
|
||||
} from './shared/remote-query-result';
|
||||
import { showAndLogWarningMessage } from '../helpers';
|
||||
import { URLSearchParams } from 'url';
|
||||
import { SHOW_QUERY_TEXT_MSG } from '../query-history';
|
||||
import { AnalysesResultsManager } from './analyses-results-manager';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
import { humanizeUnit } from '../pure/time';
|
||||
|
||||
export class RemoteQueriesInterfaceManager {
|
||||
private panel: WebviewPanel | undefined;
|
||||
private panelLoaded = false;
|
||||
private currentQueryId: string | undefined;
|
||||
private panelLoadedCallBacks: (() => void)[] = [];
|
||||
|
||||
constructor(
|
||||
@@ -41,7 +47,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
private readonly analysesResultsManager: AnalysesResultsManager
|
||||
) {
|
||||
this.panelLoadedCallBacks.push(() => {
|
||||
void logger.log('Remote queries view loaded');
|
||||
void logger.log('Variant analysis results view loaded');
|
||||
});
|
||||
}
|
||||
|
||||
@@ -49,12 +55,18 @@ export class RemoteQueriesInterfaceManager {
|
||||
this.getPanel().reveal(undefined, true);
|
||||
|
||||
await this.waitForPanelLoaded();
|
||||
const model = this.buildViewModel(query, queryResult);
|
||||
this.currentQueryId = queryResult.queryId;
|
||||
|
||||
await this.postMessage({
|
||||
t: 'setRemoteQueryResult',
|
||||
queryResult: this.buildViewModel(query, queryResult)
|
||||
queryResult: model
|
||||
});
|
||||
|
||||
await this.setAnalysisResults(this.analysesResultsManager.getAnalysesResults(queryResult.queryId));
|
||||
// Ensure all pre-downloaded artifacts are loaded into memory
|
||||
await this.analysesResultsManager.loadDownloadedAnalyses(model.analysisSummaries);
|
||||
|
||||
await this.setAnalysisResults(this.analysesResultsManager.getAnalysesResults(queryResult.queryId), queryResult.queryId);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -67,19 +79,21 @@ export class RemoteQueriesInterfaceManager {
|
||||
*/
|
||||
private buildViewModel(query: RemoteQuery, queryResult: RemoteQueryResult): RemoteQueryResultViewModel {
|
||||
const queryFileName = path.basename(query.queryFilePath);
|
||||
const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
|
||||
const totalResultCount = sumAnalysisSummariesResults(queryResult.analysisSummaries);
|
||||
const executionDuration = this.getDuration(queryResult.executionEndTime, query.executionStartTime);
|
||||
const analysisSummaries = this.buildAnalysisSummaries(queryResult.analysisSummaries);
|
||||
const totalRepositoryCount = queryResult.analysisSummaries.length;
|
||||
const affectedRepositories = queryResult.analysisSummaries.filter(r => r.resultCount > 0);
|
||||
|
||||
return {
|
||||
queryId: queryResult.queryId,
|
||||
queryTitle: query.queryName,
|
||||
queryFileName: queryFileName,
|
||||
queryFilePath: query.queryFilePath,
|
||||
queryText: query.queryText,
|
||||
language: query.language,
|
||||
workflowRunUrl: `https://github.com/${query.controllerRepository.owner}/${query.controllerRepository.name}/actions/runs/${query.actionsWorkflowRunId}`,
|
||||
totalRepositoryCount: query.repositories.length,
|
||||
totalRepositoryCount: totalRepositoryCount,
|
||||
affectedRepositoryCount: affectedRepositories.length,
|
||||
totalResultCount: totalResultCount,
|
||||
executionTimestamp: this.formatDate(query.executionStartTime),
|
||||
@@ -94,7 +108,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
const { ctx } = this;
|
||||
const panel = (this.panel = Window.createWebviewPanel(
|
||||
'remoteQueriesView',
|
||||
'Remote Query Results',
|
||||
'CodeQL Query Results',
|
||||
{ viewColumn: ViewColumn.Active, preserveFocus: true },
|
||||
{
|
||||
enableScripts: true,
|
||||
@@ -103,12 +117,14 @@ export class RemoteQueriesInterfaceManager {
|
||||
localResourceRoots: [
|
||||
Uri.file(this.analysesResultsManager.storagePath),
|
||||
Uri.file(path.join(this.ctx.extensionPath, 'out')),
|
||||
Uri.file(path.join(this.ctx.extensionPath, 'node_modules/@vscode/codicons/dist')),
|
||||
],
|
||||
}
|
||||
));
|
||||
this.panel.onDidDispose(
|
||||
() => {
|
||||
this.panel = undefined;
|
||||
this.currentQueryId = undefined;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
@@ -126,10 +142,16 @@ export class RemoteQueriesInterfaceManager {
|
||||
ctx.asAbsolutePath('out/remote-queries/view/remoteQueries.css')
|
||||
);
|
||||
|
||||
// Allows use of the VS Code "codicons" icon set.
|
||||
// See https://github.com/microsoft/vscode-codicons
|
||||
const codiconsPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('node_modules/@vscode/codicons/dist/codicon.css')
|
||||
);
|
||||
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
panel.webview,
|
||||
scriptPathOnDisk,
|
||||
[baseStylesheetUriOnDisk, stylesheetPathOnDisk],
|
||||
[baseStylesheetUriOnDisk, stylesheetPathOnDisk, codiconsPathOnDisk],
|
||||
true
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
@@ -189,7 +211,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
break;
|
||||
case 'remoteQueryError':
|
||||
void this.logger.log(
|
||||
`Remote query error: ${msg.error}`
|
||||
`Variant analysis error: ${msg.error}`
|
||||
);
|
||||
break;
|
||||
case 'openFile':
|
||||
@@ -198,14 +220,17 @@ export class RemoteQueriesInterfaceManager {
|
||||
case 'openVirtualFile':
|
||||
await this.openVirtualFile(msg.queryText);
|
||||
break;
|
||||
case 'copyRepoList':
|
||||
await commands.executeCommand('codeQL.copyRepoList', msg.queryId);
|
||||
break;
|
||||
case 'remoteQueryDownloadAnalysisResults':
|
||||
await this.downloadAnalysisResults(msg);
|
||||
break;
|
||||
case 'remoteQueryDownloadAllAnalysesResults':
|
||||
await this.downloadAllAnalysesResults(msg);
|
||||
break;
|
||||
case 'remoteQueryViewAnalysisResults':
|
||||
await this.viewAnalysisResults(msg);
|
||||
case 'remoteQueryExportResults':
|
||||
await commands.executeCommand('codeQL.exportVariantAnalysisResults');
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
@@ -213,48 +238,25 @@ export class RemoteQueriesInterfaceManager {
|
||||
}
|
||||
|
||||
private async downloadAnalysisResults(msg: RemoteQueryDownloadAnalysisResultsMessage): Promise<void> {
|
||||
const queryId = this.currentQueryId;
|
||||
await this.analysesResultsManager.downloadAnalysisResults(
|
||||
msg.analysisSummary,
|
||||
results => this.setAnalysisResults(results));
|
||||
results => this.setAnalysisResults(results, queryId));
|
||||
}
|
||||
|
||||
private async downloadAllAnalysesResults(msg: RemoteQueryDownloadAllAnalysesResultsMessage): Promise<void> {
|
||||
await this.analysesResultsManager.downloadAnalysesResults(
|
||||
const queryId = this.currentQueryId;
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
msg.analysisSummaries,
|
||||
undefined,
|
||||
results => this.setAnalysisResults(results));
|
||||
results => this.setAnalysisResults(results, queryId));
|
||||
}
|
||||
|
||||
private async viewAnalysisResults(msg: RemoteQueryViewAnalysisResultsMessage): Promise<void> {
|
||||
const downloadLink = msg.analysisSummary.downloadLink;
|
||||
const filePath = path.join(this.analysesResultsManager.storagePath, downloadLink.queryId, downloadLink.id, downloadLink.innerFilePath || '');
|
||||
|
||||
const sarifViewerExtensionId = 'MS-SarifVSCode.sarif-viewer';
|
||||
|
||||
const sarifExt = extensions.getExtension(sarifViewerExtensionId);
|
||||
if (!sarifExt) {
|
||||
// Ask the user if they want to install the extension to view the results.
|
||||
void commands.executeCommand('workbench.extensions.installExtension', sarifViewerExtensionId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!sarifExt.isActive) {
|
||||
await sarifExt.activate();
|
||||
}
|
||||
|
||||
// Clear any previous results before showing new results
|
||||
await sarifExt.exports.closeAllLogs();
|
||||
|
||||
await sarifExt.exports.openLogs([
|
||||
Uri.file(filePath),
|
||||
]);
|
||||
}
|
||||
|
||||
public async setAnalysisResults(analysesResults: AnalysisResults[]): Promise<void> {
|
||||
if (this.panel?.active) {
|
||||
public async setAnalysisResults(analysesResults: AnalysisResults[], queryId: string | undefined): Promise<void> {
|
||||
if (this.panel?.active && this.currentQueryId === queryId) {
|
||||
await this.postMessage({
|
||||
t: 'setAnalysesResults',
|
||||
analysesResults: analysesResults
|
||||
analysesResults
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -265,23 +267,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
|
||||
private getDuration(startTime: number, endTime: number): string {
|
||||
const diffInMs = startTime - endTime;
|
||||
return this.formatDuration(diffInMs);
|
||||
}
|
||||
|
||||
private formatDuration(ms: number): string {
|
||||
const seconds = ms / 1000;
|
||||
const minutes = seconds / 60;
|
||||
const hours = minutes / 60;
|
||||
const days = hours / 24;
|
||||
if (days > 1) {
|
||||
return `${days.toFixed(2)} days`;
|
||||
} else if (hours > 1) {
|
||||
return `${hours.toFixed(2)} hours`;
|
||||
} else if (minutes > 1) {
|
||||
return `${minutes.toFixed(2)} minutes`;
|
||||
} else {
|
||||
return `${seconds.toFixed(2)} seconds`;
|
||||
}
|
||||
return humanizeUnit(diffInMs);
|
||||
}
|
||||
|
||||
private formatDate = (millis: number): string => {
|
||||
@@ -319,9 +305,13 @@ export class RemoteQueriesInterfaceManager {
|
||||
|
||||
return sortedAnalysisSummaries.map((analysisResult) => ({
|
||||
nwo: analysisResult.nwo,
|
||||
databaseSha: analysisResult.databaseSha || 'HEAD',
|
||||
resultCount: analysisResult.resultCount,
|
||||
downloadLink: analysisResult.downloadLink,
|
||||
fileSize: this.formatFileSize(analysisResult.fileSizeInBytes)
|
||||
sourceLocationPrefix: analysisResult.sourceLocationPrefix,
|
||||
fileSize: this.formatFileSize(analysisResult.fileSizeInBytes),
|
||||
starCount: analysisResult.starCount,
|
||||
lastUpdated: analysisResult.lastUpdated
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
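The view manager now remembers which query the panel is showing (`currentQueryId`), and `setAnalysisResults` drops updates tagged with any other query id, so a slow download started from an older query can no longer overwrite the results currently on screen. A sketch of the race this guards against; the manager instance and result arrays are assumed stand-ins, not taken from the diff:

// Panel is currently showing 'query-b':
await manager.setAnalysisResults(resultsForB, 'query-b');      // posted to the webview
await manager.setAnalysisResults(lateResultsForA, 'query-a');  // dropped: not the open query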
@@ -1,34 +1,59 @@
|
||||
import { CancellationToken, commands, ExtensionContext, Uri, window } from 'vscode';
|
||||
import { CancellationToken, commands, EventEmitter, ExtensionContext, Uri, env, window } from 'vscode';
|
||||
import { nanoid } from 'nanoid';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
|
||||
import { Credentials } from '../authentication';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { createTimestampFile, showAndLogErrorMessage, showInformationMessageWithAction } from '../helpers';
|
||||
import { createTimestampFile, showAndLogErrorMessage, showAndLogInformationMessage, showInformationMessageWithAction } from '../helpers';
|
||||
import { Logger } from '../logging';
|
||||
import { runRemoteQuery } from './run-remote-query';
|
||||
import { RemoteQueriesInterfaceManager } from './remote-queries-interface';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueriesMonitor } from './remote-queries-monitor';
|
||||
import { getRemoteQueryIndex } from './gh-actions-api-client';
|
||||
import { getRemoteQueryIndex, getRepositoriesMetadata, RepositoriesMetadata } from './gh-actions-api-client';
|
||||
import { RemoteQueryResultIndex } from './remote-query-result-index';
|
||||
import { RemoteQueryResult } from './remote-query-result';
|
||||
import { RemoteQueryResult, sumAnalysisSummariesResults } from './remote-query-result';
|
||||
import { DownloadLink } from './download-link';
|
||||
import { AnalysesResultsManager } from './analyses-results-manager';
|
||||
import { assertNever } from '../pure/helpers-pure';
|
||||
import { RemoteQueryHistoryItem } from './remote-query-history-item';
|
||||
import { QueryHistoryManager } from '../query-history';
|
||||
import { QueryStatus } from '../query-status';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { QueryHistoryInfo } from '../query-results';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
|
||||
const autoDownloadMaxSize = 300 * 1024;
|
||||
const autoDownloadMaxCount = 100;
|
||||
|
||||
const noop = () => { /* do nothing */ };
|
||||
|
||||
export interface NewQueryEvent {
|
||||
queryId: string;
|
||||
query: RemoteQuery
|
||||
}
|
||||
|
||||
export interface RemovedQueryEvent {
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
export interface UpdatedQueryStatusEvent {
|
||||
queryId: string;
|
||||
status: QueryStatus;
|
||||
failureReason?: string;
|
||||
repositoryCount?: number;
|
||||
resultCount?: number;
|
||||
}
|
||||
|
||||
export class RemoteQueriesManager extends DisposableObject {
|
||||
public readonly onRemoteQueryAdded;
|
||||
public readonly onRemoteQueryRemoved;
|
||||
public readonly onRemoteQueryStatusUpdate;
|
||||
|
||||
private readonly remoteQueryAddedEventEmitter;
|
||||
private readonly remoteQueryRemovedEventEmitter;
|
||||
private readonly remoteQueryStatusUpdateEventEmitter;
|
||||
|
||||
private readonly remoteQueriesMonitor: RemoteQueriesMonitor;
|
||||
private readonly analysesResultsManager: AnalysesResultsManager;
|
||||
private readonly interfaceManager: RemoteQueriesInterfaceManager;
|
||||
@@ -36,54 +61,51 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly cliServer: CodeQLCliServer,
|
||||
private readonly qhm: QueryHistoryManager,
|
||||
private readonly storagePath: string,
|
||||
logger: Logger,
|
||||
) {
|
||||
super();
|
||||
this.analysesResultsManager = new AnalysesResultsManager(ctx, storagePath, logger);
|
||||
this.analysesResultsManager = new AnalysesResultsManager(ctx, cliServer, storagePath, logger);
|
||||
this.interfaceManager = new RemoteQueriesInterfaceManager(ctx, logger, this.analysesResultsManager);
|
||||
this.remoteQueriesMonitor = new RemoteQueriesMonitor(ctx, logger);
|
||||
|
||||
// Handle events from the query history manager
|
||||
this.push(this.qhm.onDidAddQueryItem(this.handleAddQueryItem.bind(this)));
|
||||
this.push(this.qhm.onDidRemoveQueryItem(this.handleRemoveQueryItem.bind(this)));
|
||||
this.push(this.qhm.onWillOpenQueryItem(this.handleOpenQueryItem.bind(this)));
|
||||
this.remoteQueryAddedEventEmitter = this.push(new EventEmitter<NewQueryEvent>());
|
||||
this.remoteQueryRemovedEventEmitter = this.push(new EventEmitter<RemovedQueryEvent>());
|
||||
this.remoteQueryStatusUpdateEventEmitter = this.push(new EventEmitter<UpdatedQueryStatusEvent>());
|
||||
this.onRemoteQueryAdded = this.remoteQueryAddedEventEmitter.event;
|
||||
this.onRemoteQueryRemoved = this.remoteQueryRemovedEventEmitter.event;
|
||||
this.onRemoteQueryStatusUpdate = this.remoteQueryStatusUpdateEventEmitter.event;
|
||||
}
|
||||
|
||||
private async handleAddQueryItem(queryItem: QueryHistoryInfo) {
|
||||
if (queryItem?.t === 'remote') {
|
||||
if (!(await this.queryHistoryItemExists(queryItem))) {
|
||||
// In this case, the query was deleted from disk, most likely because it was purged
|
||||
// by another workspace. We should remove it from the history manager.
|
||||
await this.qhm.handleRemoveHistoryItem(queryItem);
|
||||
} else if (queryItem.status === QueryStatus.InProgress) {
|
||||
// In this case, last time we checked, the query was still in progress.
|
||||
// We need to setup the monitor to check for completion.
|
||||
await commands.executeCommand('codeQL.monitorRemoteQuery', queryItem);
|
||||
}
|
||||
public async rehydrateRemoteQuery(queryId: string, query: RemoteQuery, status: QueryStatus) {
|
||||
if (!(await this.queryRecordExists(queryId))) {
|
||||
// In this case, the query was deleted from disk, most likely because it was purged
|
||||
// by another workspace.
|
||||
this.remoteQueryRemovedEventEmitter.fire({ queryId });
|
||||
} else if (status === QueryStatus.InProgress) {
|
||||
// In this case, last time we checked, the query was still in progress.
|
||||
// We need to setup the monitor to check for completion.
|
||||
await commands.executeCommand('codeQL.monitorRemoteQuery', queryId, query);
|
||||
}
|
||||
}
|
||||
|
||||
private async handleRemoveQueryItem(queryItem: QueryHistoryInfo) {
|
||||
if (queryItem?.t === 'remote') {
|
||||
this.analysesResultsManager.removeAnalysesResults(queryItem.queryId);
|
||||
await this.removeStorageDirectory(queryItem);
|
||||
}
|
||||
public async removeRemoteQuery(queryId: string) {
|
||||
this.analysesResultsManager.removeAnalysesResults(queryId);
|
||||
await this.removeStorageDirectory(queryId);
|
||||
}
|
||||
|
||||
private async handleOpenQueryItem(queryItem: QueryHistoryInfo) {
|
||||
if (queryItem?.t === 'remote') {
|
||||
try {
|
||||
const remoteQueryResult = await this.retrieveJsonFile(queryItem, 'query-result.json') as RemoteQueryResult;
|
||||
// open results in the background
|
||||
void this.openResults(queryItem.remoteQuery, remoteQueryResult).then(
|
||||
noop,
|
||||
err => void showAndLogErrorMessage(err)
|
||||
);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(`Could not open query results. ${e}`);
|
||||
}
|
||||
public async openRemoteQueryResults(queryId: string) {
|
||||
try {
|
||||
const remoteQuery = await this.retrieveJsonFile(queryId, 'query.json') as RemoteQuery;
|
||||
const remoteQueryResult = await this.retrieveJsonFile(queryId, 'query-result.json') as RemoteQueryResult;
|
||||
|
||||
// Open results in the background
|
||||
void this.openResults(remoteQuery, remoteQueryResult).then(
|
||||
noop,
|
||||
err => void showAndLogErrorMessage(err)
|
||||
);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(`Could not open query results. ${e}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -105,63 +127,41 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
const query = querySubmission.query;
|
||||
const queryId = this.createQueryId(query.queryName);
|
||||
|
||||
const queryHistoryItem: RemoteQueryHistoryItem = {
|
||||
t: 'remote',
|
||||
status: QueryStatus.InProgress,
|
||||
completed: false,
|
||||
queryId,
|
||||
label: query.queryName,
|
||||
remoteQuery: query,
|
||||
};
|
||||
await this.prepareStorageDirectory(queryHistoryItem);
|
||||
await this.storeJsonFile(queryHistoryItem, 'query.json', query);
|
||||
await this.prepareStorageDirectory(queryId);
|
||||
await this.storeJsonFile(queryId, 'query.json', query);
|
||||
|
||||
this.qhm.addQuery(queryHistoryItem);
|
||||
await this.qhm.refreshTreeView();
|
||||
this.remoteQueryAddedEventEmitter.fire({ queryId, query });
|
||||
void commands.executeCommand('codeQL.monitorRemoteQuery', queryId, query);
|
||||
}
|
||||
}
|
||||
|
||||
public async monitorRemoteQuery(
|
||||
queryItem: RemoteQueryHistoryItem,
|
||||
queryId: string,
|
||||
remoteQuery: RemoteQuery,
|
||||
cancellationToken: CancellationToken
|
||||
): Promise<void> {
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
|
||||
const queryWorkflowResult = await this.remoteQueriesMonitor.monitorQuery(queryItem.remoteQuery, cancellationToken);
|
||||
const queryWorkflowResult = await this.remoteQueriesMonitor.monitorQuery(remoteQuery, cancellationToken);
|
||||
|
||||
const executionEndTime = Date.now();
|
||||
|
||||
if (queryWorkflowResult.status === 'CompletedSuccessfully') {
|
||||
const resultIndex = await getRemoteQueryIndex(credentials, queryItem.remoteQuery);
|
||||
queryItem.completed = true;
|
||||
if (resultIndex) {
|
||||
queryItem.status = QueryStatus.Completed;
|
||||
const queryResult = this.mapQueryResult(executionEndTime, resultIndex, queryItem.queryId);
|
||||
|
||||
await this.storeJsonFile(queryItem, 'query-result.json', queryResult);
|
||||
|
||||
// Kick off auto-download of results in the background.
|
||||
void commands.executeCommand('codeQL.autoDownloadRemoteQueryResults', queryResult);
|
||||
|
||||
// Ask if the user wants to open the results in the background.
|
||||
void this.askToOpenResults(queryItem.remoteQuery, queryResult).then(
|
||||
noop,
|
||||
err => {
|
||||
void showAndLogErrorMessage(err);
|
||||
}
|
||||
);
|
||||
} else {
|
||||
void showAndLogErrorMessage(`There was an issue retrieving the result for the query ${queryItem.label}`);
|
||||
queryItem.status = QueryStatus.Failed;
|
||||
}
|
||||
await this.downloadAvailableResults(queryId, remoteQuery, credentials, executionEndTime);
|
||||
} else if (queryWorkflowResult.status === 'CompletedUnsuccessfully') {
|
||||
queryItem.failureReason = queryWorkflowResult.error;
|
||||
queryItem.status = QueryStatus.Failed;
|
||||
void showAndLogErrorMessage(`Remote query execution failed. Error: ${queryWorkflowResult.error}`);
|
||||
if (queryWorkflowResult.error?.includes('cancelled')) {
|
||||
// Workflow was cancelled on the server
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({ queryId, status: QueryStatus.Failed, failureReason: 'Cancelled' });
|
||||
await this.downloadAvailableResults(queryId, remoteQuery, credentials, executionEndTime);
|
||||
void showAndLogInformationMessage('Variant analysis was cancelled');
|
||||
} else {
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({ queryId, status: QueryStatus.Failed, failureReason: queryWorkflowResult.error });
|
||||
void showAndLogErrorMessage(`Variant analysis execution failed. Error: ${queryWorkflowResult.error}`);
|
||||
}
|
||||
} else if (queryWorkflowResult.status === 'Cancelled') {
|
||||
queryItem.failureReason = 'Cancelled';
|
||||
queryItem.status = QueryStatus.Failed;
|
||||
void showAndLogErrorMessage('Remote query monitoring was cancelled');
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({ queryId, status: QueryStatus.Failed, failureReason: 'Cancelled' });
|
||||
await this.downloadAvailableResults(queryId, remoteQuery, credentials, executionEndTime);
|
||||
void showAndLogInformationMessage('Variant analysis was cancelled');
|
||||
} else if (queryWorkflowResult.status === 'InProgress') {
|
||||
// Should not get here. Only including this to ensure `assertNever` uses proper type checking.
|
||||
void showAndLogErrorMessage(`Unexpected status: ${queryWorkflowResult.status}`);
|
||||
@@ -169,7 +169,6 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
// Ensure all cases are covered
|
||||
assertNever(queryWorkflowResult.status);
|
||||
}
|
||||
await this.qhm.refreshTreeView();
|
||||
}
|
||||
|
||||
public async autoDownloadRemoteQueryResults(
|
||||
@@ -181,28 +180,56 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
.slice(0, autoDownloadMaxCount)
|
||||
.map(a => ({
|
||||
nwo: a.nwo,
|
||||
databaseSha: a.databaseSha,
|
||||
resultCount: a.resultCount,
|
||||
sourceLocationPrefix: a.sourceLocationPrefix,
|
||||
downloadLink: a.downloadLink,
|
||||
fileSize: String(a.fileSizeInBytes)
|
||||
}));
|
||||
|
||||
await this.analysesResultsManager.downloadAnalysesResults(
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
analysesToDownload,
|
||||
token,
|
||||
results => this.interfaceManager.setAnalysisResults(results));
|
||||
results => this.interfaceManager.setAnalysisResults(results, queryResult.queryId));
|
||||
}
|
||||
|
||||
private mapQueryResult(executionEndTime: number, resultIndex: RemoteQueryResultIndex, queryId: string): RemoteQueryResult {
|
||||
public async copyRemoteQueryRepoListToClipboard(queryId: string) {
|
||||
const queryResult = await this.getRemoteQueryResult(queryId);
|
||||
const repos = queryResult.analysisSummaries
|
||||
.filter(a => a.resultCount > 0)
|
||||
.map(a => a.nwo);
|
||||
|
||||
if (repos.length > 0) {
|
||||
const text = [
|
||||
'"new-repo-list": [',
|
||||
...repos.slice(0, -1).map(repo => ` "${repo}",`),
|
||||
` "${repos[repos.length - 1]}"`,
|
||||
']'
|
||||
];
|
||||
|
||||
await env.clipboard.writeText(text.join(os.EOL));
|
||||
}
|
||||
}
|
||||
|
||||
private mapQueryResult(
|
||||
executionEndTime: number,
|
||||
resultIndex: RemoteQueryResultIndex,
|
||||
queryId: string,
|
||||
metadata: RepositoriesMetadata
|
||||
): RemoteQueryResult {
|
||||
const analysisSummaries = resultIndex.successes.map(item => ({
|
||||
nwo: item.nwo,
|
||||
databaseSha: item.sha || 'HEAD',
|
||||
resultCount: item.resultCount,
|
||||
sourceLocationPrefix: item.sourceLocationPrefix,
|
||||
fileSizeInBytes: item.sarifFileSize ? item.sarifFileSize : item.bqrsFileSize,
|
||||
starCount: metadata[item.nwo]?.starCount,
|
||||
lastUpdated: metadata[item.nwo]?.lastUpdated,
|
||||
downloadLink: {
|
||||
id: item.artifactId.toString(),
|
||||
urlPath: `${resultIndex.artifactsUrlPath}/${item.artifactId}`,
|
||||
innerFilePath: item.sarifFileSize ? 'results.sarif' : 'results.bqrs',
|
||||
queryId,
|
||||
queryId
|
||||
} as DownloadLink
|
||||
}));
|
||||
const analysisFailures = resultIndex.failures.map(item => ({
|
||||
@@ -223,8 +250,9 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
}
|
||||
|
||||
private async askToOpenResults(query: RemoteQuery, queryResult: RemoteQueryResult): Promise<void> {
|
||||
const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
|
||||
const message = `Query "${query.queryName}" run on ${query.repositories.length} repositories and returned ${totalResultCount} results`;
|
||||
const totalResultCount = sumAnalysisSummariesResults(queryResult.analysisSummaries);
|
||||
const totalRepoCount = queryResult.analysisSummaries.length;
|
||||
const message = `Query "${query.queryName}" run on ${totalRepoCount} repositories and returned ${totalResultCount} results`;
|
||||
|
||||
const shouldOpenView = await showInformationMessageWithAction(message, 'View');
|
||||
if (shouldOpenView) {
|
||||
@@ -239,7 +267,6 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
*/
|
||||
private createQueryId(queryName: string): string {
|
||||
return `${queryName}-${nanoid()}`;
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -248,29 +275,87 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
* used by the query history manager to determine when the directory
|
||||
* should be deleted.
|
||||
*
|
||||
* @param queryName The name of the query that was run.
|
||||
*/
|
||||
private async prepareStorageDirectory(queryHistoryItem: RemoteQueryHistoryItem): Promise<void> {
|
||||
await createTimestampFile(path.join(this.storagePath, queryHistoryItem.queryId));
|
||||
private async prepareStorageDirectory(queryId: string): Promise<void> {
|
||||
await createTimestampFile(path.join(this.storagePath, queryId));
|
||||
}
|
||||
|
||||
private async storeJsonFile<T>(queryHistoryItem: RemoteQueryHistoryItem, fileName: string, obj: T): Promise<void> {
|
||||
const filePath = path.join(this.storagePath, queryHistoryItem.queryId, fileName);
|
||||
private async getRemoteQueryResult(queryId: string): Promise<RemoteQueryResult> {
|
||||
return await this.retrieveJsonFile<RemoteQueryResult>(queryId, 'query-result.json');
|
||||
}
|
||||
|
||||
private async storeJsonFile<T>(queryId: string, fileName: string, obj: T): Promise<void> {
|
||||
const filePath = path.join(this.storagePath, queryId, fileName);
|
||||
await fs.writeFile(filePath, JSON.stringify(obj, null, 2), 'utf8');
|
||||
}
|
||||
|
||||
private async retrieveJsonFile<T>(queryHistoryItem: RemoteQueryHistoryItem, fileName: string): Promise<T> {
|
||||
const filePath = path.join(this.storagePath, queryHistoryItem.queryId, fileName);
|
||||
private async retrieveJsonFile<T>(queryId: string, fileName: string): Promise<T> {
|
||||
const filePath = path.join(this.storagePath, queryId, fileName);
|
||||
return JSON.parse(await fs.readFile(filePath, 'utf8'));
|
||||
}
|
||||
|
||||
private async removeStorageDirectory(queryItem: RemoteQueryHistoryItem): Promise<void> {
|
||||
const filePath = path.join(this.storagePath, queryItem.queryId);
|
||||
private async removeStorageDirectory(queryId: string): Promise<void> {
|
||||
const filePath = path.join(this.storagePath, queryId);
|
||||
await fs.remove(filePath);
|
||||
}
|
||||
|
||||
private async queryHistoryItemExists(queryItem: RemoteQueryHistoryItem): Promise<boolean> {
|
||||
const filePath = path.join(this.storagePath, queryItem.queryId);
|
||||
private async queryRecordExists(queryId: string): Promise<boolean> {
|
||||
const filePath = path.join(this.storagePath, queryId);
|
||||
return await fs.pathExists(filePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether there's a result index artifact available for the given query.
|
||||
* If so, set the query status to `Completed` and auto-download the results.
|
||||
*/
|
||||
private async downloadAvailableResults(
|
||||
queryId: string,
|
||||
remoteQuery: RemoteQuery,
|
||||
credentials: Credentials,
|
||||
executionEndTime: number
|
||||
): Promise<void> {
|
||||
const resultIndex = await getRemoteQueryIndex(credentials, remoteQuery);
|
||||
if (resultIndex) {
|
||||
const metadata = await this.getRepositoriesMetadata(resultIndex, credentials);
|
||||
const queryResult = this.mapQueryResult(executionEndTime, resultIndex, queryId, metadata);
|
||||
const resultCount = sumAnalysisSummariesResults(queryResult.analysisSummaries);
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({
|
||||
queryId,
|
||||
status: QueryStatus.Completed,
|
||||
repositoryCount: queryResult.analysisSummaries.length,
|
||||
resultCount
|
||||
});
|
||||
|
||||
await this.storeJsonFile(queryId, 'query-result.json', queryResult);
|
||||
|
||||
// Kick off auto-download of results in the background.
|
||||
void commands.executeCommand('codeQL.autoDownloadRemoteQueryResults', queryResult);
|
||||
|
||||
// Ask if the user wants to open the results in the background.
|
||||
void this.askToOpenResults(remoteQuery, queryResult).then(
|
||||
noop,
|
||||
err => {
|
||||
void showAndLogErrorMessage(err);
|
||||
}
|
||||
);
|
||||
} else {
|
||||
const controllerRepo = `${remoteQuery.controllerRepository.owner}/${remoteQuery.controllerRepository.name}`;
|
||||
const workflowRunUrl = `https://github.com/${controllerRepo}/actions/runs/${remoteQuery.actionsWorkflowRunId}`;
|
||||
void showAndLogErrorMessage(
|
||||
`There was an issue retrieving the result for the query [${remoteQuery.queryName}](${workflowRunUrl}).`
|
||||
);
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({ queryId, status: QueryStatus.Failed });
|
||||
}
|
||||
}
|
||||
|
||||
private async getRepositoriesMetadata(resultIndex: RemoteQueryResultIndex, credentials: Credentials) {
|
||||
const nwos = resultIndex.successes.map(s => s.nwo);
|
||||
return await getRepositoriesMetadata(credentials, nwos);
|
||||
}
|
||||
|
||||
// Pulled from the analysis results manager, so that we can get access to
|
||||
// analyses results from the "export results" command.
|
||||
public getAnalysesResults(queryId: string): AnalysisResults[] {
|
||||
return [...this.analysesResultsManager.getAnalysesResults(queryId)];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,332 @@
|
||||
import { CellValue } from '../pure/bqrs-cli-types';
|
||||
import { tryGetRemoteLocation } from '../pure/bqrs-utils';
|
||||
import { createRemoteFileRef } from '../pure/location-link-utils';
|
||||
import { parseHighlightedLine, shouldHighlightLine } from '../pure/sarif-utils';
|
||||
import { convertNonPrintableChars } from '../text-utils';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { AnalysisAlert, AnalysisRawResults, AnalysisResults, CodeSnippet, FileLink, getAnalysisResultCount, HighlightedRegion } from './shared/analysis-result';
|
||||
|
||||
export type MarkdownLinkType = 'local' | 'gist';
|
||||
|
||||
export interface MarkdownFile {
|
||||
fileName: string;
|
||||
content: string[]; // Each array item is a line of the markdown file.
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates markdown files with variant analysis results.
|
||||
*/
|
||||
export function generateMarkdown(
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[],
|
||||
linkType: MarkdownLinkType
|
||||
): MarkdownFile[] {
|
||||
const resultsFiles: MarkdownFile[] = [];
|
||||
// Generate summary file with links to individual files
|
||||
const summaryFile: MarkdownFile = generateMarkdownSummary(query);
|
||||
for (const analysisResult of analysesResults) {
|
||||
const resultsCount = getAnalysisResultCount(analysisResult);
|
||||
if (resultsCount === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Append nwo and results count to the summary table
|
||||
const nwo = analysisResult.nwo;
|
||||
const fileName = createFileName(nwo);
|
||||
const link = createRelativeLink(fileName, linkType);
|
||||
summaryFile.content.push(`| ${nwo} | [${resultsCount} result(s)](${link}) |`);
|
||||
|
||||
// Generate individual markdown file for each repository
|
||||
const resultsFileContent = [
|
||||
`### ${analysisResult.nwo}`,
|
||||
''
|
||||
];
|
||||
for (const interpretedResult of analysisResult.interpretedResults) {
|
||||
const individualResult = generateMarkdownForInterpretedResult(interpretedResult, query.language);
|
||||
resultsFileContent.push(...individualResult);
|
||||
}
|
||||
if (analysisResult.rawResults) {
|
||||
const rawResultTable = generateMarkdownForRawResults(analysisResult.rawResults);
|
||||
resultsFileContent.push(...rawResultTable);
|
||||
}
|
||||
resultsFiles.push({
|
||||
fileName: fileName,
|
||||
content: resultsFileContent,
|
||||
});
|
||||
}
|
||||
return [summaryFile, ...resultsFiles];
|
||||
}
|
||||
|
||||
export function generateMarkdownSummary(query: RemoteQuery): MarkdownFile {
|
||||
const lines: string[] = [];
|
||||
// Title
|
||||
lines.push(
|
||||
`### Results for "${query.queryName}"`,
|
||||
''
|
||||
);
|
||||
|
||||
// Expandable section containing query text
|
||||
const queryCodeBlock = [
|
||||
'```ql',
|
||||
...query.queryText.split('\n'),
|
||||
'```',
|
||||
];
|
||||
lines.push(
|
||||
...buildExpandableMarkdownSection('Query', queryCodeBlock)
|
||||
);
|
||||
|
||||
// Padding between sections
|
||||
lines.push(
|
||||
'<br />',
|
||||
'',
|
||||
);
|
||||
|
||||
// Summary table
|
||||
lines.push(
|
||||
'### Summary',
|
||||
'',
|
||||
'| Repository | Results |',
|
||||
'| --- | --- |',
|
||||
);
|
||||
// nwo and result count will be appended to this table
|
||||
return {
|
||||
fileName: '_summary',
|
||||
content: lines
|
||||
};
|
||||
}
|
||||
|
||||
function generateMarkdownForInterpretedResult(interpretedResult: AnalysisAlert, language: string): string[] {
|
||||
const lines: string[] = [];
|
||||
lines.push(createMarkdownRemoteFileRef(
|
||||
interpretedResult.fileLink,
|
||||
interpretedResult.highlightedRegion?.startLine,
|
||||
interpretedResult.highlightedRegion?.endLine
|
||||
));
|
||||
lines.push('');
|
||||
const codeSnippet = interpretedResult.codeSnippet;
|
||||
const highlightedRegion = interpretedResult.highlightedRegion;
|
||||
if (codeSnippet) {
|
||||
lines.push(
|
||||
...generateMarkdownForCodeSnippet(codeSnippet, language, highlightedRegion),
|
||||
);
|
||||
}
|
||||
const alertMessage = generateMarkdownForAlertMessage(interpretedResult);
|
||||
lines.push(alertMessage, '');
|
||||
|
||||
// If available, show paths
|
||||
const hasPathResults = interpretedResult.codeFlows.length > 0;
|
||||
if (hasPathResults) {
|
||||
const pathLines = generateMarkdownForPathResults(interpretedResult, language);
|
||||
lines.push(...pathLines);
|
||||
}
|
||||
|
||||
// Padding between results
|
||||
lines.push(
|
||||
'----------------------------------------',
|
||||
'',
|
||||
);
|
||||
return lines;
|
||||
}
|
||||
|
||||
function generateMarkdownForCodeSnippet(
|
||||
codeSnippet: CodeSnippet,
|
||||
language: string,
|
||||
highlightedRegion?: HighlightedRegion
|
||||
): string[] {
|
||||
const lines: string[] = [];
|
||||
const snippetStartLine = codeSnippet.startLine || 0;
|
||||
const codeLines = codeSnippet.text
|
||||
.split('\n')
|
||||
.map((line, index) =>
|
||||
highlightCodeLines(line, index + snippetStartLine, highlightedRegion)
|
||||
);
|
||||
|
||||
// Make sure there are no extra newlines before or after the <code> block:
|
||||
const codeLinesWrapped = [...codeLines];
|
||||
codeLinesWrapped[0] = `<pre><code class="${language}">${codeLinesWrapped[0]}`;
|
||||
codeLinesWrapped[codeLinesWrapped.length - 1] = `${codeLinesWrapped[codeLinesWrapped.length - 1]}</code></pre>`;
|
||||
|
||||
lines.push(
|
||||
...codeLinesWrapped,
|
||||
'',
|
||||
);
|
||||
return lines;
|
||||
}
|
||||
|
||||
function highlightCodeLines(
|
||||
line: string,
|
||||
lineNumber: number,
|
||||
highlightedRegion?: HighlightedRegion
|
||||
): string {
|
||||
if (!highlightedRegion || !shouldHighlightLine(lineNumber, highlightedRegion)) {
|
||||
return line;
|
||||
}
|
||||
const partiallyHighlightedLine = parseHighlightedLine(
|
||||
line,
|
||||
lineNumber,
|
||||
highlightedRegion
|
||||
);
|
||||
return `${partiallyHighlightedLine.plainSection1}<strong>${partiallyHighlightedLine.highlightedSection}</strong>${partiallyHighlightedLine.plainSection2}`;
|
||||
}
|
||||
|
||||
function generateMarkdownForAlertMessage(
|
||||
interpretedResult: AnalysisAlert
|
||||
): string {
|
||||
let alertMessage = '';
|
||||
for (const token of interpretedResult.message.tokens) {
|
||||
if (token.t === 'text') {
|
||||
alertMessage += token.text;
|
||||
} else if (token.t === 'location') {
|
||||
alertMessage += createMarkdownRemoteFileRef(
|
||||
token.location.fileLink,
|
||||
token.location.highlightedRegion?.startLine,
|
||||
token.location.highlightedRegion?.endLine,
|
||||
token.text
|
||||
);
|
||||
}
|
||||
}
|
||||
// Italicize the alert message
|
||||
return `*${alertMessage}*`;
|
||||
}
|
||||
|
||||
function generateMarkdownForPathResults(
|
||||
interpretedResult: AnalysisAlert,
|
||||
language: string
|
||||
): string[] {
|
||||
const lines: string[] = [];
|
||||
lines.push('#### Paths', '');
|
||||
for (const codeFlow of interpretedResult.codeFlows) {
|
||||
const pathLines: string[] = [];
|
||||
const stepCount = codeFlow.threadFlows.length;
|
||||
const title = `Path with ${stepCount} steps`;
|
||||
for (let i = 0; i < stepCount; i++) {
|
||||
const threadFlow = codeFlow.threadFlows[i];
|
||||
const link = createMarkdownRemoteFileRef(
|
||||
threadFlow.fileLink,
|
||||
threadFlow.highlightedRegion?.startLine,
|
||||
threadFlow.highlightedRegion?.endLine
|
||||
);
|
||||
const codeSnippet = generateMarkdownForCodeSnippet(
|
||||
threadFlow.codeSnippet,
|
||||
language,
|
||||
threadFlow.highlightedRegion
|
||||
);
|
||||
// Indent the snippet to fit with the numbered list.
|
||||
const codeSnippetIndented = codeSnippet.map((line) => ` ${line}`);
|
||||
pathLines.push(`${i + 1}. ${link}`, ...codeSnippetIndented);
|
||||
}
|
||||
lines.push(
|
||||
...buildExpandableMarkdownSection(title, pathLines)
|
||||
);
|
||||
}
|
||||
return lines;
|
||||
}
|
||||
|
||||
function generateMarkdownForRawResults(
|
||||
analysisRawResults: AnalysisRawResults
|
||||
): string[] {
|
||||
const tableRows: string[] = [];
|
||||
const columnCount = analysisRawResults.schema.columns.length;
|
||||
// Table headers are the column names if they exist, and empty otherwise
|
||||
const headers = analysisRawResults.schema.columns.map(
|
||||
(column) => column.name || ''
|
||||
);
|
||||
const tableHeader = `| ${headers.join(' | ')} |`;
|
||||
|
||||
tableRows.push(tableHeader);
|
||||
tableRows.push('|' + ' --- |'.repeat(columnCount));
|
||||
|
||||
for (const row of analysisRawResults.resultSet.rows) {
|
||||
const cells = row.map((cell) =>
|
||||
generateMarkdownForRawTableCell(cell, analysisRawResults.fileLinkPrefix, analysisRawResults.sourceLocationPrefix)
|
||||
);
|
||||
tableRows.push(`| ${cells.join(' | ')} |`);
|
||||
}
|
||||
return tableRows;
|
||||
}
|
||||
|
||||
function generateMarkdownForRawTableCell(
|
||||
value: CellValue,
|
||||
fileLinkPrefix: string,
|
||||
sourceLocationPrefix: string
|
||||
) {
|
||||
let cellValue: string;
|
||||
switch (typeof value) {
|
||||
case 'string':
|
||||
case 'number':
|
||||
case 'boolean':
|
||||
cellValue = `\`${convertNonPrintableChars(value.toString())}\``;
|
||||
break;
|
||||
case 'object':
|
||||
{
|
||||
const url = tryGetRemoteLocation(value.url, fileLinkPrefix, sourceLocationPrefix);
|
||||
if (url) {
|
||||
cellValue = `[\`${convertNonPrintableChars(value.label)}\`](${url})`;
|
||||
} else {
|
||||
cellValue = `\`${convertNonPrintableChars(value.label)}\``;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
// `|` characters break the table, so we need to escape them
|
||||
return cellValue.replaceAll('|', '\\|');
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates a markdown link to a remote file.
|
||||
* If the "link text" is not provided, we use the file path.
|
||||
*/
|
||||
export function createMarkdownRemoteFileRef(
|
||||
fileLink: FileLink,
|
||||
startLine?: number,
|
||||
endLine?: number,
|
||||
linkText?: string,
|
||||
): string {
|
||||
const markdownLink = `[${linkText || fileLink.filePath}](${createRemoteFileRef(fileLink, startLine, endLine)})`;
|
||||
return markdownLink;
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds an expandable markdown section of the form:
|
||||
* <details>
|
||||
* <summary>title</summary>
|
||||
*
|
||||
* contents
|
||||
*
|
||||
* </details>
|
||||
*/
|
||||
function buildExpandableMarkdownSection(title: string, contents: string[]): string[] {
|
||||
const expandableLines: string[] = [];
|
||||
expandableLines.push(
|
||||
'<details>',
|
||||
`<summary>${title}</summary>`,
|
||||
'',
|
||||
...contents,
|
||||
'',
|
||||
'</details>',
|
||||
''
|
||||
);
|
||||
return expandableLines;
|
||||
}
|
||||
|
||||
function createRelativeLink(fileName: string, linkType: MarkdownLinkType): string {
|
||||
switch (linkType) {
|
||||
case 'local':
|
||||
return `./${fileName}.md`;
|
||||
|
||||
case 'gist':
|
||||
// Creates an anchor link to a file in the gist. This is of the form:
|
||||
// '#file-<name>-<file-extension>'
|
||||
return `#file-${fileName}-md`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the name of the markdown file for a given repository nwo.
|
||||
* This name doesn't include the file extension.
|
||||
*/
|
||||
function createFileName(nwo: string) {
|
||||
const [owner, repo] = nwo.split('/');
|
||||
return `${owner}-${repo}`;
|
||||
}
|
||||
@@ -49,7 +49,7 @@ export class RemoteQueriesMonitor {
|
||||
attemptCount++;
|
||||
}
|
||||
|
||||
void this.logger.log('Remote query monitoring timed out after 2 days');
|
||||
void this.logger.log('Variant analysis monitoring timed out after 2 days');
|
||||
return { status: 'Cancelled' };
|
||||
}
|
||||
|
||||
|
||||
@@ -7,9 +7,10 @@ import { RemoteQuery } from './remote-query';
|
||||
export interface RemoteQueryHistoryItem {
|
||||
readonly t: 'remote';
|
||||
failureReason?: string;
|
||||
resultCount?: number;
|
||||
status: QueryStatus;
|
||||
completed: boolean;
|
||||
readonly queryId: string,
|
||||
label: string; // TODO, the query label should have interpolation like local queries
|
||||
remoteQuery: RemoteQuery;
|
||||
userSpecifiedLabel?: string;
|
||||
}
|
||||
|
||||
@@ -8,9 +8,11 @@ export interface RemoteQuerySuccessIndexItem {
|
||||
id: string;
|
||||
artifactId: number;
|
||||
nwo: string;
|
||||
sha?: string;
|
||||
resultCount: number;
|
||||
bqrsFileSize: number;
|
||||
sarifFileSize?: number;
|
||||
sourceLocationPrefix: string;
|
||||
}
|
||||
|
||||
export interface RemoteQueryFailureIndexItem {
|
||||
|
||||
@@ -2,15 +2,26 @@ import { DownloadLink } from './download-link';
|
||||
import { AnalysisFailure } from './shared/analysis-failure';
|
||||
|
||||
export interface RemoteQueryResult {
|
||||
executionEndTime: number; // Can't use a Date here since it needs to be serialized and desserialized.
|
||||
analysisSummaries: AnalysisSummary[];
|
||||
analysisFailures: AnalysisFailure[];
|
||||
queryId: string;
|
||||
executionEndTime: number, // Can't use a Date here since it needs to be serialized and desserialized.
|
||||
analysisSummaries: AnalysisSummary[],
|
||||
analysisFailures: AnalysisFailure[],
|
||||
queryId: string,
|
||||
}
|
||||
|
||||
export interface AnalysisSummary {
|
||||
nwo: string,
|
||||
databaseSha: string,
|
||||
resultCount: number,
|
||||
sourceLocationPrefix: string,
|
||||
downloadLink: DownloadLink,
|
||||
fileSizeInBytes: number
|
||||
fileSizeInBytes: number,
|
||||
starCount?: number,
|
||||
lastUpdated?: number,
|
||||
}
|
||||
|
||||
/**
|
||||
* Sums up the number of results for all repos queried via a remote query.
|
||||
*/
|
||||
export const sumAnalysisSummariesResults = (analysisSummaries: AnalysisSummary[]): number => {
|
||||
return analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
|
||||
};
|
||||
|
||||
@@ -6,7 +6,7 @@ export interface RemoteQuery {
|
||||
queryText: string;
|
||||
language: string;
|
||||
controllerRepository: Repository;
|
||||
repositories: Repository[];
|
||||
executionStartTime: number; // Use number here since it needs to be serialized and desserialized.
|
||||
actionsWorkflowRunId: number;
|
||||
repositoryCount: number;
|
||||
}
|
||||
|
||||
210
extensions/ql-vscode/src/remote-queries/repository-selection.ts
Normal file
210
extensions/ql-vscode/src/remote-queries/repository-selection.ts
Normal file
@@ -0,0 +1,210 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import { QuickPickItem, window } from 'vscode';
|
||||
import { logger } from '../logging';
|
||||
import { getRemoteRepositoryLists, getRemoteRepositoryListsPath } from '../config';
|
||||
import { OWNER_REGEX, REPO_REGEX } from '../pure/helpers-pure';
|
||||
import { UserCancellationException } from '../commandRunner';
|
||||
|
||||
export interface RepositorySelection {
|
||||
repositories?: string[];
|
||||
repositoryLists?: string[];
|
||||
owners?: string[];
|
||||
}
|
||||
|
||||
interface RepoListQuickPickItem extends QuickPickItem {
|
||||
repositories?: string[];
|
||||
repositoryList?: string;
|
||||
useCustomRepo?: boolean;
|
||||
useAllReposOfOwner?: boolean;
|
||||
}
|
||||
|
||||
interface RepoList {
|
||||
label: string;
|
||||
repositories: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the repositories or repository lists to run the query against.
|
||||
* @returns The user selection.
|
||||
*/
|
||||
export async function getRepositorySelection(): Promise<RepositorySelection> {
|
||||
const quickPickItems = [
|
||||
createCustomRepoQuickPickItem(),
|
||||
createAllReposOfOwnerQuickPickItem(),
|
||||
...createSystemDefinedRepoListsQuickPickItems(),
|
||||
...(await createUserDefinedRepoListsQuickPickItems()),
|
||||
];
|
||||
|
||||
const options = {
|
||||
placeHolder: 'Select a repository list. You can define repository lists in the `codeQL.variantAnalysis.repositoryLists` setting.',
|
||||
ignoreFocusOut: true,
|
||||
};
|
||||
|
||||
const quickpick = await window.showQuickPick<RepoListQuickPickItem>(
|
||||
quickPickItems,
|
||||
options);
|
||||
|
||||
if (quickpick?.repositories?.length) {
|
||||
void logger.log(`Selected repositories: ${quickpick.repositories.join(', ')}`);
|
||||
return { repositories: quickpick.repositories };
|
||||
} else if (quickpick?.repositoryList) {
|
||||
void logger.log(`Selected repository list: ${quickpick.repositoryList}`);
|
||||
return { repositoryLists: [quickpick.repositoryList] };
|
||||
} else if (quickpick?.useCustomRepo) {
|
||||
const customRepo = await getCustomRepo();
|
||||
if (customRepo === undefined) {
|
||||
// The user cancelled, do nothing.
|
||||
throw new UserCancellationException('No repositories selected', true);
|
||||
}
|
||||
if (!customRepo || !REPO_REGEX.test(customRepo)) {
|
||||
throw new UserCancellationException('Invalid repository format. Please enter a valid repository in the format <owner>/<repo> (e.g. github/codeql)');
|
||||
}
|
||||
void logger.log(`Entered repository: ${customRepo}`);
|
||||
return { repositories: [customRepo] };
|
||||
} else if (quickpick?.useAllReposOfOwner) {
|
||||
const owner = await getOwner();
|
||||
if (owner === undefined) {
|
||||
// The user cancelled, do nothing.
|
||||
throw new UserCancellationException('No repositories selected', true);
|
||||
}
|
||||
if (!owner || !OWNER_REGEX.test(owner)) {
|
||||
throw new Error(`Invalid user or organization: ${owner}`);
|
||||
}
|
||||
void logger.log(`Entered owner: ${owner}`);
|
||||
return { owners: [owner] };
|
||||
} else {
|
||||
// We don't need to display a warning pop-up in this case, since the user just escaped out of the operation.
|
||||
// We set 'true' to make this a silent exception.
|
||||
throw new UserCancellationException('No repositories selected', true);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the selection is valid or not.
|
||||
* @param repoSelection The selection to check.
|
||||
* @returns A boolean flag indicating if the selection is valid or not.
|
||||
*/
|
||||
export function isValidSelection(repoSelection: RepositorySelection): boolean {
|
||||
const repositories = repoSelection.repositories || [];
|
||||
const repositoryLists = repoSelection.repositoryLists || [];
|
||||
const owners = repoSelection.owners || [];
|
||||
|
||||
return (repositories.length > 0 || repositoryLists.length > 0 || owners.length > 0);
|
||||
}
|
||||
|
||||
function createSystemDefinedRepoListsQuickPickItems(): RepoListQuickPickItem[] {
|
||||
const topNs = [10, 100, 1000];
|
||||
|
||||
return topNs.map(n => ({
|
||||
label: '$(star) Top ' + n,
|
||||
repositoryList: `top_${n}`,
|
||||
alwaysShow: true
|
||||
} as RepoListQuickPickItem));
|
||||
}
|
||||
|
||||
async function readExternalRepoLists(): Promise<RepoList[]> {
|
||||
const repoLists: RepoList[] = [];
|
||||
|
||||
const path = getRemoteRepositoryListsPath();
|
||||
if (!path) {
|
||||
return repoLists;
|
||||
}
|
||||
|
||||
await validateExternalRepoListsFile(path);
|
||||
const json = await readExternalRepoListsJson(path);
|
||||
|
||||
for (const [repoListName, repositories] of Object.entries(json)) {
|
||||
if (!Array.isArray(repositories)) {
|
||||
throw Error('Invalid repository lists file. It should contain an array of repositories for each list.');
|
||||
}
|
||||
|
||||
repoLists.push({
|
||||
label: repoListName,
|
||||
repositories
|
||||
});
|
||||
}
|
||||
|
||||
return repoLists;
|
||||
}
|
||||
|
||||
async function validateExternalRepoListsFile(path: string): Promise<void> {
|
||||
const pathExists = await fs.pathExists(path);
|
||||
if (!pathExists) {
|
||||
throw Error(`External repository lists file does not exist at ${path}`);
|
||||
}
|
||||
|
||||
const pathStat = await fs.stat(path);
|
||||
if (pathStat.isDirectory()) {
|
||||
throw Error('External repository lists path should not point to a directory');
|
||||
}
|
||||
}
|
||||
|
||||
async function readExternalRepoListsJson(path: string): Promise<Record<string, unknown>> {
|
||||
let json;
|
||||
|
||||
try {
|
||||
const fileContents = await fs.readFile(path, 'utf8');
|
||||
json = await JSON.parse(fileContents);
|
||||
} catch (error) {
|
||||
throw Error('Invalid repository lists file. It should contain valid JSON.');
|
||||
}
|
||||
|
||||
if (Array.isArray(json)) {
|
||||
throw Error('Invalid repository lists file. It should be an object mapping names to a list of repositories.');
|
||||
}
|
||||
|
||||
return json;
|
||||
}
|
||||
|
||||
function readRepoListsFromSettings(): RepoList[] {
|
||||
const repoLists = getRemoteRepositoryLists();
|
||||
if (!repoLists) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return Object.entries(repoLists).map<RepoList>(([label, repositories]) => (
|
||||
{
|
||||
label,
|
||||
repositories
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
async function createUserDefinedRepoListsQuickPickItems(): Promise<RepoListQuickPickItem[]> {
|
||||
const repoListsFromSetings = readRepoListsFromSettings();
|
||||
const repoListsFromExternalFile = await readExternalRepoLists();
|
||||
|
||||
return [...repoListsFromSetings, ...repoListsFromExternalFile];
|
||||
}
|
||||
|
||||
function createCustomRepoQuickPickItem(): RepoListQuickPickItem {
|
||||
return {
|
||||
label: '$(edit) Enter a GitHub repository',
|
||||
useCustomRepo: true,
|
||||
alwaysShow: true,
|
||||
};
|
||||
}
|
||||
|
||||
function createAllReposOfOwnerQuickPickItem(): RepoListQuickPickItem {
|
||||
return {
|
||||
label: '$(edit) Enter a GitHub user or organization',
|
||||
useAllReposOfOwner: true,
|
||||
alwaysShow: true
|
||||
};
|
||||
}
|
||||
|
||||
async function getCustomRepo(): Promise<string | undefined> {
|
||||
return await window.showInputBox({
|
||||
title: 'Enter a GitHub repository in the format <owner>/<repo> (e.g. github/codeql)',
|
||||
placeHolder: '<owner>/<repo>',
|
||||
prompt: 'Tip: you can save frequently used repositories in the `codeQL.variantAnalysis.repositoryLists` setting',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
}
|
||||
|
||||
async function getOwner(): Promise<string | undefined> {
|
||||
return await window.showInputBox({
|
||||
title: 'Enter a GitHub user or organization',
|
||||
ignoreFocusOut: true
|
||||
});
|
||||
}
|
||||
@@ -1,7 +1,8 @@
|
||||
import { CancellationToken, QuickPickItem, Uri, window } from 'vscode';
|
||||
import { CancellationToken, Uri, window } from 'vscode';
|
||||
import * as path from 'path';
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import {
|
||||
askForLanguage,
|
||||
@@ -9,19 +10,21 @@ import {
|
||||
getOnDiskWorkspaceFolders,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogInformationMessage,
|
||||
showInformationMessageWithAction,
|
||||
tryGetQueryMetadata,
|
||||
pluralize,
|
||||
tmpDir
|
||||
} from '../helpers';
|
||||
import { Credentials } from '../authentication';
|
||||
import * as cli from '../cli';
|
||||
import { logger } from '../logging';
|
||||
import { getRemoteControllerRepo, getRemoteRepositoryLists, setRemoteControllerRepo } from '../config';
|
||||
import { getActionBranch, getRemoteControllerRepo, setRemoteControllerRepo } from '../config';
|
||||
import { ProgressCallback, UserCancellationException } from '../commandRunner';
|
||||
import { OctokitResponse } from '@octokit/types/dist-types';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQuerySubmissionResult } from './remote-query-submission-result';
|
||||
import { QueryMetadata } from '../pure/interface-types';
|
||||
import { getErrorMessage, REPO_REGEX } from '../pure/helpers-pure';
|
||||
import { getRepositorySelection, isValidSelection, RepositorySelection } from './repository-selection';
|
||||
|
||||
export interface QlPack {
|
||||
name: string;
|
||||
@@ -30,71 +33,24 @@ export interface QlPack {
|
||||
defaultSuite?: Record<string, unknown>[];
|
||||
defaultSuiteFile?: string;
|
||||
}
|
||||
interface RepoListQuickPickItem extends QuickPickItem {
|
||||
repoList: string[];
|
||||
}
|
||||
|
||||
interface QueriesResponse {
|
||||
workflow_run_id: number
|
||||
workflow_run_id: number,
|
||||
errors?: {
|
||||
invalid_repositories?: string[],
|
||||
repositories_without_database?: string[],
|
||||
private_repositories?: string[],
|
||||
cutoff_repositories?: string[],
|
||||
cutoff_repositories_count?: number,
|
||||
},
|
||||
repositories_queried: string[],
|
||||
}
|
||||
|
||||
/**
|
||||
* This regex matches strings of the form `owner/repo` where:
|
||||
* - `owner` is made up of alphanumeric characters or single hyphens, starting and ending in an alphanumeric character
|
||||
* - `repo` is made up of alphanumeric characters, hyphens, or underscores
|
||||
*/
|
||||
const REPO_REGEX = /^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+\/[a-zA-Z0-9-_]+$/;
|
||||
|
||||
/**
|
||||
* Well-known names for the query pack used by the server.
|
||||
*/
|
||||
const QUERY_PACK_NAME = 'codeql-remote/query';
|
||||
|
||||
/**
|
||||
* Gets the repositories to run the query against.
|
||||
*/
|
||||
export async function getRepositories(): Promise<string[] | undefined> {
|
||||
const repoLists = getRemoteRepositoryLists();
|
||||
if (repoLists && Object.keys(repoLists).length) {
|
||||
const quickPickItems = Object.entries(repoLists).map<RepoListQuickPickItem>(([key, value]) => (
|
||||
{
|
||||
label: key, // the name of the repository list
|
||||
repoList: value, // the actual array of repositories
|
||||
}
|
||||
));
|
||||
const quickpick = await window.showQuickPick<RepoListQuickPickItem>(
|
||||
quickPickItems,
|
||||
{
|
||||
placeHolder: 'Select a repository list. You can define repository lists in the `codeQL.remoteQueries.repositoryLists` setting.',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (quickpick?.repoList.length) {
|
||||
void logger.log(`Selected repositories: ${quickpick.repoList.join(', ')}`);
|
||||
return quickpick.repoList;
|
||||
} else {
|
||||
void showAndLogErrorMessage('No repositories selected.');
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
void logger.log('No repository lists defined. Displaying text input box.');
|
||||
const remoteRepo = await window.showInputBox({
|
||||
title: 'Enter a GitHub repository in the format <owner>/<repo> (e.g. github/codeql)',
|
||||
placeHolder: '<owner>/<repo>',
|
||||
prompt: 'Tip: you can save frequently used repositories in the `codeQL.remoteQueries.repositoryLists` setting',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (!remoteRepo) {
|
||||
void showAndLogErrorMessage('No repositories entered.');
|
||||
return;
|
||||
} else if (!REPO_REGEX.test(remoteRepo)) { // Check if user entered invalid input
|
||||
void showAndLogErrorMessage('Invalid repository format. Must be in the format <owner>/<repo> (e.g. github/codeql)');
|
||||
return;
|
||||
}
|
||||
void logger.log(`Entered repository: ${remoteRepo}`);
|
||||
return [remoteRepo];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Two possibilities:
|
||||
* 1. There is no qlpack.yml in this directory. Assume this is a lone query and generate a synthetic qlpack for it.
|
||||
@@ -158,7 +114,7 @@ async function generateQueryPack(cliServer: cli.CodeQLCliServer, queryFile: stri
|
||||
[`codeql/${language}-all`]: '*',
|
||||
}
|
||||
};
|
||||
await fs.writeFile(path.join(queryPackDir, 'qlpack.yml'), yaml.safeDump(syntheticQueryPack));
|
||||
await fs.writeFile(path.join(queryPackDir, 'qlpack.yml'), yaml.dump(syntheticQueryPack));
|
||||
}
|
||||
if (!language) {
|
||||
throw new UserCancellationException('Could not determine language.');
|
||||
@@ -187,7 +143,7 @@ async function findPackRoot(queryFile: string): Promise<string> {
|
||||
while (!(await fs.pathExists(path.join(dir, 'qlpack.yml')))) {
|
||||
dir = path.dirname(dir);
|
||||
if (isFileSystemRoot(dir)) {
|
||||
// there is no qlpack.yml in this direcory or any parent directory.
|
||||
// there is no qlpack.yml in this directory or any parent directory.
|
||||
// just use the query file's directory as the pack root.
|
||||
return path.dirname(queryFile);
|
||||
}
|
||||
@@ -225,7 +181,7 @@ export async function runRemoteQuery(
|
||||
token: CancellationToken
|
||||
): Promise<void | RemoteQuerySubmissionResult> {
|
||||
if (!(await cliServer.cliConstraints.supportsRemoteQueries())) {
|
||||
throw new Error(`Remote queries are not supported by this version of CodeQL. Please upgrade to v${cli.CliVersionConstraint.CLI_VERSION_REMOTE_QUERIES
|
||||
throw new Error(`Variant analysis is not supported by this version of CodeQL. Please upgrade to v${cli.CliVersionConstraint.CLI_VERSION_REMOTE_QUERIES
|
||||
} or later.`);
|
||||
}
|
||||
|
||||
@@ -243,8 +199,8 @@ export async function runRemoteQuery(
|
||||
message: 'Determining query target language'
|
||||
});
|
||||
|
||||
const repositories = await getRepositories();
|
||||
if (!repositories || repositories.length === 0) {
|
||||
const repoSelection = await getRepositorySelection();
|
||||
if (!isValidSelection(repoSelection)) {
|
||||
throw new UserCancellationException('No repositories to query.');
|
||||
}
|
||||
|
||||
@@ -261,7 +217,7 @@ export async function runRemoteQuery(
|
||||
if (!controllerRepo || !REPO_REGEX.test(controllerRepo)) {
|
||||
void logger.log(controllerRepo ? 'Invalid controller repository name.' : 'No controller repository defined.');
|
||||
controllerRepo = await window.showInputBox({
|
||||
title: 'Controller repository in which to display progress and results of remote queries',
|
||||
title: 'Controller repository in which to display progress and results of variant analysis',
|
||||
placeHolder: '<owner>/<repo>',
|
||||
prompt: 'Enter the name of a GitHub repository in the format <owner>/<repo>',
|
||||
ignoreFocusOut: true,
|
||||
@@ -302,26 +258,29 @@ export async function runRemoteQuery(
|
||||
message: 'Sending request'
|
||||
});
|
||||
|
||||
const workflowRunId = await runRemoteQueriesApiRequest(credentials, 'main', language, repositories, owner, repo, base64Pack, dryRun);
|
||||
const actionBranch = getActionBranch();
|
||||
const apiResponse = await runRemoteQueriesApiRequest(credentials, actionBranch, language, repoSelection, owner, repo, base64Pack, dryRun);
|
||||
const queryStartTime = Date.now();
|
||||
const queryMetadata = await tryGetQueryMetadata(cliServer, queryFile);
|
||||
|
||||
if (dryRun) {
|
||||
return { queryDirPath: remoteQueryDir.path };
|
||||
} else {
|
||||
if (!workflowRunId) {
|
||||
if (!apiResponse) {
|
||||
return;
|
||||
}
|
||||
|
||||
const workflowRunId = apiResponse.workflow_run_id;
|
||||
const repositoryCount = apiResponse.repositories_queried.length;
|
||||
const remoteQuery = await buildRemoteQueryEntity(
|
||||
repositories,
|
||||
queryFile,
|
||||
queryMetadata,
|
||||
owner,
|
||||
repo,
|
||||
queryStartTime,
|
||||
workflowRunId,
|
||||
language);
|
||||
language,
|
||||
repositoryCount);
|
||||
|
||||
// don't return the path because it has been deleted
|
||||
return { query: remoteQuery };
|
||||
@@ -341,15 +300,31 @@ async function runRemoteQueriesApiRequest(
|
||||
credentials: Credentials,
|
||||
ref: string,
|
||||
language: string,
|
||||
repositories: string[],
|
||||
repoSelection: RepositorySelection,
|
||||
owner: string,
|
||||
repo: string,
|
||||
queryPackBase64: string,
|
||||
dryRun = false
|
||||
): Promise<void | number> {
|
||||
): Promise<void | QueriesResponse> {
|
||||
const data = {
|
||||
ref,
|
||||
language,
|
||||
repositories: repoSelection.repositories ?? undefined,
|
||||
repository_lists: repoSelection.repositoryLists ?? undefined,
|
||||
repository_owners: repoSelection.owners ?? undefined,
|
||||
query_pack: queryPackBase64,
|
||||
};
|
||||
|
||||
if (dryRun) {
|
||||
void showAndLogInformationMessage('[DRY RUN] Would have sent request. See extension log for the payload.');
|
||||
void logger.log(JSON.stringify({ ref, language, repositories, owner, repo, queryPackBase64: queryPackBase64.substring(0, 100) + '... ' + queryPackBase64.length + ' bytes' }));
|
||||
void logger.log(JSON.stringify({
|
||||
owner,
|
||||
repo,
|
||||
data: {
|
||||
...data,
|
||||
queryPackBase64: queryPackBase64.substring(0, 100) + '... ' + queryPackBase64.length + ' bytes'
|
||||
}
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -360,57 +335,67 @@ async function runRemoteQueriesApiRequest(
|
||||
{
|
||||
owner,
|
||||
repo,
|
||||
data: {
|
||||
ref,
|
||||
language,
|
||||
repositories,
|
||||
query_pack: queryPackBase64,
|
||||
}
|
||||
data
|
||||
}
|
||||
);
|
||||
const workflowRunId = response.data.workflow_run_id;
|
||||
void showAndLogInformationMessage(`Successfully scheduled runs. [Click here to see the progress](https://github.com/${owner}/${repo}/actions/runs/${workflowRunId}).`);
|
||||
return workflowRunId;
|
||||
} catch (error) {
|
||||
return await attemptRerun(error, credentials, ref, language, repositories, owner, repo, queryPackBase64, dryRun);
|
||||
const { popupMessage, logMessage } = parseResponse(owner, repo, response.data);
|
||||
void showAndLogInformationMessage(popupMessage, { fullMessage: logMessage });
|
||||
return response.data;
|
||||
} catch (error: any) {
|
||||
if (error.status === 404) {
|
||||
void showAndLogErrorMessage(`Controller repository was not found. Please make sure it's a valid repo name.${eol}`);
|
||||
} else {
|
||||
void showAndLogErrorMessage(getErrorMessage(error));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Attempts to rerun the query on only the valid repositories */
|
||||
export async function attemptRerun(
|
||||
error: any,
|
||||
credentials: Credentials,
|
||||
ref: string,
|
||||
language: string,
|
||||
repositories: string[],
|
||||
owner: string,
|
||||
repo: string,
|
||||
queryPackBase64: string,
|
||||
dryRun = false
|
||||
) {
|
||||
if (typeof error.message === 'string' && error.message.includes('Some repositories were invalid')) {
|
||||
const invalidRepos = error?.response?.data?.invalid_repos || [];
|
||||
void logger.log('Unable to run query on some of the specified repositories');
|
||||
if (invalidRepos.length > 0) {
|
||||
void logger.log(`Invalid repos: ${invalidRepos.join(', ')}`);
|
||||
}
|
||||
const eol = os.EOL;
|
||||
const eol2 = os.EOL + os.EOL;
|
||||
|
||||
if (invalidRepos.length === repositories.length) {
|
||||
// Every repo is invalid in some way
|
||||
void showAndLogErrorMessage('Unable to run query on any of the specified repositories.');
|
||||
return;
|
||||
}
|
||||
// exported for testing only
|
||||
export function parseResponse(owner: string, repo: string, response: QueriesResponse) {
|
||||
const repositoriesQueried = response.repositories_queried;
|
||||
const repositoryCount = repositoriesQueried.length;
|
||||
|
||||
const popupMessage = 'Unable to run query on some of the specified repositories. [See logs for more details](command:codeQL.showLogs).';
|
||||
const rerunQuery = await showInformationMessageWithAction(popupMessage, 'Rerun on the valid repositories only');
|
||||
if (rerunQuery) {
|
||||
const validRepositories = repositories.filter(r => !invalidRepos.includes(r));
|
||||
void logger.log(`Rerunning query on set of valid repositories: ${JSON.stringify(validRepositories)}`);
|
||||
return await runRemoteQueriesApiRequest(credentials, ref, language, validRepositories, owner, repo, queryPackBase64, dryRun);
|
||||
const popupMessage = `Successfully scheduled runs on ${pluralize(repositoryCount, 'repository', 'repositories')}. [Click here to see the progress](https://github.com/${owner}/${repo}/actions/runs/${response.workflow_run_id}).`
|
||||
+ (response.errors ? `${eol2}Some repositories could not be scheduled. See extension log for details.` : '');
|
||||
|
||||
let logMessage = `Successfully scheduled runs on ${pluralize(repositoryCount, 'repository', 'repositories')}. See https://github.com/${owner}/${repo}/actions/runs/${response.workflow_run_id}.`;
|
||||
logMessage += `${eol2}Repositories queried:${eol}${repositoriesQueried.join(', ')}`;
|
||||
if (response.errors) {
|
||||
const { invalid_repositories, repositories_without_database, private_repositories, cutoff_repositories, cutoff_repositories_count } = response.errors;
|
||||
logMessage += `${eol2}Some repositories could not be scheduled.`;
|
||||
if (invalid_repositories?.length) {
|
||||
logMessage += `${eol2}${pluralize(invalid_repositories.length, 'repository', 'repositories')} invalid and could not be found:${eol}${invalid_repositories.join(', ')}`;
|
||||
}
|
||||
if (repositories_without_database?.length) {
|
||||
logMessage += `${eol2}${pluralize(repositories_without_database.length, 'repository', 'repositories')} did not have a CodeQL database available:${eol}${repositories_without_database.join(', ')}`;
|
||||
logMessage += `${eol}For each public repository that has not yet been added to the database service, we will try to create a database next time the store is updated.`;
|
||||
}
|
||||
if (private_repositories?.length) {
|
||||
logMessage += `${eol2}${pluralize(private_repositories.length, 'repository', 'repositories')} not public:${eol}${private_repositories.join(', ')}`;
|
||||
logMessage += `${eol}When using a public controller repository, only public repositories can be queried.`;
|
||||
}
|
||||
if (cutoff_repositories_count) {
|
||||
logMessage += `${eol2}${pluralize(cutoff_repositories_count, 'repository', 'repositories')} over the limit for a single request`;
|
||||
if (cutoff_repositories) {
|
||||
logMessage += `:${eol}${cutoff_repositories.join(', ')}`;
|
||||
if (cutoff_repositories_count !== cutoff_repositories.length) {
|
||||
const moreRepositories = cutoff_repositories_count - cutoff_repositories.length;
|
||||
logMessage += `${eol}...${eol}And another ${pluralize(moreRepositories, 'repository', 'repositories')}.`;
|
||||
}
|
||||
} else {
|
||||
logMessage += '.';
|
||||
}
|
||||
logMessage += `${eol}Repositories were selected based on how recently they had been updated.`;
|
||||
}
|
||||
} else {
|
||||
void showAndLogErrorMessage(error);
|
||||
}
|
||||
|
||||
return {
|
||||
popupMessage,
|
||||
logMessage
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -424,37 +409,32 @@ export async function attemptRerun(
|
||||
*/
|
||||
async function ensureNameAndSuite(queryPackDir: string, packRelativePath: string): Promise<void> {
|
||||
const packPath = path.join(queryPackDir, 'qlpack.yml');
|
||||
const qlpack = yaml.safeLoad(await fs.readFile(packPath, 'utf8')) as QlPack;
|
||||
const qlpack = yaml.load(await fs.readFile(packPath, 'utf8')) as QlPack;
|
||||
delete qlpack.defaultSuiteFile;
|
||||
|
||||
qlpack.name = QUERY_PACK_NAME;
|
||||
|
||||
qlpack.defaultSuite = [{
|
||||
description: 'Query suite for remote query'
|
||||
description: 'Query suite for variant analysis'
|
||||
}, {
|
||||
query: packRelativePath.replace(/\\/g, '/')
|
||||
}];
|
||||
await fs.writeFile(packPath, yaml.safeDump(qlpack));
|
||||
await fs.writeFile(packPath, yaml.dump(qlpack));
|
||||
}
|
||||
|
||||
async function buildRemoteQueryEntity(
|
||||
repositories: string[],
|
||||
queryFilePath: string,
|
||||
queryMetadata: QueryMetadata | undefined,
|
||||
controllerRepoOwner: string,
|
||||
controllerRepoName: string,
|
||||
queryStartTime: number,
|
||||
workflowRunId: number,
|
||||
language: string
|
||||
language: string,
|
||||
repositoryCount: number
|
||||
): Promise<RemoteQuery> {
|
||||
// The query name is either the name as specified in the query metadata, or the file name.
|
||||
const queryName = queryMetadata?.name ?? path.basename(queryFilePath);
|
||||
|
||||
const queryRepos = repositories.map(r => {
|
||||
const [owner, repo] = r.split('/');
|
||||
return { owner: owner, name: repo };
|
||||
});
|
||||
|
||||
const queryText = await fs.readFile(queryFilePath, 'utf8');
|
||||
|
||||
return {
|
||||
@@ -466,8 +446,8 @@ async function buildRemoteQueryEntity(
|
||||
owner: controllerRepoOwner,
|
||||
name: controllerRepoName,
|
||||
},
|
||||
repositories: queryRepos,
|
||||
executionStartTime: queryStartTime,
|
||||
actionsWorkflowRunId: workflowRunId
|
||||
actionsWorkflowRunId: workflowRunId,
|
||||
repositoryCount,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,207 +0,0 @@
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueryResult } from './remote-query-result';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
|
||||
export const sampleRemoteQuery: RemoteQuery = {
|
||||
queryName: 'Inefficient regular expression',
|
||||
queryFilePath: '/Users/foo/dev/vscode-codeql-starter/ql/javascript/ql/src/Performance/ReDoS.ql',
|
||||
queryText: '/**\n * @name Inefficient regular expression\n * @description A regular expression that requires exponential time to match certain inputs\n * can be a performance bottleneck, and may be vulnerable to denial-of-service\n * attacks.\n * @kind problem\n * @problem.severity error\n * @security-severity 7.5\n * @precision high\n * @id js/redos\n * @tags security\n * external/cwe/cwe-1333\n * external/cwe/cwe-730\n * external/cwe/cwe-400\n */\n\nimport javascript\nimport semmle.javascript.security.performance.ReDoSUtil\nimport semmle.javascript.security.performance.ExponentialBackTracking\n\nfrom RegExpTerm t, string pump, State s, string prefixMsg\nwhere hasReDoSResult(t, pump, s, prefixMsg)\nselect t,\n "This part of the regular expression may cause exponential backtracking on strings " + prefixMsg +\n "containing many repetitions of \'" + pump + "\'."\n',
|
||||
language: 'javascript',
|
||||
controllerRepository: {
|
||||
owner: 'big-corp',
|
||||
name: 'controller-repo'
|
||||
},
|
||||
repositories: [
|
||||
{
|
||||
owner: 'big-corp',
|
||||
name: 'repo1'
|
||||
},
|
||||
{
|
||||
owner: 'big-corp',
|
||||
name: 'repo2'
|
||||
},
|
||||
{
|
||||
owner: 'big-corp',
|
||||
name: 'repo3'
|
||||
},
|
||||
{
|
||||
owner: 'big-corp',
|
||||
name: 'repo4'
|
||||
},
|
||||
{
|
||||
owner: 'big-corp',
|
||||
name: 'repo5'
|
||||
}
|
||||
],
|
||||
executionStartTime: new Date('2022-01-06T17:02:15.026Z').getTime(),
|
||||
actionsWorkflowRunId: 1662757118
|
||||
};
|
||||
|
||||
export const sampleRemoteQueryResult: RemoteQueryResult = {
|
||||
queryId: 'query123',
|
||||
executionEndTime: new Date('2022-01-06T17:04:37.026Z').getTime(),
|
||||
analysisSummaries: [
|
||||
{
|
||||
nwo: 'big-corp/repo1',
|
||||
resultCount: 85,
|
||||
fileSizeInBytes: 14123,
|
||||
downloadLink: {
|
||||
id: '137697017',
|
||||
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697017',
|
||||
innerFilePath: 'results.sarif',
|
||||
queryId: 'query.ql-123-xyz'
|
||||
}
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo2',
|
||||
resultCount: 20,
|
||||
fileSizeInBytes: 8698,
|
||||
downloadLink: {
|
||||
id: '137697018',
|
||||
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697018',
|
||||
innerFilePath: 'results.sarif',
|
||||
queryId: 'query.ql-123-xyz'
|
||||
}
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo3',
|
||||
resultCount: 8,
|
||||
fileSizeInBytes: 4123,
|
||||
downloadLink: {
|
||||
id: '137697019',
|
||||
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697019',
|
||||
innerFilePath: 'results.sarif',
|
||||
queryId: 'query.ql-123-xyz'
|
||||
}
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo4',
|
||||
resultCount: 3,
|
||||
fileSizeInBytes: 3313,
|
||||
downloadLink: {
|
||||
id: '137697020',
|
||||
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697020',
|
||||
innerFilePath: 'results.sarif',
|
||||
queryId: 'query.ql-123-xyz'
|
||||
}
|
||||
}
|
||||
],
|
||||
analysisFailures: [
|
||||
{
|
||||
nwo: 'big-corp/repo5',
|
||||
error: 'Error message'
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo6',
|
||||
error: 'Error message'
|
||||
},
|
||||
]
|
||||
};
|
||||
|
||||
|
||||
const createAnalysisResults = (n: number) => Array(n).fill(
|
||||
{
|
||||
message: 'This shell command depends on an uncontrolled [absolute path](1).',
|
||||
severity: 'Error',
|
||||
filePath: 'npm-packages/meteor-installer/config.js',
|
||||
codeSnippet: {
|
||||
startLine: 253,
|
||||
endLine: 257,
|
||||
text: ' if (isWindows()) {\n //set for the current session and beyond\n child_process.execSync(`setx path "${meteorPath}/;%path%`);\n return;\n }\n',
|
||||
},
|
||||
highlightedRegion: {
|
||||
startLine: 255,
|
||||
startColumn: 28,
|
||||
endColumn: 62
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
export const sampleAnalysesResultsStage1: AnalysisResults[] = [
|
||||
{
|
||||
nwo: 'big-corp/repo1',
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo2',
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo3',
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
},
|
||||
// No entries for repo4
|
||||
];
|
||||
|
||||
export const sampleAnalysesResultsStage2: AnalysisResults[] = [
|
||||
{
|
||||
nwo: 'big-corp/repo1',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(85)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo2',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(20)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo3',
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo4',
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
},
|
||||
];
|
||||
|
||||
export const sampleAnalysesResultsStage3: AnalysisResults[] = [
|
||||
{
|
||||
nwo: 'big-corp/repo1',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(85)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo2',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(20)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo3',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(8)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo4',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(3)
|
||||
},
|
||||
];
|
||||
|
||||
export const sampleAnalysesResultsWithFailure: AnalysisResults[] = [
|
||||
{
|
||||
nwo: 'big-corp/repo1',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(85)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo2',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(20)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo3',
|
||||
status: 'Failed',
|
||||
results: []
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo4',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(3)
|
||||
},
|
||||
];
|
||||
@@ -1,130 +1,100 @@
|
||||
import * as sarif from 'sarif';
|
||||
import { parseSarifPlainTextMessage, parseSarifRegion } from '../pure/sarif-utils';
|
||||
|
||||
import { AnalysisAlert, ResultSeverity } from './shared/analysis-result';
|
||||
import {
|
||||
AnalysisAlert,
|
||||
CodeFlow,
|
||||
AnalysisMessage,
|
||||
AnalysisMessageToken,
|
||||
ResultSeverity,
|
||||
ThreadFlow,
|
||||
CodeSnippet,
|
||||
HighlightedRegion
|
||||
} from './shared/analysis-result';
|
||||
|
||||
const defaultSeverity = 'Warning';
|
||||
|
||||
export function extractAnalysisAlerts(
|
||||
sarifLog: sarif.Log
|
||||
sarifLog: sarif.Log,
|
||||
fileLinkPrefix: string
|
||||
): {
|
||||
alerts: AnalysisAlert[],
|
||||
errors: string[]
|
||||
} {
|
||||
if (!sarifLog) {
|
||||
return { alerts: [], errors: ['No SARIF log was found'] };
|
||||
}
|
||||
|
||||
if (!sarifLog.runs) {
|
||||
return { alerts: [], errors: ['No runs found in the SARIF file'] };
|
||||
}
|
||||
|
||||
const errors: string[] = [];
|
||||
const alerts: AnalysisAlert[] = [];
|
||||
const errors: string[] = [];
|
||||
|
||||
for (const run of sarifLog.runs) {
|
||||
if (!run.results) {
|
||||
errors.push('No results found in the SARIF run');
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const result of run.results) {
|
||||
const message = result.message?.text;
|
||||
if (!message) {
|
||||
errors.push('No message found in the SARIF result');
|
||||
for (const run of sarifLog.runs ?? []) {
|
||||
for (const result of run.results ?? []) {
|
||||
try {
|
||||
alerts.push(...extractResultAlerts(run, result, fileLinkPrefix));
|
||||
} catch (e) {
|
||||
errors.push(`Error when processing SARIF result: ${e}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const severity = tryGetSeverity(run, result) || defaultSeverity;
|
||||
|
||||
if (!result.locations) {
|
||||
errors.push('No locations found in the SARIF result');
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const location of result.locations) {
|
||||
const contextRegion = location.physicalLocation?.contextRegion;
|
||||
if (!contextRegion) {
|
||||
errors.push('No context region found in the SARIF result location');
|
||||
continue;
|
||||
}
|
||||
if (contextRegion.startLine === undefined) {
|
||||
errors.push('No start line set for a result context region');
|
||||
continue;
|
||||
}
|
||||
if (contextRegion.endLine === undefined) {
|
||||
errors.push('No end line set for a result context region');
|
||||
continue;
|
||||
}
|
||||
if (!contextRegion.snippet?.text) {
|
||||
errors.push('No text set for a result context region');
|
||||
continue;
|
||||
}
|
||||
|
||||
const region = location.physicalLocation?.region;
|
||||
if (!region) {
|
||||
errors.push('No region found in the SARIF result location');
|
||||
continue;
|
||||
}
|
||||
if (region.startLine === undefined) {
|
||||
errors.push('No start line set for a result region');
|
||||
continue;
|
||||
}
|
||||
if (region.startColumn === undefined) {
|
||||
errors.push('No start column set for a result region');
|
||||
continue;
|
||||
}
|
||||
if (region.endColumn === undefined) {
|
||||
errors.push('No end column set for a result region');
|
||||
continue;
|
||||
}
|
||||
|
||||
const filePath = location.physicalLocation?.artifactLocation?.uri;
|
||||
if (!filePath) {
|
||||
errors.push('No file path found in the SARIF result location');
|
||||
continue;
|
||||
}
|
||||
|
||||
const analysisAlert = {
|
||||
message,
|
||||
filePath,
|
||||
severity,
|
||||
codeSnippet: {
|
||||
startLine: contextRegion.startLine,
|
||||
endLine: contextRegion.endLine,
|
||||
text: contextRegion.snippet.text
|
||||
},
|
||||
highlightedRegion: {
|
||||
startLine: region.startLine,
|
||||
startColumn: region.startColumn,
|
||||
endLine: region.endLine,
|
||||
endColumn: region.endColumn
|
||||
}
|
||||
};
|
||||
|
||||
const validationErrors = getAlertValidationErrors(analysisAlert);
|
||||
      if (validationErrors.length > 0) {
        errors.push(...validationErrors);
        continue;
      }

      alerts.push(analysisAlert);
    }
  }
}

  return { alerts, errors };
}

export function tryGetSeverity(
  sarifRun: sarif.Run,
  result: sarif.Result
): ResultSeverity | undefined {
  if (!sarifRun || !result) {
    return undefined;
function extractResultAlerts(
  run: sarif.Run,
  result: sarif.Result,
  fileLinkPrefix: string
): AnalysisAlert[] {
  const alerts: AnalysisAlert[] = [];

  const message = getMessage(result, fileLinkPrefix);
  const rule = tryGetRule(run, result);
  const severity = tryGetSeverity(run, result, rule) || defaultSeverity;
  const codeFlows = getCodeFlows(result, fileLinkPrefix);
  const shortDescription = getShortDescription(rule, message!);

  for (const location of result.locations ?? []) {
    const physicalLocation = location.physicalLocation!;
    const filePath = physicalLocation.artifactLocation!.uri!;
    const codeSnippet = getCodeSnippet(physicalLocation.contextRegion, physicalLocation.region);
    const highlightedRegion = physicalLocation.region
      ? getHighlightedRegion(physicalLocation.region)
      : undefined;

    const analysisAlert: AnalysisAlert = {
      message,
      shortDescription,
      fileLink: {
        fileLinkPrefix,
        filePath,
      },
      severity,
      codeSnippet,
      highlightedRegion,
      codeFlows: codeFlows
    };

    alerts.push(analysisAlert);
  }

  const rule = tryGetRule(sarifRun, result);
  if (!rule) {
  return alerts;
}

function getShortDescription(
  rule: sarif.ReportingDescriptor | undefined,
  message: AnalysisMessage,
): string {
  if (rule?.shortDescription?.text) {
    return rule.shortDescription.text;
  }

  return message.tokens.map(token => token.text).join('');
}

export function tryGetSeverity(
  sarifRun: sarif.Run,
  result: sarif.Result,
  rule: sarif.ReportingDescriptor | undefined
): ResultSeverity | undefined {
  if (!sarifRun || !result || !rule) {
    return undefined;
  }

@@ -186,18 +156,98 @@ export function tryGetRule(
  return undefined;
}

function getAlertValidationErrors(alert: AnalysisAlert): string[] {
  const errors = [];
function getCodeSnippet(region?: sarif.Region, alternateRegion?: sarif.Region): CodeSnippet | undefined {
  region = region ?? alternateRegion;

  if (alert.codeSnippet.startLine > alert.codeSnippet.endLine) {
    errors.push('The code snippet start line is greater than the end line');
  if (!region) {
    return undefined;
  }

  const highlightedRegion = alert.highlightedRegion;
  if (highlightedRegion.endLine === highlightedRegion.startLine &&
    highlightedRegion.endColumn < highlightedRegion.startColumn) {
    errors.push('The highlighted region end column is greater than the start column');
  }
  const text = region.snippet?.text || '';
  const { startLine, endLine } = parseSarifRegion(region);

  return errors;
  return {
    startLine,
    endLine,
    text
  };
}

function getHighlightedRegion(region: sarif.Region): HighlightedRegion {
  const { startLine, startColumn, endLine, endColumn } = parseSarifRegion(region);

  return {
    startLine,
    startColumn,
    endLine,

    // parseSarifRegion currently shifts the end column by 1 to account
    // for the way vscode counts columns so we need to shift it back.
    endColumn: endColumn + 1
  };
}

function getCodeFlows(
  result: sarif.Result,
  fileLinkPrefix: string
): CodeFlow[] {
  const codeFlows = [];

  if (result.codeFlows) {
    for (const codeFlow of result.codeFlows) {
      const threadFlows = [];

      for (const threadFlow of codeFlow.threadFlows) {
        for (const threadFlowLocation of threadFlow.locations) {
          const physicalLocation = threadFlowLocation!.location!.physicalLocation!;
          const filePath = physicalLocation!.artifactLocation!.uri!;
          const codeSnippet = getCodeSnippet(physicalLocation.contextRegion, physicalLocation.region);
          const highlightedRegion = physicalLocation.region
            ? getHighlightedRegion(physicalLocation.region)
            : undefined;

          threadFlows.push({
            fileLink: {
              fileLinkPrefix,
              filePath,
            },
            codeSnippet,
            highlightedRegion
          } as ThreadFlow);
        }
      }

      codeFlows.push({ threadFlows } as CodeFlow);
    }
  }

  return codeFlows;
}

function getMessage(result: sarif.Result, fileLinkPrefix: string): AnalysisMessage {
  const tokens: AnalysisMessageToken[] = [];

  const messageText = result.message!.text!;
  const messageParts = parseSarifPlainTextMessage(messageText);

  for (const messagePart of messageParts) {
    if (typeof messagePart === 'string') {
      tokens.push({ t: 'text', text: messagePart });
    } else {
      const relatedLocation = result.relatedLocations!.find(rl => rl.id === messagePart.dest);
      tokens.push({
        t: 'location',
        text: messagePart.text,
        location: {
          fileLink: {
            fileLinkPrefix: fileLinkPrefix,
            filePath: relatedLocation!.physicalLocation!.artifactLocation!.uri!,
          },
          highlightedRegion: getHighlightedRegion(relatedLocation!.physicalLocation!.region!),
        }
      });
    }
  }

  return { tokens };
}
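For orientation, here is a rough sketch of the kind of SARIF result these helpers walk over. The field names mirror the `sarif.Result` accesses in the code above (`message.text`, `locations[].physicalLocation`, `relatedLocations`); the concrete values and the repository link prefix are invented placeholders, not data from this PR.

```ts
// Hypothetical SARIF result shaped like what extractResultAlerts expects; values are made up.
const result: sarif.Result = {
  message: { text: 'Unsanitized value flows into [sink](1).' },
  relatedLocations: [{
    id: 1,
    physicalLocation: {
      artifactLocation: { uri: 'src/app.ts' },
      region: { startLine: 10, startColumn: 3, endLine: 10, endColumn: 20 },
    },
  }],
  locations: [{
    physicalLocation: {
      artifactLocation: { uri: 'src/app.ts' },
      contextRegion: { startLine: 8, endLine: 12, snippet: { text: '/* surrounding lines */' } },
      region: { startLine: 10, startColumn: 3, endColumn: 20 },
    },
  }],
};

// Inside this module the extraction would then be driven by something like:
// const alerts = extractResultAlerts(run, result, 'https://github.com/octo-org/repo/blob/<sha>');
```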
@@ -1,17 +1,38 @@
import { RawResultSet, ResultSetSchema } from '../../pure/bqrs-cli-types';

export type AnalysisResultStatus = 'InProgress' | 'Completed' | 'Failed';

export interface AnalysisResults {
  nwo: string;
  status: AnalysisResultStatus;
  results: AnalysisAlert[];
  interpretedResults: AnalysisAlert[];
  rawResults?: AnalysisRawResults;
  resultCount: number,
  starCount?: number,
  lastUpdated?: number,
}

export interface AnalysisRawResults {
  schema: ResultSetSchema;
  resultSet: RawResultSet;
  fileLinkPrefix: string;
  sourceLocationPrefix: string;
  capped: boolean;
}

export interface AnalysisAlert {
  message: string;
  message: AnalysisMessage;
  shortDescription: string;
  severity: ResultSeverity;
  fileLink: FileLink;
  codeSnippet?: CodeSnippet;
  highlightedRegion?: HighlightedRegion;
  codeFlows: CodeFlow[];
}

export interface FileLink {
  fileLinkPrefix: string;
  filePath: string;
  codeSnippet: CodeSnippet
  highlightedRegion: HighlightedRegion
}

export interface CodeSnippet {
@@ -23,8 +44,55 @@ export interface CodeSnippet {
export interface HighlightedRegion {
  startLine: number;
  startColumn: number;
  endLine: number | undefined;
  endLine: number;
  endColumn: number;
}

export interface CodeFlow {
  threadFlows: ThreadFlow[];
}

export interface ThreadFlow {
  fileLink: FileLink;
  codeSnippet: CodeSnippet;
  highlightedRegion?: HighlightedRegion;
  message?: AnalysisMessage;
}

export interface AnalysisMessage {
  tokens: AnalysisMessageToken[]
}

export type AnalysisMessageToken =
  | AnalysisMessageTextToken
  | AnalysisMessageLocationToken;

export interface AnalysisMessageTextToken {
  t: 'text';
  text: string;
}

export interface AnalysisMessageLocationToken {
  t: 'location';
  text: string;
  location: {
    fileLink: FileLink;
    highlightedRegion?: HighlightedRegion;
  };
}

export type ResultSeverity = 'Recommendation' | 'Warning' | 'Error';

/**
 * Returns the number of (raw + interpreted) results for an analysis.
 */
export const getAnalysisResultCount = (analysisResults: AnalysisResults): number => {
  const rawResultCount = analysisResults.rawResults?.resultSet.rows.length || 0;
  return analysisResults.interpretedResults.length + rawResultCount;
};

/**
 * Returns the total number of results for an analysis by adding all individual repo results.
 */
export const sumAnalysesResults = (analysesResults: AnalysisResults[]) =>
  analysesResults.reduce((acc, curr) => acc + getAnalysisResultCount(curr), 0);
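As a quick illustration of how the two counting helpers compose, here is a hedged sketch; the `AnalysisResults` entries are placeholders and `someAlerts` / `someRawResults` stand in for real values rather than anything defined in this PR.

```ts
// Hypothetical data: one repo with interpreted alerts only, one with raw results only.
const analysesResults: AnalysisResults[] = [
  { nwo: 'octo-org/repo-a', status: 'Completed', interpretedResults: someAlerts, resultCount: 2 },
  { nwo: 'octo-org/repo-b', status: 'Completed', interpretedResults: [], rawResults: someRawResults, resultCount: 5 },
];

const perRepo = getAnalysisResultCount(analysesResults[0]); // interpreted alerts + raw rows for one repo
const total = sumAnalysesResults(analysesResults);          // the same count summed over every repo
```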
@@ -2,24 +2,29 @@ import { DownloadLink } from '../download-link';
import { AnalysisFailure } from './analysis-failure';

export interface RemoteQueryResult {
  queryTitle: string;
  queryFileName: string;
  queryFilePath: string;
  queryText: string;
  language: string;
  workflowRunUrl: string;
  totalRepositoryCount: number;
  affectedRepositoryCount: number;
  totalResultCount: number;
  executionTimestamp: string;
  executionDuration: string;
  analysisSummaries: AnalysisSummary[];
  analysisFailures: AnalysisFailure[];
  queryId: string,
  queryTitle: string,
  queryFileName: string,
  queryFilePath: string,
  queryText: string,
  language: string,
  workflowRunUrl: string,
  totalRepositoryCount: number,
  affectedRepositoryCount: number,
  totalResultCount: number,
  executionTimestamp: string,
  executionDuration: string,
  analysisSummaries: AnalysisSummary[],
  analysisFailures: AnalysisFailure[],
}

export interface AnalysisSummary {
  nwo: string,
  databaseSha: string,
  resultCount: number,
  sourceLocationPrefix: string,
  downloadLink: DownloadLink,
  fileSize: string,
  starCount?: number,
  lastUpdated?: number,
}

@@ -0,0 +1,5 @@
// The maximum number of raw results to read from a BQRS file.
// This is used to avoid reading the entire result set into memory
// and trying to render it on screen. Users will be warned if the
// results are capped.
export const MAX_RAW_RESULTS = 500;
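The new constant is presumably applied wherever raw BQRS rows are decoded for the view (the `decodedRows` name below is an assumption, not code from this PR); the point is that at most `MAX_RAW_RESULTS` rows are kept and the `capped` flag on `AnalysisRawResults` records the truncation.

```ts
import { MAX_RAW_RESULTS } from '../shared/result-limits';

// Hypothetical decode step: keep at most MAX_RAW_RESULTS rows and remember whether we truncated.
const capped = decodedRows.length > MAX_RAW_RESULTS;
const rawResults = {
  rows: decodedRows.slice(0, MAX_RAW_RESULTS),
  capped, // later drives the "Some repositories have more than ... results" message in the view
};
```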
@@ -1,15 +1,25 @@
import * as React from 'react';
import { AnalysisAlert } from '../shared/analysis-result';
import CodePaths from './CodePaths';
import FileCodeSnippet from './FileCodeSnippet';

const AnalysisAlertResult = ({ alert }: { alert: AnalysisAlert }) => {
  const showPathsLink = alert.codeFlows.length > 0;

  return <FileCodeSnippet
    filePath={alert.filePath}
    fileLink={alert.fileLink}
    codeSnippet={alert.codeSnippet}
    highlightedRegion={alert.highlightedRegion}
    severity={alert.severity}
    message={alert.message}
    messageChildren={
      showPathsLink && <CodePaths
        codeFlows={alert.codeFlows}
        ruleDescription={alert.shortDescription}
        severity={alert.severity}
        message={alert.message}
      />
    }
  />;
};

@@ -1,29 +0,0 @@
import * as React from 'react';
import styled from 'styled-components';

const BadgeContainer = styled.span`
  justify-content: center;
  align-items: center;
  min-height: 100vh;
  padding-left: 0.2em;
`;

const BadgeText = styled.span`
  display: inline-block;
  min-width: 1.5em;
  padding: 0.3em;
  border-radius: 35%;
  font-size: x-small;
  text-align: center;
  background: var(--vscode-badge-background);
  color: var(--vscode-badge-foreground);
  border-color: var(--vscode-badge-background);
`;

const Badge = ({ text }: { text: string }) => (
  <BadgeContainer>
    <BadgeText>{text}</BadgeText>
  </BadgeContainer>
);

export default Badge;
178  extensions/ql-vscode/src/remote-queries/view/CodePaths.tsx  Normal file
@@ -0,0 +1,178 @@
import { XCircleIcon } from '@primer/octicons-react';
import { Overlay } from '@primer/react';
import { VSCodeDropdown, VSCodeLink, VSCodeOption, VSCodeTag } from '@vscode/webview-ui-toolkit/react';
import * as React from 'react';
import { ChangeEvent, useRef, useState } from 'react';
import styled from 'styled-components';
import { CodeFlow, AnalysisMessage, ResultSeverity } from '../shared/analysis-result';
import FileCodeSnippet from './FileCodeSnippet';
import SectionTitle from './SectionTitle';
import VerticalSpace from './VerticalSpace';

const StyledCloseButton = styled.button`
  position: absolute;
  top: 1em;
  right: 4em;
  background-color: var(--vscode-editor-background);
  color: var(--vscode-editor-foreground);
  border: none;
  &:focus-visible {
    outline: none
  }
`;

const OverlayContainer = styled.div`
  padding: 1em;
  height: 100%;
  width: 100%;
  padding: 2em;
  position: fixed;
  top: 0;
  left: 0;
  background-color: var(--vscode-editor-background);
  color: var(--vscode-editor-foreground);
  overflow-y: scroll;
`;

const CloseButton = ({ onClick }: { onClick: () => void }) => (
  <StyledCloseButton onClick={onClick} tabIndex={-1} >
    <XCircleIcon size={24} />
  </StyledCloseButton>
);

const CodePath = ({
  codeFlow,
  message,
  severity
}: {
  codeFlow: CodeFlow;
  message: AnalysisMessage;
  severity: ResultSeverity;
}) => {
  return <>
    {codeFlow.threadFlows.map((threadFlow, index) =>
      <div key={`thread-flow-${index}`} style={{ maxWidth: '55em' }}>
        {index !== 0 && <VerticalSpace size={3} />}

        <div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center' }}>
          <div style={{ flexGrow: 1, padding: 0, border: 'none' }}>
            <SectionTitle>Step {index + 1}</SectionTitle>
          </div>
          {index === 0 &&
            <div style={{ padding: 0, border: 'none' }}>
              <VSCodeTag>Source</VSCodeTag>
            </div>
          }
          {index === codeFlow.threadFlows.length - 1 &&
            <div style={{ padding: 0, border: 'none' }}>
              <VSCodeTag>Sink</VSCodeTag>
            </div>
          }
        </div>

        <VerticalSpace size={2} />
        <FileCodeSnippet
          fileLink={threadFlow.fileLink}
          codeSnippet={threadFlow.codeSnippet}
          highlightedRegion={threadFlow.highlightedRegion}
          severity={severity}
          message={index === codeFlow.threadFlows.length - 1 ? message : threadFlow.message} />
      </div>
    )}
  </>;
};

const getCodeFlowName = (codeFlow: CodeFlow) => {
  const filePath = codeFlow.threadFlows[codeFlow.threadFlows.length - 1].fileLink.filePath;
  return filePath.substring(filePath.lastIndexOf('/') + 1);
};

const Menu = ({
  codeFlows,
  setSelectedCodeFlow
}: {
  codeFlows: CodeFlow[],
  setSelectedCodeFlow: (value: React.SetStateAction<CodeFlow>) => void
}) => {
  return <VSCodeDropdown
    onChange={(event: ChangeEvent<HTMLSelectElement>) => {
      const selectedOption = event.target;
      const selectedIndex = selectedOption.value as unknown as number;
      setSelectedCodeFlow(codeFlows[selectedIndex]);
    }}
  >
    {codeFlows.map((codeFlow, index) =>
      <VSCodeOption
        key={`codeflow-${index}'`}
        value={index}
      >
        {getCodeFlowName(codeFlow)}
      </VSCodeOption>
    )}
  </VSCodeDropdown>;
};

const CodePaths = ({
  codeFlows,
  ruleDescription,
  message,
  severity
}: {
  codeFlows: CodeFlow[],
  ruleDescription: string,
  message: AnalysisMessage,
  severity: ResultSeverity
}) => {
  const [isOpen, setIsOpen] = useState(false);
  const [selectedCodeFlow, setSelectedCodeFlow] = useState(codeFlows[0]);

  const anchorRef = useRef<HTMLDivElement>(null);
  const linkRef = useRef<HTMLAnchorElement>(null);

  const closeOverlay = () => setIsOpen(false);

  return (
    <div ref={anchorRef}>
      <VSCodeLink
        onClick={() => setIsOpen(true)}
        ref={linkRef}
        sx={{ cursor: 'pointer' }}>
        Show paths
      </VSCodeLink>
      {isOpen && (
        <Overlay
          returnFocusRef={linkRef}
          onEscape={closeOverlay}
          onClickOutside={closeOverlay}
          anchorSide="outside-top">
          <OverlayContainer>
            <CloseButton onClick={closeOverlay} />

            <SectionTitle>{ruleDescription}</SectionTitle>
            <VerticalSpace size={2} />

            <div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center' }}>
              <div style={{ padding: 0, border: 0 }}>
                {codeFlows.length} paths available: {selectedCodeFlow.threadFlows.length} steps in
              </div>
              <div style={{ flexGrow: 1, padding: 0, paddingLeft: '0.2em', border: 'none' }}>
                <Menu codeFlows={codeFlows} setSelectedCodeFlow={setSelectedCodeFlow} />
              </div>
            </div>

            <VerticalSpace size={2} />
            <CodePath
              codeFlow={selectedCodeFlow}
              severity={severity}
              message={message} />

            <VerticalSpace size={3} />

          </OverlayContainer>
        </Overlay>
      )}
    </div>
  );
};

export default CodePaths;
@@ -1,19 +1,15 @@
import { Spinner } from '@primer/react';
import { VSCodeProgressRing } from '@vscode/webview-ui-toolkit/react';
import * as React from 'react';
import styled from 'styled-components';

const SpinnerContainer = styled.span`
  vertical-align: middle;

  svg {
    width: 0.8em;
    height: 0.8em;
  }
  display: inline-block;
`;

const DownloadSpinner = () => (
  <SpinnerContainer>
    <Spinner size="small" />
    <VSCodeProgressRing style={{ height: '0.8em', width: '0.8em' }} />
  </SpinnerContainer>
);
@@ -1,12 +1,14 @@
import * as React from 'react';
import styled from 'styled-components';
import { CodeSnippet, HighlightedRegion, ResultSeverity } from '../shared/analysis-result';
import { Box, Link } from '@primer/react';
import { CodeSnippet, FileLink, HighlightedRegion, AnalysisMessage, ResultSeverity } from '../shared/analysis-result';
import VerticalSpace from './VerticalSpace';
import { createRemoteFileRef } from '../../pure/location-link-utils';
import { parseHighlightedLine, shouldHighlightLine } from '../../pure/sarif-utils';
import { VSCodeLink } from '@vscode/webview-ui-toolkit/react';

const borderColor = 'var(--vscode-editor-snippetFinalTabstopHighlightBorder)';
const warningColor = '#966C23';
const highlightColor = '#534425';
const highlightColor = 'var(--vscode-editor-findMatchHighlightBackground)';

const getSeverityColor = (severity: ResultSeverity) => {
  switch (severity) {
@@ -19,24 +21,11 @@ const getSeverityColor = (severity: ResultSeverity) => {
  }
};

const replaceSpaceChar = (text: string) => text.replaceAll(' ', '\u00a0');

const shouldHighlightLine = (lineNumber: number, highlightedRegion: HighlightedRegion) => {
  if (lineNumber < highlightedRegion.startLine) {
    return false;
  }

  if (highlightedRegion.endLine == undefined) {
    return lineNumber == highlightedRegion.startLine;
  }

  return lineNumber <= highlightedRegion.endLine;
};
const replaceSpaceAndTabChar = (text: string) => text.replaceAll(' ', '\u00a0').replaceAll('\t', '\u00a0\u00a0\u00a0\u00a0');

const Container = styled.div`
  font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas, Liberation Mono, monospace;
  font-size: x-small;
  width: 55em;
  font-family: var(--vscode-editor-font-family);
  font-size: small;
`;

const TitleContainer = styled.div`
@@ -57,7 +46,7 @@ const CodeContainer = styled.div`
`;

const MessageText = styled.div`
  font-size: x-small;
  font-size: small;
  padding-left: 0.5em;
`;

@@ -66,155 +55,203 @@ const MessageContainer = styled.div`
  padding-bottom: 0.5em;
`;

const PlainLine = ({ text }: { text: string }) => {
  return <span>{replaceSpaceChar(text)}</span>;
const PlainCode = ({ text }: { text: string }) => {
  return <span>{replaceSpaceAndTabChar(text)}</span>;
};

const HighlightedLine = ({ text }: { text: string }) => {
  return <span style={{ backgroundColor: highlightColor }}>{replaceSpaceChar(text)}</span>;
const HighlightedCode = ({ text }: { text: string }) => {
  return <span style={{ backgroundColor: highlightColor }}>{replaceSpaceAndTabChar(text)}</span>;
};

const Message = ({
  messageText,
  currentLineNumber,
  highlightedRegion,
  borderColor,
  message,
  borderLeftColor,
  children
}: {
  messageText: string,
  currentLineNumber: number,
  highlightedRegion: HighlightedRegion,
  borderColor: string,
  message: AnalysisMessage,
  borderLeftColor: string,
  children: React.ReactNode
}) => {
  if (highlightedRegion.startLine !== currentLineNumber) {
    return <></>;
  }

  return <MessageContainer>
    <Box
      borderColor="border.default"
      borderWidth={1}
      borderStyle="solid"
      borderLeftColor={borderColor}
      borderLeftWidth={3}
      paddingTop="1em"
      paddingBottom="1em">
      <MessageText>
        {messageText}
        {children && <>
          <VerticalSpace size={2} />
          {children}
        </>
  return <div style={{
    borderColor: borderColor,
    borderWidth: '0.1em',
    borderStyle: 'solid',
    borderLeftColor: borderLeftColor,
    borderLeftWidth: '0.3em',
    paddingTop: '1em',
    paddingBottom: '1em'
  }}>
    <MessageText>
      {message.tokens.map((token, index) => {
        switch (token.t) {
          case 'text':
            return <span key={`token-${index}`}>{token.text}</span>;
          case 'location':
            return <VSCodeLink
              style={{ fontFamily: 'var(--vscode-editor-font-family)' }}
              key={`token-${index}`}
              href={createRemoteFileRef(
                token.location.fileLink,
                token.location.highlightedRegion?.startLine,
                token.location.highlightedRegion?.endLine)}>
              {token.text}
            </VSCodeLink>;
          default:
            return <></>;
        }
      </MessageText>
    </Box>

  </MessageContainer>;
      })}
      {children && <>
        <VerticalSpace size={2} />
        {children}
      </>
      }
    </MessageText>
  </div>;
};

const CodeLine = ({
const Code = ({
  line,
  lineNumber,
  highlightedRegion
}: {
  line: string,
  lineNumber: number,
  highlightedRegion: HighlightedRegion
  highlightedRegion?: HighlightedRegion
}) => {
  if (!shouldHighlightLine(lineNumber, highlightedRegion)) {
    return <PlainLine text={line} />;
  if (!highlightedRegion || !shouldHighlightLine(lineNumber, highlightedRegion)) {
    return <PlainCode text={line} />;
  }

  const isSingleLineHighlight = highlightedRegion.endLine === undefined;
  const isFirstHighlightedLine = lineNumber === highlightedRegion.startLine;
  const isLastHighlightedLine = lineNumber === highlightedRegion.endLine;

  const highlightStartColumn = isSingleLineHighlight
    ? highlightedRegion.startColumn
    : isFirstHighlightedLine
      ? highlightedRegion.startColumn
      : 0;

  const highlightEndColumn = isSingleLineHighlight
    ? highlightedRegion.endColumn
    : isLastHighlightedLine
      ? highlightedRegion.endColumn
      : line.length;

  const section1 = line.substring(0, highlightStartColumn - 1);
  const section2 = line.substring(highlightStartColumn - 1, highlightEndColumn - 1);
  const section3 = line.substring(highlightEndColumn - 1, line.length);
  const partiallyHighlightedLine = parseHighlightedLine(line, lineNumber, highlightedRegion);

  return (
    <>
      <PlainLine text={section1} />
      <HighlightedLine text={section2} />
      <PlainLine text={section3} />
      <PlainCode text={partiallyHighlightedLine.plainSection1} />
      <HighlightedCode text={partiallyHighlightedLine.highlightedSection} />
      <PlainCode text={partiallyHighlightedLine.plainSection2} />
    </>
  );
};

const Line = ({
  line,
  lineIndex,
  startingLineIndex,
  highlightedRegion,
  severity,
  message,
  messageChildren
}: {
  line: string,
  lineIndex: number,
  startingLineIndex: number,
  highlightedRegion?: HighlightedRegion,
  severity?: ResultSeverity,
  message?: AnalysisMessage,
  messageChildren?: React.ReactNode,
}) => {
  const shouldShowMessage = message &&
    severity &&
    highlightedRegion &&
    highlightedRegion.endLine == startingLineIndex + lineIndex;

  return <div>
    <div style={{ display: 'flex' }} >
      <div style={{
        borderStyle: 'none',
        paddingTop: '0.01em',
        paddingLeft: '0.5em',
        paddingRight: '0.5em',
        paddingBottom: '0.2em'
      }}>
        {startingLineIndex + lineIndex}
      </div>
      <div style={{
        flexGrow: 1,
        borderStyle: 'none',
        paddingTop: '0.01em',
        paddingLeft: '1.5em',
        paddingRight: '0.5em',
        paddingBottom: '0.2em',
        wordBreak: 'break-word'
      }}>
        <Code
          line={line}
          lineNumber={startingLineIndex + lineIndex}
          highlightedRegion={highlightedRegion} />
      </div>
    </div>
    {shouldShowMessage &&
      <MessageContainer>
        <Message
          message={message}
          borderLeftColor={getSeverityColor(severity)}>
          {messageChildren}
        </Message>
      </MessageContainer>
    }
  </div>;
};

const FileCodeSnippet = ({
  filePath,
  fileLink,
  codeSnippet,
  highlightedRegion,
  severity,
  message,
  messageChildren,
}: {
  filePath: string,
  codeSnippet: CodeSnippet,
  highlightedRegion: HighlightedRegion,
  fileLink: FileLink,
  codeSnippet?: CodeSnippet,
  highlightedRegion?: HighlightedRegion,
  severity?: ResultSeverity,
  message?: string,
  message?: AnalysisMessage,
  messageChildren?: React.ReactNode,
}) => {

  const code = codeSnippet.text.split('\n');
  const startingLine = codeSnippet?.startLine || 0;
  const endingLine = codeSnippet?.endLine || 0;

  const startingLine = codeSnippet.startLine;
  const titleFileUri = createRemoteFileRef(
    fileLink,
    highlightedRegion?.startLine || startingLine,
    highlightedRegion?.endLine || endingLine);

  if (!codeSnippet) {
    return (
      <Container>
        <TitleContainer>
          <VSCodeLink href={titleFileUri}>{fileLink.filePath}</VSCodeLink>
        </TitleContainer>
        {message && severity &&
          <Message
            message={message}
            borderLeftColor={getSeverityColor(severity)}>
            {messageChildren}
          </Message>}
      </Container>
    );
  }

  const code = codeSnippet.text.split('\n');

  return (
    <Container>
      <TitleContainer>
        <Link>{filePath}</Link>
        <VSCodeLink href={titleFileUri}>{fileLink.filePath}</VSCodeLink>
      </TitleContainer>
      <CodeContainer>
        {code.map((line, index) => (
          <div key={index}>
            {message && severity && <Message
              messageText={message}
              currentLineNumber={startingLine + index}
              highlightedRegion={highlightedRegion}
              borderColor={getSeverityColor(severity)}>
              {messageChildren}
            </Message>}
            <Box display="flex">
              <Box
                p={2}
                borderStyle="none"
                paddingTop="0.01em"
                paddingLeft="0.5em"
                paddingRight="0.5em"
                paddingBottom="0.2em">
                {startingLine + index}
              </Box>
              <Box
                flexGrow={1}
                p={2}
                borderStyle="none"
                paddingTop="0.01em"
                paddingLeft="1.5em"
                paddingRight="0.5em"
                paddingBottom="0.2em">
                <CodeLine
                  line={line}
                  lineNumber={startingLine + index}
                  highlightedRegion={highlightedRegion} />
              </Box>
            </Box>
          </div>
          <Line
            key={`line-${index}`}
            line={line}
            lineIndex={index}
            startingLineIndex={startingLine}
            highlightedRegion={highlightedRegion}
            severity={severity}
            message={message}
            messageChildren={messageChildren}
          />
        ))}
      </CodeContainer>
    </Container>
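Judging by how the new Code component destructures its return value, parseHighlightedLine splits a single source line into a plain prefix, the highlighted slice, and a plain suffix using 1-based SARIF columns. The sketch below only illustrates that contract; it is inferred from the removed inline logic above and is not the actual helper in pure/sarif-utils.

```ts
// Illustrative only: mirrors the column arithmetic that used to live inline in CodeLine.
function parseHighlightedLineSketch(line: string, lineNumber: number, region: HighlightedRegion) {
  const startColumn = lineNumber === region.startLine ? region.startColumn : 1;
  const endColumn = lineNumber === region.endLine ? region.endColumn : line.length + 1;
  return {
    plainSection1: line.substring(0, startColumn - 1),
    highlightedSection: line.substring(startColumn - 1, endColumn - 1),
    plainSection2: line.substring(endColumn - 1),
  };
}
```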
38  extensions/ql-vscode/src/remote-queries/view/LastUpdated.tsx  Normal file
@@ -0,0 +1,38 @@
import * as React from 'react';
import { RepoPushIcon } from '@primer/octicons-react';
import styled from 'styled-components';

import { humanizeRelativeTime } from '../../pure/time';

const IconContainer = styled.span`
  flex-grow: 0;
  text-align: right;
  margin-right: 0;
`;

const Duration = styled.span`
  text-align: left;
  width: 8em;
  margin-left: 0.5em;
`;

type Props = { lastUpdated?: number };

const LastUpdated = ({ lastUpdated }: Props) => (
  // lastUpdated will be undefined for older results that were
  // created before the lastUpdated field was added.
  Number.isFinite(lastUpdated) ? (
    <>
      <IconContainer>
        <RepoPushIcon size={16} />
      </IconContainer>
      <Duration>
        {humanizeRelativeTime(lastUpdated)}
      </Duration>
    </>
  ) : (
    <></>
  )
);

export default LastUpdated;

106  extensions/ql-vscode/src/remote-queries/view/RawResultsTable.tsx  Normal file
@@ -0,0 +1,106 @@
import * as React from 'react';
import { VSCodeLink } from '@vscode/webview-ui-toolkit/react';
import { CellValue, RawResultSet, ResultSetSchema } from '../../pure/bqrs-cli-types';
import { tryGetRemoteLocation } from '../../pure/bqrs-utils';
import { useState } from 'react';
import TextButton from './TextButton';
import { convertNonPrintableChars } from '../../text-utils';

const borderColor = 'var(--vscode-editor-snippetFinalTabstopHighlightBorder)';

const numOfResultsInContractedMode = 5;

const Row = ({
  row,
  fileLinkPrefix,
  sourceLocationPrefix
}: {
  row: CellValue[],
  fileLinkPrefix: string,
  sourceLocationPrefix: string
}) => (
  <>
    {row.map((cell, cellIndex) => (
      <div key={cellIndex} style={{
        borderColor: borderColor,
        borderStyle: 'solid',
        justifyContent: 'center',
        alignItems: 'center',
        padding: '0.4rem',
        wordBreak: 'break-word'
      }}>
        <Cell value={cell} fileLinkPrefix={fileLinkPrefix} sourceLocationPrefix={sourceLocationPrefix} />
      </div>
    ))}
  </>
);

const Cell = ({
  value,
  fileLinkPrefix,
  sourceLocationPrefix
}: {
  value: CellValue,
  fileLinkPrefix: string
  sourceLocationPrefix: string
}) => {
  switch (typeof value) {
    case 'string':
    case 'number':
    case 'boolean':
      return <span>{convertNonPrintableChars(value.toString())}</span>;
    case 'object': {
      const url = tryGetRemoteLocation(value.url, fileLinkPrefix, sourceLocationPrefix);
      const safeLabel = convertNonPrintableChars(value.label);
      if (url) {
        return <VSCodeLink href={url}>{safeLabel}</VSCodeLink>;
      } else {
        return <span>{safeLabel}</span>;
      }
    }
  }
};

const RawResultsTable = ({
  schema,
  results,
  fileLinkPrefix,
  sourceLocationPrefix
}: {
  schema: ResultSetSchema,
  results: RawResultSet,
  fileLinkPrefix: string,
  sourceLocationPrefix: string
}) => {
  const [tableExpanded, setTableExpanded] = useState(false);
  const numOfResultsToShow = tableExpanded ? results.rows.length : numOfResultsInContractedMode;
  const showButton = results.rows.length > numOfResultsInContractedMode;

  // Create n equal size columns. We use minmax(0, 1fr) because the
  // minimum width of 1fr is auto, not 0.
  // https://css-tricks.com/equal-width-columns-in-css-grid-are-kinda-weird/
  const gridTemplateColumns = `repeat(${schema.columns.length}, minmax(0, 1fr))`;

  return (
    <>
      <div style={{
        display: 'grid',
        gridTemplateColumns: gridTemplateColumns,
        maxWidth: '45rem',
        padding: '0.4rem'
      }}>
        {results.rows.slice(0, numOfResultsToShow).map((row, rowIndex) => (
          <Row key={rowIndex} row={row} fileLinkPrefix={fileLinkPrefix} sourceLocationPrefix={sourceLocationPrefix} />
        ))}
      </div>
      {
        showButton &&
        <TextButton size='x-small' onClick={() => setTableExpanded(!tableExpanded)}>
          {tableExpanded ? (<span>View less</span>) : (<span>View all</span>)}
        </TextButton>
      }
    </>
  );
};

export default RawResultsTable;
@@ -4,24 +4,30 @@ import * as Rdom from 'react-dom';
import { Flash, ThemeProvider } from '@primer/react';
import { ToRemoteQueriesMessage } from '../../pure/interface-types';
import { AnalysisSummary, RemoteQueryResult } from '../shared/remote-query-result';

import { MAX_RAW_RESULTS } from '../shared/result-limits';
import { vscode } from '../../view/vscode-api';

import { VSCodeBadge, VSCodeButton } from '@vscode/webview-ui-toolkit/react';
import SectionTitle from './SectionTitle';
import VerticalSpace from './VerticalSpace';
import HorizontalSpace from './HorizontalSpace';
import Badge from './Badge';
import ViewTitle from './ViewTitle';
import DownloadButton from './DownloadButton';
import { AnalysisResults } from '../shared/analysis-result';
import { AnalysisResults, getAnalysisResultCount } from '../shared/analysis-result';
import DownloadSpinner from './DownloadSpinner';
import CollapsibleItem from './CollapsibleItem';
import { AlertIcon, CodeSquareIcon, FileCodeIcon, FileSymlinkFileIcon, RepoIcon, TerminalIcon } from '@primer/octicons-react';
import { AlertIcon, CodeSquareIcon, FileCodeIcon, RepoIcon, TerminalIcon } from '@primer/octicons-react';
import AnalysisAlertResult from './AnalysisAlertResult';
import RawResultsTable from './RawResultsTable';
import RepositoriesSearch from './RepositoriesSearch';
import StarCount from './StarCount';
import SortRepoFilter, { Sort, sorter } from './SortRepoFilter';
import LastUpdated from './LastUpdated';
import RepoListCopyButton from './RepoListCopyButton';

const numOfReposInContractedMode = 10;

const emptyQueryResult: RemoteQueryResult = {
  queryId: '',
  queryTitle: '',
  queryFileName: '',
  queryFilePath: '',
@@ -51,13 +57,6 @@ const downloadAllAnalysesResults = (query: RemoteQueryResult) => {
  });
};

const viewAnalysisResults = (analysisSummary: AnalysisSummary) => {
  vscode.postMessage({
    t: 'remoteQueryViewAnalysisResults',
    analysisSummary
  });
};

const openQueryFile = (queryResult: RemoteQueryResult) => {
  vscode.postMessage({
    t: 'openFile',
@@ -72,14 +71,18 @@ const openQueryTextVirtualFile = (queryResult: RemoteQueryResult) => {
  });
};

function createResultsDescription(queryResult: RemoteQueryResult) {
  const reposCount = `${queryResult.totalRepositoryCount} ${queryResult.totalRepositoryCount === 1 ? 'repository' : 'repositories'}`;
  return `${queryResult.totalResultCount} results from running against ${reposCount} (${queryResult.executionDuration}), ${queryResult.executionTimestamp}`;
}

const sumAnalysesResults = (analysesResults: AnalysisResults[]) =>
  analysesResults.reduce((acc, curr) => acc + curr.results.length, 0);
  analysesResults.reduce((acc, curr) => acc + getAnalysisResultCount(curr), 0);

const QueryInfo = (queryResult: RemoteQueryResult) => (
  <>
    <VerticalSpace size={1} />
    {queryResult.totalResultCount} results from running against {queryResult.totalRepositoryCount} repositories
    ({queryResult.executionDuration}), {queryResult.executionTimestamp}
    {createResultsDescription(queryResult)}
    <VerticalSpace size={1} />
    <span>
      <a className="vscode-codeql__query-info-link" href="#" onClick={() => openQueryFile(queryResult)}>
@@ -129,10 +132,14 @@ const Failures = (queryResult: RemoteQueryResult) => {

const SummaryTitleWithResults = ({
  queryResult,
  analysesResults
  analysesResults,
  sort,
  setSort
}: {
  queryResult: RemoteQueryResult,
  analysesResults: AnalysisResults[]
  analysesResults: AnalysisResults[],
  sort: Sort,
  setSort: (sort: Sort) => void
}) => {
  const showDownloadButton = queryResult.totalResultCount !== sumAnalysesResults(analysesResults);

@@ -144,6 +151,14 @@ const SummaryTitleWithResults = ({
          text="Download all"
          onClick={() => downloadAllAnalysesResults(queryResult)} />
      }
      <div style={{ flexGrow: 2, textAlign: 'right' }}>
        <RepoListCopyButton queryResult={queryResult} />
        <HorizontalSpace size={1} />
        <SortRepoFilter
          sort={sort}
          setSort={setSort}
        />
      </div>
    </div>
  );
};
@@ -154,7 +169,7 @@ const SummaryTitleNoResults = () => (
  </div>
);

const SummaryItemDownloadAndView = ({
const SummaryItemDownload = ({
  analysisSummary,
  analysisResults
}: {
@@ -174,13 +189,7 @@ const SummaryItemDownloadAndView = ({
    </>;
  }

  return <>
    <HorizontalSpace size={2} />
    <a className="vscode-codeql__analysis-result-file-link"
      onClick={() => viewAnalysisResults(analysisSummary)} >
      <FileSymlinkFileIcon size={16} />
    </a>
  </>;
  return <></>;
};

const SummaryItem = ({
@@ -190,24 +199,33 @@ const SummaryItem = ({
  analysisSummary: AnalysisSummary,
  analysisResults: AnalysisResults | undefined
}) => (
  <span>
  <>
    <span className="vscode-codeql__analysis-item"><RepoIcon size={16} /></span>
    <span className="vscode-codeql__analysis-item">{analysisSummary.nwo}</span>
    <span className="vscode-codeql__analysis-item"><Badge text={analysisSummary.resultCount.toString()} /></span>
    <HorizontalSpace size={1} />
    <span className="vscode-codeql__analysis-item">
      <SummaryItemDownloadAndView
      <VSCodeBadge>{analysisSummary.resultCount.toString()}</VSCodeBadge>
    </span>
    <span className="vscode-codeql__analysis-item">
      <SummaryItemDownload
        analysisSummary={analysisSummary}
        analysisResults={analysisResults} />
    </span>
  </span>
    <StarCount starCount={analysisSummary.starCount} />
    <LastUpdated lastUpdated={analysisSummary.lastUpdated} />
  </>
);

const Summary = ({
  queryResult,
  analysesResults
  analysesResults,
  sort,
  setSort
}: {
  queryResult: RemoteQueryResult,
  analysesResults: AnalysisResults[]
  analysesResults: AnalysisResults[],
  sort: Sort,
  setSort: (sort: Sort) => void
}) => {
  const [repoListExpanded, setRepoListExpanded] = useState(false);
  const numOfReposToShow = repoListExpanded ? queryResult.analysisSummaries.length : numOfReposInContractedMode;
@@ -219,17 +237,21 @@ const Summary = ({
        ? <SummaryTitleNoResults />
        : <SummaryTitleWithResults
          queryResult={queryResult}
          analysesResults={analysesResults} />
          analysesResults={analysesResults}
          sort={sort}
          setSort={setSort} />
      }

      <ul className="vscode-codeql__flat-list">
        {queryResult.analysisSummaries.slice(0, numOfReposToShow).map((summary, i) =>
          <li key={summary.nwo} className="vscode-codeql__analysis-summaries-list-item">
            <SummaryItem
              analysisSummary={summary}
              analysisResults={analysesResults.find(a => a.nwo === summary.nwo)} />
          </li>
        )}
        {queryResult.analysisSummaries.slice(0, numOfReposToShow)
          .sort(sorter(sort))
          .map((summary, i) =>
            <li key={summary.nwo} className="vscode-codeql__analysis-summaries-list-item">
              <SummaryItem
                analysisSummary={summary}
                analysisResults={analysesResults.find(a => a.nwo === summary.nwo)} />
            </li>
          )}
      </ul>
      {
        queryResult.analysisSummaries.length > numOfReposInContractedMode &&
@@ -249,39 +271,83 @@ const AnalysesResultsTitle = ({ totalAnalysesResults, totalResults }: { totalAna
  return <SectionTitle>{totalAnalysesResults}/{totalResults} results</SectionTitle>;
};

const AnalysesResultsDescription = ({ totalAnalysesResults, totalResults }: { totalAnalysesResults: number, totalResults: number }) => {
  if (totalAnalysesResults < totalResults) {
    return <>
      <VerticalSpace size={1} />
      Some results haven't been downloaded automatically because of their size or because enough were downloaded already.
      Download them manually from the list above if you want to see them here.
    </>;
  }
const exportResults = () => {
  vscode.postMessage({
    t: 'remoteQueryExportResults',
  });
};

  return <></>;
const AnalysesResultsDescription = ({
  queryResult,
  analysesResults,
}: {
  queryResult: RemoteQueryResult
  analysesResults: AnalysisResults[],
}) => {
  const showDownloadsMessage = queryResult.analysisSummaries.some(
    s => !analysesResults.some(a => a.nwo === s.nwo && a.status === 'Completed'));
  const downloadsMessage = <>
    <VerticalSpace size={1} />
    Some results haven't been downloaded automatically because of their size or because enough were downloaded already.
    Download them manually from the list above if you want to see them here.
  </>;

  const showMaxResultsMessage = analysesResults.some(a => a.rawResults?.capped);
  const maxRawResultsMessage = <>
    <VerticalSpace size={1} />
    Some repositories have more than {MAX_RAW_RESULTS} results. We will only show you up to
    {MAX_RAW_RESULTS} results for each repository.
  </>;

  return (
    <>
      {showDownloadsMessage && downloadsMessage}
      {showMaxResultsMessage && maxRawResultsMessage}
    </>
  );
};

const RepoAnalysisResults = (analysisResults: AnalysisResults) => {
  const numOfResults = getAnalysisResultCount(analysisResults);
  const title = <>
    {analysisResults.nwo}
    <Badge text={analysisResults.results.length.toString()} />
    <HorizontalSpace size={1} />
    <VSCodeBadge>{numOfResults.toString()}</VSCodeBadge>
  </>;

  return (
    <CollapsibleItem title={title}>
      <ul className="vscode-codeql__flat-list" >
        {analysisResults.results.map((r, i) =>
        {analysisResults.interpretedResults.map((r, i) =>
          <li key={i}>
            <AnalysisAlertResult alert={r} />
            <VerticalSpace size={2} />
          </li>)}
      </ul>
      {analysisResults.rawResults &&
        <RawResultsTable
          schema={analysisResults.rawResults.schema}
          results={analysisResults.rawResults.resultSet}
          fileLinkPrefix={analysisResults.rawResults.fileLinkPrefix}
          sourceLocationPrefix={analysisResults.rawResults.sourceLocationPrefix} />
      }
    </CollapsibleItem>
  );
};

const AnalysesResults = ({ analysesResults, totalResults }: { analysesResults: AnalysisResults[], totalResults: number }) => {
const AnalysesResults = ({
  queryResult,
  analysesResults,
  totalResults,
  sort,
}: {
  queryResult: RemoteQueryResult,
  analysesResults: AnalysisResults[],
  totalResults: number,
  sort: Sort
}) => {
  const totalAnalysesResults = sumAnalysesResults(analysesResults);
  const [filterValue, setFilterValue] = React.useState('');

  if (totalResults === 0) {
    return <></>;
@@ -290,17 +356,34 @@ const AnalysesResults = ({ analysesResults, totalResults }: { analysesResults: A
  return (
    <>
      <VerticalSpace size={2} />
      <AnalysesResultsTitle
        totalAnalysesResults={totalAnalysesResults}
        totalResults={totalResults} />
      <div style={{ display: 'flex' }}>
        <div style={{ flexGrow: 1 }}>
          <AnalysesResultsTitle
            totalAnalysesResults={totalAnalysesResults}
            totalResults={totalResults} />
        </div>
        <div>
          <VSCodeButton onClick={exportResults}>Export all</VSCodeButton>
        </div>
      </div>
      <AnalysesResultsDescription
        totalAnalysesResults={totalAnalysesResults}
        totalResults={totalResults} />
        queryResult={queryResult}
        analysesResults={analysesResults} />

      <VerticalSpace size={2} />
      <RepositoriesSearch
        filterValue={filterValue}
        setFilterValue={setFilterValue} />

      <ul className="vscode-codeql__flat-list">
        {analysesResults.filter(a => a.results.length > 0).map(r =>
          <li key={r.nwo} className="vscode-codeql__analyses-results-list-item">
            <RepoAnalysisResults {...r} />
          </li>)}
        {analysesResults
          .filter(a => a.interpretedResults.length > 0 || a.rawResults)
          .filter(a => a.nwo.toLowerCase().includes(filterValue.toLowerCase()))
          .sort(sorter(sort))
          .map(r =>
            <li key={r.nwo} className="vscode-codeql__analyses-results-list-item">
              <RepoAnalysisResults {...r} />
            </li>)}
      </ul>
    </>
  );
@@ -309,6 +392,7 @@ const AnalysesResults = ({ analysesResults, totalResults }: { analysesResults: A
export function RemoteQueries(): JSX.Element {
  const [queryResult, setQueryResult] = useState<RemoteQueryResult>(emptyQueryResult);
  const [analysesResults, setAnalysesResults] = useState<AnalysisResults[]>([]);
  const [sort, setSort] = useState<Sort>('name');

  useEffect(() => {
    window.addEventListener('message', (evt: MessageEvent) => {
@@ -331,18 +415,26 @@ export function RemoteQueries(): JSX.Element {
    return <div>Waiting for results to load.</div>;
  }

  const showAnalysesResults = false;

  try {
    return <div>
      <ThemeProvider colorMode="auto">
        <ViewTitle>{queryResult.queryTitle}</ViewTitle>
        <QueryInfo {...queryResult} />
        <Failures {...queryResult} />
        <Summary queryResult={queryResult} analysesResults={analysesResults} />
        {showAnalysesResults && <AnalysesResults analysesResults={analysesResults} totalResults={queryResult.totalResultCount} />}
      </ThemeProvider>
    </div>;
    return (
      <div className="vscode-codeql__remote-queries">
        <ThemeProvider colorMode="auto">
          <ViewTitle>{queryResult.queryTitle}</ViewTitle>
          <QueryInfo {...queryResult} />
          <Failures {...queryResult} />
          <Summary
            queryResult={queryResult}
            analysesResults={analysesResults}
            sort={sort}
            setSort={setSort} />
          <AnalysesResults
            queryResult={queryResult}
            analysesResults={analysesResults}
            totalResults={queryResult.totalResultCount}
            sort={sort} />
        </ThemeProvider>
      </div>
    );
  } catch (err) {
    console.error(err);
    return <div>There was an error displaying the view.</div>;
@@ -0,0 +1,24 @@
import * as React from 'react';
import { vscode } from '../../view/vscode-api';
import { RemoteQueryResult } from '../shared/remote-query-result';
import { CopyIcon } from '@primer/octicons-react';
import { IconButton } from '@primer/react';

const copyRepositoryList = (queryResult: RemoteQueryResult) => {
  vscode.postMessage({
    t: 'copyRepoList',
    queryId: queryResult.queryId
  });
};

const RepoListCopyButton = ({ queryResult }: { queryResult: RemoteQueryResult }) => (
  <IconButton
    aria-label="Copy repository list"
    icon={CopyIcon}
    variant="invisible"
    size="small"
    sx={{ 'text-align': 'right' }}
    onClick={() => copyRepositoryList(queryResult)} />
);

export default RepoListCopyButton;

@@ -0,0 +1,24 @@
import * as React from 'react';
import { VSCodeTextField } from '@vscode/webview-ui-toolkit/react';

interface RepositoriesSearchProps {
  filterValue: string;
  setFilterValue: (value: string) => void;
}

const RepositoriesSearch = ({ filterValue, setFilterValue }: RepositoriesSearchProps) => {
  return <>
    <VSCodeTextField
      style={{ width: '100%' }}
      placeholder='Filter by repository owner/name'
      ariaLabel="Repository search"
      name="repository-search"
      value={filterValue}
      onInput={(e: InputEvent) => setFilterValue((e.target as HTMLInputElement).value)}
    >
      <span slot="start" className="codicon codicon-search"></span>
    </VSCodeTextField>
  </>;
};

export default RepositoriesSearch;
@@ -0,0 +1,83 @@
import * as React from 'react';
import { FilterIcon } from '@primer/octicons-react';
import { ActionList, ActionMenu, IconButton } from '@primer/react';
import styled from 'styled-components';

const SortWrapper = styled.span`
  flex-grow: 2;
  text-align: right;
  margin-right: 0;
`;

export type Sort = 'name' | 'stars' | 'results' | 'lastUpdated';
type Props = {
  sort: Sort;
  setSort: (sort: Sort) => void;
};

type Sortable = {
  nwo: string;
  starCount?: number;
  resultCount?: number;
  lastUpdated?: number;
};

const sortBy = [
  { name: 'Sort by Name', sort: 'name' },
  { name: 'Sort by Results', sort: 'results' },
  { name: 'Sort by Stars', sort: 'stars' },
  { name: 'Sort by Last Updated', sort: 'lastUpdated' },
];

export function sorter(sort: Sort): (left: Sortable, right: Sortable) => number {
  // stars and results are highest to lowest
  // name is alphabetical
  return (left: Sortable, right: Sortable) => {
    if (sort === 'stars') {
      const stars = (right.starCount || 0) - (left.starCount || 0);
      if (stars !== 0) {
        return stars;
      }
    }
    if (sort === 'lastUpdated') {
      const lastUpdated = (right.lastUpdated || 0) - (left.lastUpdated || 0);
      if (lastUpdated !== 0) {
        return lastUpdated;
      }
    }
    if (sort === 'results') {
      const results = (right.resultCount || 0) - (left.resultCount || 0);
      if (results !== 0) {
        return results;
      }
    }

    // Fall back on name compare if results, stars, or lastUpdated are equal
    return left.nwo.localeCompare(right.nwo, undefined, { sensitivity: 'base' });
  };
}
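Because sorter returns a plain comparator, the view code above can pass it straight to Array.prototype.sort on the summaries and analyses lists. A small hedged example with made-up entries:

```ts
// Hypothetical Sortable entries; only the fields the comparator reads are filled in.
const repos = [
  { nwo: 'octo-org/zeta', starCount: 10, resultCount: 3 },
  { nwo: 'octo-org/alpha', starCount: 250, resultCount: 1 },
];

repos.sort(sorter('stars'));   // highest star count first, ties broken by case-insensitive name
repos.sort(sorter('results')); // highest result count first
repos.sort(sorter('name'));    // alphabetical by nwo
```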
const SortRepoFilter = ({ sort, setSort }: Props) => {
  return <SortWrapper>
    <ActionMenu>
      <ActionMenu.Anchor>
        <IconButton icon={FilterIcon} variant="invisible" aria-label="Sort results" />
      </ActionMenu.Anchor>

      <ActionMenu.Overlay width="small" anchorSide="outside-bottom">
        <ActionList selectionVariant="single">
          {sortBy.map((type, index) => (
            <ActionList.Item
              key={index}
              selected={type.sort === sort} onSelect={() => setSort(type.sort as Sort)}
            >
              {type.name}
            </ActionList.Item>
          ))}
        </ActionList>
      </ActionMenu.Overlay>
    </ActionMenu>
  </SortWrapper>;
};

export default SortRepoFilter;

45  extensions/ql-vscode/src/remote-queries/view/StarCount.tsx  Normal file
@@ -0,0 +1,45 @@
import * as React from 'react';
import { StarIcon } from '@primer/octicons-react';
import styled from 'styled-components';

const Star = styled.span`
  flex-grow: 2;
  text-align: right;
  margin-right: 0;
`;

const Count = styled.span`
  text-align: left;
  width: 2em;
  margin-left: 0.5em;
  margin-right: 1.5em;
`;

type Props = { starCount?: number };

const StarCount = ({ starCount }: Props) => (
  Number.isFinite(starCount) ? (
    <>
      <Star>
        <StarIcon size={16} />
      </Star>
      <Count>
        {displayStars(starCount!)}
      </Count>
    </>
  ) : (
    <></>
  )
);

function displayStars(starCount: number) {
  if (starCount > 10000) {
    return `${(starCount / 1000).toFixed(0)}k`;
  }
  if (starCount > 1000) {
    return `${(starCount / 1000).toFixed(1)}k`;
  }
  return starCount.toFixed(0);
}

export default StarCount;

30  extensions/ql-vscode/src/remote-queries/view/TextButton.tsx  Normal file
@@ -0,0 +1,30 @@
import * as React from 'react';
import styled from 'styled-components';

type Size = 'x-small' | 'small' | 'medium' | 'large' | 'x-large';

const StyledButton = styled.button<{ size: Size }>`
  background: none;
  color: var(--vscode-textLink-foreground);
  border: none;
  cursor: pointer;
  font-size: ${props => props.size};
`;

const TextButton = ({
  size,
  onClick,
  children
}: {
  size: Size,
  onClick: () => void,
  children: React.ReactNode
}) => (
  <StyledButton
    size={size}
    onClick={onClick}>
    {children}
  </StyledButton>
);

export default TextButton;
@@ -1,3 +1,7 @@
.vscode-codeql__remote-queries {
  max-width: 55em;
}

.vscode-codeql__query-info-link {
  text-decoration: none;
  padding-right: 1em;
@@ -10,10 +14,12 @@

.vscode-codeql__query-summary-container {
  padding-top: 1.5em;
  display: flex;
}

.vscode-codeql__analysis-summaries-list-item {
  margin-top: 0.5em;
  display: flex;
}

.vscode-codeql__analyses-results-list-item {
@@ -33,10 +39,6 @@
  font-size: x-small;
}

.vscode-codeql__analysis-result-file-link {
  vertical-align: middle;
}

.vscode-codeql__analysis-failure {
  margin: 0;
  font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas,
@@ -29,13 +29,15 @@ import { ProgressCallback, UserCancellationException } from './commandRunner';
|
||||
import { DatabaseInfo, QueryMetadata } from './pure/interface-types';
|
||||
import { logger } from './logging';
|
||||
import * as messages from './pure/messages';
|
||||
import { InitialQueryInfo } from './query-results';
|
||||
import { InitialQueryInfo, LocalQueryInfo } from './query-results';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import { isQuickQueryPath } from './quick-query';
|
||||
import { compileDatabaseUpgradeSequence, hasNondestructiveUpgradeCapabilities, upgradeDatabaseExplicit } from './upgrades';
|
||||
import { ensureMetadataIsComplete } from './query-results';
|
||||
import { SELECT_QUERY_NAME } from './contextual/locationFinder';
|
||||
import { DecodedBqrsChunk } from './pure/bqrs-cli-types';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
import { parseVisualizerData } from './pure/log-summary-parser';
|
||||
|
||||
/**
|
||||
* run-queries.ts
|
||||
@@ -94,6 +96,22 @@ export class QueryEvaluationInfo {
|
||||
return qsClient.findQueryLogFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get evalLogPath() {
|
||||
return qsClient.findQueryEvalLogFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get evalLogSummaryPath() {
|
||||
return qsClient.findQueryEvalLogSummaryFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get jsonEvalLogSummaryPath() {
|
||||
return qsClient.findJsonQueryEvalLogSummaryFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get evalLogEndSummaryPath() {
|
||||
return qsClient.findQueryEvalLogEndSummaryFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get resultsPaths() {
|
||||
return {
|
||||
resultsPath: path.join(this.querySaveDir, 'results.bqrs'),
|
||||
@@ -124,6 +142,7 @@ export class QueryEvaluationInfo {
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
queryInfo?: LocalQueryInfo,
|
||||
): Promise<messages.EvaluationResult> {
|
||||
if (!dbItem.contents || dbItem.error) {
|
||||
throw new Error('Can\'t run query on invalid database.');
|
||||
@@ -155,6 +174,13 @@ export class QueryEvaluationInfo {
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default'
|
||||
};
|
||||
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
|
||||
await qs.sendRequest(messages.startLog, {
|
||||
db: dataset,
|
||||
logPath: this.evalLogPath,
|
||||
});
|
||||
|
||||
}
|
||||
const params: messages.EvaluateQueriesParams = {
|
||||
db: dataset,
|
||||
evaluateId: callbackId,
|
||||
@@ -171,6 +197,20 @@ export class QueryEvaluationInfo {
      }
    } finally {
      qs.unRegisterCallback(callbackId);
      if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
        await qs.sendRequest(messages.endLog, {
          db: dataset,
          logPath: this.evalLogPath,
        });
        if (await this.hasEvalLog()) {
          this.displayHumanReadableLogSummary(queryInfo, qs);
          if (config.isCanary()) {
            this.parseJsonLogSummary(qs.cliServer);
          }
        } else {
          void showAndLogWarningMessage(`Failed to write structured evaluator log to ${this.evalLogPath}.`);
        }
      }
    }
    return result || {
      evaluationTime: 0,
@@ -284,34 +324,119 @@ export class QueryEvaluationInfo {
    return this.dilPath;
  }

  /**
   * Holds if this query already has a completed structured evaluator log
   */
  async hasEvalLog(): Promise<boolean> {
    return fs.pathExists(this.evalLogPath);
  }

  /**
   * Calls the appropriate CLI command to generate a human-readable log summary
   * and logs to the Query Server console and query log file.
   */
  displayHumanReadableLogSummary(queryInfo: LocalQueryInfo, qs: qsClient.QueryServerClient): void {
    queryInfo.evalLogLocation = this.evalLogPath;
    void qs.cliServer.generateLogSummary(this.evalLogPath, this.evalLogSummaryPath, this.evalLogEndSummaryPath)
      .then(() => {
        queryInfo.evalLogSummaryLocation = this.evalLogSummaryPath;
        fs.readFile(this.evalLogEndSummaryPath, (err, buffer) => {
          if (err) {
            throw new Error(`Could not read structured evaluator log end of summary file at ${this.evalLogEndSummaryPath}.`);
          }
          void qs.logger.log(' --- Evaluator Log Summary --- ', { additionalLogLocation: this.logPath });
          void qs.logger.log(buffer.toString(), { additionalLogLocation: this.logPath });
        });
      })
      .catch(err => {
        void showAndLogWarningMessage(`Failed to generate human-readable structured evaluator log summary. Reason: ${err.message}`);
      });
  }

  /**
   * Calls the appropriate CLI command to generate a JSON log summary and parse it
   * into the appropriate data model for the log visualizer.
   */
  parseJsonLogSummary(cliServer: cli.CodeQLCliServer): void {
    void cliServer.generateJsonLogSummary(this.evalLogPath, this.jsonEvalLogSummaryPath)
      .then(() => {
        // TODO(angelapwen): Stream the file in.
        fs.readFile(this.jsonEvalLogSummaryPath, (err, buffer) => {
          if (err) {
            throw new Error(`Could not read structured evaluator log summary JSON file at ${this.jsonEvalLogSummaryPath}.`);
          }
          parseVisualizerData(buffer.toString()); // Eventually this return value will feed into the tree visualizer.
        });
      })
      .catch(err => {
        void showAndLogWarningMessage(`Failed to generate JSON structured evaluator log summary. Reason: ${err.message}`);
      });
  }

  /**
   * Creates the CSV file containing the results of this query. This will only be called if the query
   * does not have interpreted results and the CSV file does not already exist.
   *
   * @return Promise<true> if the operation creates the file. Promise<false> if the operation does
   * not create the file.
   *
   * @throws Error if the operation fails.
   */
  async exportCsvResults(qs: qsClient.QueryServerClient, csvPath: string, onFinish: () => void): Promise<void> {
  async exportCsvResults(qs: qsClient.QueryServerClient, csvPath: string): Promise<boolean> {
    const resultSet = await this.chooseResultSet(qs);
    if (!resultSet) {
      void showAndLogWarningMessage('Query has no result set.');
      return false;
    }
    let stopDecoding = false;
    const out = fs.createWriteStream(csvPath);
    out.on('finish', onFinish);
    out.on('error', () => {
      if (!stopDecoding) {
        stopDecoding = true;
        void showAndLogErrorMessage(`Failed to write CSV results to ${csvPath}`);
      }

    const promise: Promise<boolean> = new Promise((resolve, reject) => {
      out.on('finish', () => resolve(true));
      out.on('error', () => {
        if (!stopDecoding) {
          stopDecoding = true;
          reject(new Error(`Failed to write CSV results to ${csvPath}`));
        }
      });
    });

    let nextOffset: number | undefined = 0;
    while (nextOffset !== undefined && !stopDecoding) {
      const chunk: DecodedBqrsChunk = await qs.cliServer.bqrsDecode(this.resultsPaths.resultsPath, SELECT_QUERY_NAME, {
    do {
      const chunk: DecodedBqrsChunk = await qs.cliServer.bqrsDecode(this.resultsPaths.resultsPath, resultSet, {
        pageSize: 100,
        offset: nextOffset,
      });
      for (const tuple of chunk.tuples) {
        out.write(tuple.join(',') + '\n');
      }
      chunk.tuples.forEach((tuple) => {
        out.write(tuple.map((v, i) =>
          chunk.columns[i].kind === 'String'
            ? `"${typeof v === 'string' ? v.replaceAll('"', '""') : v}"`
            : v
        ).join(',') + '\n');
      });
      nextOffset = chunk.next;
    }
    } while (nextOffset && !stopDecoding);
    out.end();

    return promise;
  }

  /**
   * Choose the name of the result set to run. If the `#select` set exists, use that. Otherwise,
   * arbitrarily choose the first set. Most of the time, this will be correct.
   *
   * If the query has no result sets, then return undefined.
   */
  async chooseResultSet(qs: qsClient.QueryServerClient) {
    const resultSets = (await qs.cliServer.bqrsInfo(this.resultsPaths.resultsPath, 0))['result-sets'];
    if (!resultSets.length) {
      return undefined;
    }
    if (resultSets.find(r => r.name === SELECT_QUERY_NAME)) {
      return SELECT_QUERY_NAME;
    }
    return resultSets[0].name;
  }
  /**
   * Returns the path to the CSV alerts interpretation of this query results. If CSV results have
   * not yet been produced, this will first create the CSV results and then return the path.
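
The rewritten `exportCsvResults` above pages through the decoded BQRS results and quotes string-valued columns. As a standalone illustration of that quoting-and-paging shape only: the chunk type and decoder here are simplified stand-ins for the CLI's `bqrsDecode`, not the extension's code.

```ts
// Minimal sketch of the CSV quoting logic above, with a simplified chunk shape.
interface Chunk {
  columns: { kind: string }[];
  tuples: (string | number)[][];
  next?: number; // offset of the next page, if any
}

function toCsvLines(chunk: Chunk): string[] {
  return chunk.tuples.map(tuple =>
    tuple
      .map((v, i) =>
        chunk.columns[i].kind === 'String'
          // Double any embedded quotes and wrap the field, as in the diff above.
          ? `"${typeof v === 'string' ? v.replace(/"/g, '""') : v}"`
          : v
      )
      .join(',')
  );
}

// Example: one page with a string column and a number column.
const page: Chunk = {
  columns: [{ kind: 'String' }, { kind: 'Integer' }],
  tuples: [['say "hi"', 1], ['plain', 2]],
};
console.log(toCsvLines(page).join('\n'));
// "say ""hi""",1
// "plain",2
```

In the real method, each page of lines is streamed to the write stream and the loop continues while `chunk.next` is defined and no write error has been seen.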
@@ -657,6 +782,7 @@ export async function compileAndRunQueryAgainstDatabase(
  progress: ProgressCallback,
  token: CancellationToken,
  templates?: messages.TemplateDefinitions,
  queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
): Promise<QueryWithResults> {
  if (!dbItem.contents || !dbItem.contents.dbSchemeUri) {
    throw new Error(`Database ${dbItem.databaseUri} does not have a CodeQL database scheme.`);

@@ -698,10 +824,16 @@ export async function compileAndRunQueryAgainstDatabase(
  const metadata = await tryGetQueryMetadata(cliServer, qlProgram.queryPath);

  let availableMlModels: cli.MlModelInfo[] = [];
  if (await cliServer.cliConstraints.supportsResolveMlModels()) {
  if (!await cliServer.cliConstraints.supportsResolveMlModels()) {
    void logger.log('Resolving ML models is unsupported by this version of the CLI. Running the query without any ML models.');
  } else {
    try {
      availableMlModels = (await cliServer.resolveMlModels(diskWorkspaceFolders)).models;
      void logger.log(`Found available ML models at the following paths: ${availableMlModels.map(x => `'${x.path}'`).join(', ')}.`);
      availableMlModels = (await cliServer.resolveMlModels(diskWorkspaceFolders, initialInfo.queryPath)).models;
      if (availableMlModels.length) {
        void logger.log(`Found available ML models at the following paths: ${availableMlModels.map(x => `'${x.path}'`).join(', ')}.`);
      } else {
        void logger.log('Did not find any available ML models.');
      }
    } catch (e) {
      const message = `Couldn't resolve available ML models for ${qlProgram.queryPath}. Running the ` +
        `query without any ML models: ${e}.`;

@@ -742,7 +874,7 @@ export async function compileAndRunQueryAgainstDatabase(
  }

  if (errors.length === 0) {
    const result = await query.run(qs, upgradeQlo, availableMlModels, dbItem, progress, token);
    const result = await query.run(qs, upgradeQlo, availableMlModels, dbItem, progress, token, queryInfo);
    if (result.resultType !== messages.QueryResultType.SUCCESS) {
      const message = result.message || 'Failed to run query';
      void logger.log(message);

@@ -790,7 +922,7 @@ export async function compileAndRunQueryAgainstDatabase(
    await upgradeDir?.cleanup();
  } catch (e) {
    void qs.logger.log(
      `Could not clean up the upgrades dir. Reason: ${e.message || e}`,
      `Could not clean up the upgrades dir. Reason: ${getErrorMessage(e)}`,
      { additionalLogLocation: query.logPath }
    );
  }
@@ -4,17 +4,18 @@ import { parser } from 'stream-json';
import { pick } from 'stream-json/filters/Pick';
import Assembler = require('stream-json/Assembler');
import { chain } from 'stream-chain';
import { getErrorMessage } from './pure/helpers-pure';

const DUMMY_TOOL : Sarif.Tool = {driver: {name: ''}};
const DUMMY_TOOL: Sarif.Tool = { driver: { name: '' } };

export async function sarifParser(interpretedResultsPath: string) : Promise<Sarif.Log> {
export async function sarifParser(interpretedResultsPath: string): Promise<Sarif.Log> {
  try {
    // Parse the SARIF file into token streams, filtering out only the results array.
    const p = parser();
    const pipeline = chain([
      fs.createReadStream(interpretedResultsPath),
      p,
      pick({filter: 'runs.0.results'})
      pick({ filter: 'runs.0.results' })
    ]);

    // Creates JavaScript objects from the token stream

@@ -26,23 +27,23 @@ export async function sarifParser(interpretedResultsPath: string) : Promise<Sari
      pipeline.on('error', (error) => {
        reject(error);
      });

      asm.on('done', (asm) => {

        const log : Sarif.Log = {
          version: '2.1.0',
        const log: Sarif.Log = {
          version: '2.1.0',
          runs: [
            {
              tool: DUMMY_TOOL,
            {
              tool: DUMMY_TOOL,
              results: asm.current ?? []
            }
          ]
        };

        resolve(log);
      });
    });
  } catch (err) {
    throw new Error(`Parsing output of interpretation failed: ${err.stderr || err}`);
  } catch (e) {
    throw new Error(`Parsing output of interpretation failed: ${(e as any).stderr || getErrorMessage(e)}`);
  }
}
}
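
A short usage sketch for `sarifParser`: stream-parse a SARIF file and count its results. The module path and file path below are placeholders.

```ts
// Usage sketch only; assumes sarifParser is exported from './sarif-parser'.
import { sarifParser } from './sarif-parser';

async function countAlerts(sarifPath: string): Promise<number> {
  const log = await sarifParser(sarifPath);
  // sarifParser wraps the filtered results array in a single synthetic run.
  return log.runs[0].results?.length ?? 0;
}

countAlerts('/tmp/interpreted-results.sarif')
  .then(n => console.log(`${n} results`))
  .catch(err => console.error(err));
```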
@@ -1,6 +1,6 @@
import { ConfigurationTarget, Extension, ExtensionContext, ConfigurationChangeEvent } from 'vscode';
import TelemetryReporter from 'vscode-extension-telemetry';
import { ConfigListener, CANARY_FEATURES, ENABLE_TELEMETRY, GLOBAL_ENABLE_TELEMETRY, LOG_TELEMETRY } from './config';
import { ConfigListener, CANARY_FEATURES, ENABLE_TELEMETRY, GLOBAL_ENABLE_TELEMETRY, LOG_TELEMETRY, isIntegrationTestMode } from './config';
import * as appInsights from 'applicationinsights';
import { logger } from './logging';
import { UserCancellationException } from './commandRunner';

@@ -162,7 +162,11 @@ export class TelemetryListener extends ConfigListener {
    if (!this.wasTelemetryRequested()) {
      // if global telemetry is disabled, avoid showing the dialog or making any changes
      let result = undefined;
      if (GLOBAL_ENABLE_TELEMETRY.getValue()) {
      if (
        GLOBAL_ENABLE_TELEMETRY.getValue() &&
        // Avoid showing the dialog if we are in integration test mode.
        !isIntegrationTestMode()
      ) {
        // Extension won't start until this completes.
        result = await showBinaryChoiceWithUrlDialog(
          'Does the CodeQL Extension by GitHub have your permission to collect usage data and metrics to help us improve CodeQL for VSCode?',
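
The new guard calls `isIntegrationTestMode()` from `config.ts`, whose body is not shown in this diff. A plausible sketch, assuming it keys off an environment variable set by the test runner; both the variable name and the shape of the check are assumptions.

```ts
// Hypothetical sketch only: the real isIntegrationTestMode() lives in config.ts
// and may be implemented differently. The environment variable name is assumed.
export function isIntegrationTestMode(): boolean {
  return process.env.INTEGRATION_TEST_MODE === 'true';
}
```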
@@ -76,7 +76,7 @@ export class QLTestAdapterFactory extends DisposableObject {
 * @param ext The new extension, including the `.`.
 */
function changeExtension(p: string, ext: string): string {
  return p.substr(0, p.length - path.extname(p).length) + ext;
  return p.slice(0, -path.extname(p).length) + ext;
}

/**
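
For reference, the `slice`-based rewrite of `changeExtension` behaves the same as the deprecated `substr` version it replaces. A self-contained usage example (the paths are illustrative):

```ts
import * as path from 'path';

// Same behaviour as the rewritten helper above: swap the final extension.
function changeExtension(p: string, ext: string): string {
  // path.extname('/tests/Foo/test.qlref') === '.qlref', so slicing off its length keeps the stem.
  return p.slice(0, -path.extname(p).length) + ext;
}

console.log(changeExtension('/tests/Foo/test.qlref', '.expected')); // /tests/Foo/test.expected
console.log(changeExtension('query.ql', '.qlx'));                   // query.qlx
```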
extensions/ql-vscode/src/text-utils.ts (new file, 31 lines)
@@ -0,0 +1,31 @@
const CONTROL_CODE = '\u001F'.codePointAt(0)!;
const CONTROL_LABEL = '\u2400'.codePointAt(0)!;

/**
 * Converts the given text so that any non-printable characters are replaced.
 * @param label The text to convert.
 * @returns The converted text.
 */
export function convertNonPrintableChars(label: string | undefined) {
  // If the label was empty, use a placeholder instead, so the link is still clickable.
  if (!label) {
    return '[empty string]';
  } else if (label.match(/^\s+$/)) {
    return `[whitespace: "${label}"]`;
  } else {
    /**
     * If the label contains certain non-printable characters, loop through each
     * character and replace it with the corresponding unicode control label.
     */
    const convertedLabelArray: any[] = [];
    for (let i = 0; i < label.length; i++) {
      const labelCheck = label.codePointAt(i)!;
      if (labelCheck <= CONTROL_CODE) {
        convertedLabelArray[i] = String.fromCodePoint(labelCheck + CONTROL_LABEL);
      } else {
        convertedLabelArray[i] = label.charAt(i);
      }
    }
    return convertedLabelArray.join('');
  }
}
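
A brief usage sketch for the new helper: control characters (U+0000 through U+001F) map to the corresponding Unicode Control Pictures, and empty or whitespace-only labels get readable placeholders. The import path is illustrative.

```ts
import { convertNonPrintableChars } from './text-utils';

// '\u0000' (NUL) becomes '\u2400' (␀), '\u0007' (BEL) becomes '\u2407' (␇), etc.
console.log(convertNonPrintableChars('ab\u0000c')); // 'ab␀c'
console.log(convertNonPrintableChars(''));          // '[empty string]'
console.log(convertNonPrintableChars('   '));       // '[whitespace: "   "]'
```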
@@ -1,10 +1,10 @@
import * as React from 'react';

import { renderLocation } from './result-table-utils';
import { ColumnValue } from '../pure/bqrs-cli-types';
import { CellValue } from '../pure/bqrs-cli-types';

interface Props {
  value: ColumnValue;
  value: CellValue;
  databaseUri: string;
}
@@ -5,6 +5,7 @@ import { RawResultsSortState, QueryMetadata, SortDirection } from '../pure/inter
import { assertNever } from '../pure/helpers-pure';
import { ResultSet } from '../pure/interface-types';
import { vscode } from './vscode-api';
import { convertNonPrintableChars } from '../text-utils';

export interface ResultTableProps {
  resultSet: ResultSet;

@@ -37,9 +38,6 @@ export const oddRowClassName = 'vscode-codeql__result-table-row--odd';
export const pathRowClassName = 'vscode-codeql__result-table-row--path';
export const selectedRowClassName = 'vscode-codeql__result-table-row--selected';

const CONTROL_CODE = '\u001F'.codePointAt(0)!;
const CONTROL_LABEL = '\u2400'.codePointAt(0)!;

export function jumpToLocationHandler(
  loc: ResolvableLocationValue,
  databaseUri: string,

@@ -70,30 +68,6 @@ export function openFile(filePath: string): void {
  });
}

function convertedNonprintableChars(label: string) {
  // If the label was empty, use a placeholder instead, so the link is still clickable.
  if (!label) {
    return '[empty string]';
  } else if (label.match(/^\s+$/)) {
    return `[whitespace: "${label}"]`;
  } else {
    /**
     * If the label contains certain non-printable characters, loop through each
     * character and replace it with the corresponding unicode control label.
     */
    const convertedLabelArray: any[] = [];
    for (let i = 0; i < label.length; i++) {
      const labelCheck = label.codePointAt(i)!;
      if (labelCheck <= CONTROL_CODE) {
        convertedLabelArray[i] = String.fromCodePoint(labelCheck + CONTROL_LABEL);
      } else {
        convertedLabelArray[i] = label.charAt(i);
      }
    }
    return convertedLabelArray.join('');
  }
}

/**
 * Render a location as a link which when clicked displays the original location.
 */

@@ -105,7 +79,7 @@ export function renderLocation(
  callback?: () => void
): JSX.Element {

  const displayLabel = convertedNonprintableChars(label!);
  const displayLabel = convertNonPrintableChars(label!);

  if (loc === undefined) {
    return <span>{displayLabel}</span>;
@@ -0,0 +1,4 @@
---
dependencies: {}
compiled: false
lockVersion: 1.0.0
@@ -9,7 +9,7 @@ import { CodeQLCliServer } from '../../cli';
import { DatabaseManager } from '../../databases';
import { promptImportLgtmDatabase, importArchiveDatabase, promptImportInternetDatabase } from '../../databaseFetcher';
import { ProgressCallback } from '../../commandRunner';
import { dbLoc, DB_URL, storagePath } from './global.helper';
import { cleanDatabases, dbLoc, DB_URL, storagePath } from './global.helper';

/**
 * Run various integration tests for databases

@@ -18,7 +18,7 @@ describe('Databases', function() {
  this.timeout(60000);

  const LGTM_URL = 'https://lgtm.com/projects/g/aeisenberg/angular-bind-notifier/';

  let databaseManager: DatabaseManager;
  let sandbox: sinon.SinonSandbox;
  let inputBoxStub: sinon.SinonStub;

@@ -27,6 +27,14 @@ describe('Databases', function() {

  beforeEach(async () => {
    try {
      sandbox = sinon.createSandbox();
      // the uri.fsPath function on windows returns a lowercase drive letter
      // so, force the storage path string to be lowercase, too.
      progressCallback = sandbox.spy();
      inputBoxStub = sandbox.stub(window, 'showInputBox');
      sandbox.stub(window, 'showErrorMessage');
      sandbox.stub(window, 'showInformationMessage');

      const extension = await extensions.getExtension<CodeQLExtensionInterface | Record<string, never>>('GitHub.vscode-codeql')!.activate();
      if ('databaseManager' in extension) {
        databaseManager = extension.databaseManager;

@@ -34,21 +42,18 @@ describe('Databases', function() {
        throw new Error('Extension not initialized. Make sure cli is downloaded and installed properly.');
      }

      sandbox = sinon.createSandbox();
      // the uri.fsPath function on windows returns a lowercase drive letter
      // so, force the storage path string to be lowercase, too.
      progressCallback = sandbox.spy();
      inputBoxStub = sandbox.stub(window, 'showInputBox');
      await cleanDatabases(databaseManager);
    } catch (e) {
      fail(e);
      fail(e as Error);
    }
  });

  afterEach(() => {
  afterEach(async () => {
    try {
      sandbox.restore();
      await cleanDatabases(databaseManager);
    } catch (e) {
      fail(e);
      fail(e as Error);
    }
  });
@@ -4,13 +4,23 @@ import * as fs from 'fs-extra';
import fetch from 'node-fetch';

import { fail } from 'assert';
import { ConfigurationTarget, extensions, workspace } from 'vscode';
import { commands, ConfigurationTarget, extensions, workspace } from 'vscode';
import { CodeQLExtensionInterface } from '../../extension';
import { DatabaseManager } from '../../databases';

// This file contains helpers shared between actual tests.

export const DB_URL = 'https://github.com/github/vscode-codeql/files/5586722/simple-db.zip';

process.addListener('unhandledRejection', (reason) => {
  if (reason instanceof Error && reason.message === 'Canceled') {
    console.log('Cancellation requested after the test has ended.');
    process.exit(0);
  } else {
    fail(String(reason));
  }
});

// We need to resolve the path, but the final three segments won't exist until later, so we only resolve the
// first portion of the path.
export const dbLoc = path.join(fs.realpathSync(path.join(__dirname, '../../../')), 'build/tests/db.zip');

@@ -65,7 +75,7 @@ export default function(mocha: Mocha) {
    }
  );

  // ensure etension is cleaned up.
  // ensure extension is cleaned up.
  (mocha.options as any).globalTeardown.push(
    async () => {
      const extension = await extensions.getExtension<CodeQLExtensionInterface | Record<string, never>>('GitHub.vscode-codeql')!.activate();

@@ -83,4 +93,25 @@ export default function(mocha: Mocha) {
      removeStorage?.();
    }
  );

  // check that the codeql folder is found in the workspace
  (mocha.options as any).globalSetup.push(
    async () => {
      const folders = workspace.workspaceFolders;
      if (!folders) {
        fail('\n\n\nNo workspace folders found.\nYou will need a local copy of the codeql repo.\nMake sure you specify the path to it in launch.json.\nIt should be something along the lines of "${workspaceRoot}/../codeql" depending on where you have your local copy of the codeql repo.\n\n\n');
      } else {
        const codeqlFolder = folders.find(folder => folder.name === 'codeql');
        if (!codeqlFolder) {
          fail('\n\n\nNo workspace folders found.\nYou will need a local copy of the codeql repo.\nMake sure you specify the path to it in launch.json.\nIt should be something along the lines of "${workspaceRoot}/../codeql" depending on where you have your local copy of the codeql repo.\n\n\n');
        }
      }
    }
  );
}

export async function cleanDatabases(databaseManager: DatabaseManager) {
  for (const item of databaseManager.databaseItems) {
    await commands.executeCommand('codeQLDatabases.removeDatabase', item);
  }
}
@@ -1,6 +1,5 @@
import * as path from 'path';
import { extensions } from 'vscode';
import 'mocha';

import { CodeQLCliServer } from '../../cli';
import { CodeQLExtensionInterface } from '../../extension';
@@ -1,12 +1,14 @@
import 'source-map-support/register';
import 'vscode-test';
import { runTestsInDirectory } from '../index-template';
import 'mocha';
import * as sinonChai from 'sinon-chai';
import * as chai from 'chai';
import 'chai/register-should';
import * as chaiAsPromised from 'chai-as-promised';
chai.use(chaiAsPromised);
chai.use(sinonChai);

// The simple database used throughout the tests
export function run(): Promise<void> {
  return runTestsInDirectory(__dirname, true);
}
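
With `chai-as-promised` and `sinon-chai` registered in the runner above, the integration tests can assert directly on promises and on sinon stubs. An illustrative example only, not part of this diff:

```ts
import * as chai from 'chai';
import * as chaiAsPromised from 'chai-as-promised';
import * as sinon from 'sinon';
import * as sinonChai from 'sinon-chai';

chai.use(chaiAsPromised);
chai.use(sinonChai);
const { expect } = chai;

describe('example (illustrative only)', () => {
  it('supports promise and stub assertions', async () => {
    const stub = sinon.stub().resolves(42);
    await expect(stub()).to.eventually.equal(42); // chai-as-promised
    expect(stub).to.have.been.calledOnce;         // sinon-chai
  });
});
```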
@@ -1,6 +1,5 @@
import * as sinon from 'sinon';
import { extensions, window } from 'vscode';
import 'mocha';
import * as path from 'path';

import * as pq from 'proxyquire';

@@ -8,6 +7,7 @@ import * as pq from 'proxyquire';
import { CliVersionConstraint, CodeQLCliServer } from '../../cli';
import { CodeQLExtensionInterface } from '../../extension';
import { expect } from 'chai';
import { getErrorMessage } from '../../pure/helpers-pure';

const proxyquire = pq.noPreserveCache();
@@ -27,6 +27,17 @@ describe('Packaging commands', function() {

  beforeEach(async function() {
    sandbox = sinon.createSandbox();
    progress = sandbox.spy();
    quickPickSpy = sandbox.stub(window, 'showQuickPick');
    inputBoxSpy = sandbox.stub(window, 'showInputBox');
    showAndLogErrorMessageSpy = sandbox.stub();
    showAndLogInformationMessageSpy = sandbox.stub();
    mod = proxyquire('../../packaging', {
      './helpers': {
        showAndLogErrorMessage: showAndLogErrorMessageSpy,
        showAndLogInformationMessage: showAndLogInformationMessageSpy,
      },
    });

    const extension = await extensions
      .getExtension<CodeQLExtensionInterface | Record<string, never>>(

@@ -45,17 +56,6 @@ describe('Packaging commands', function() {
      }. Skipping this test.`);
      this.skip();
    }
    progress = sandbox.spy();
    quickPickSpy = sandbox.stub(window, 'showQuickPick');
    inputBoxSpy = sandbox.stub(window, 'showInputBox');
    showAndLogErrorMessageSpy = sandbox.stub();
    showAndLogInformationMessageSpy = sandbox.stub();
    mod = proxyquire('../../packaging', {
      './helpers': {
        showAndLogErrorMessage: showAndLogErrorMessageSpy,
        showAndLogInformationMessage: showAndLogInformationMessageSpy,
      },
    });
  });

  afterEach(() => {

@@ -121,8 +121,8 @@ describe('Packaging commands', function() {
      await mod.handleInstallPackDependencies(cli, progress);
      // This line should not be reached
      expect(true).to.be.false;
    } catch (error) {
      expect(error.message).to.contain('Unable to install pack dependencies');
    } catch (e) {
      expect(getErrorMessage(e)).to.contain('Unable to install pack dependencies');
    }
  });
});
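
The test now reads the message via `getErrorMessage(e)` rather than `error.message`, which stays type-safe when the caught value is typed as `unknown`. The helper's body in `pure/helpers-pure.ts` is not shown in this diff; a plausible sketch:

```ts
// Hypothetical sketch of a helper like getErrorMessage; the real implementation may differ.
export function getErrorMessage(e: unknown): string {
  return e instanceof Error ? e.message : String(e);
}
```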
@@ -3,13 +3,12 @@ import { CancellationToken, commands, ExtensionContext, extensions, Uri } from '
import * as sinon from 'sinon';
import * as path from 'path';
import * as fs from 'fs-extra';
import 'mocha';
import { expect } from 'chai';
import * as yaml from 'js-yaml';

import { DatabaseItem, DatabaseManager } from '../../databases';
import { CodeQLExtensionInterface } from '../../extension';
import { dbLoc, storagePath } from './global.helper';
import { cleanDatabases, dbLoc, storagePath } from './global.helper';
import { importArchiveDatabase } from '../../databaseFetcher';
import { compileAndRunQueryAgainstDatabase, createInitialQueryInfo } from '../../run-queries';
import { CodeQLCliServer } from '../../cli';

@@ -39,6 +38,8 @@ describe('Queries', function() {
  let ctx: ExtensionContext;

  let qlpackFile: string;
  let qlpackLockFile: string;
  let oldQlpackLockFile: string; // codeql v2.6.3 and earlier
  let qlFile: string;

@@ -53,16 +54,23 @@ describe('Queries', function() {
      qs = extension.qs;
      cli.quiet = true;
      ctx = extension.ctx;
      qlpackFile = `${ctx.storagePath}/quick-queries/qlpack.yml`;
      qlFile = `${ctx.storagePath}/quick-queries/quick-query.ql`;
      qlpackFile = `${ctx.storageUri?.fsPath}/quick-queries/qlpack.yml`;
      qlpackLockFile = `${ctx.storageUri?.fsPath}/quick-queries/codeql-pack.lock.yml`;
      oldQlpackLockFile = `${ctx.storageUri?.fsPath}/quick-queries/qlpack.lock.yml`;
      qlFile = `${ctx.storageUri?.fsPath}/quick-queries/quick-query.ql`;
    } else {
      throw new Error('Extension not initialized. Make sure cli is downloaded and installed properly.');
    }

    // Ensure we are starting from a clean slate.
    safeDel(qlFile);
    safeDel(qlpackFile);

    progress = sandbox.spy();
    token = {} as CancellationToken;

    // Add a database
    // Add a database, but make sure the database manager is empty first
    await cleanDatabases(databaseManager);
    const uri = Uri.file(dbLoc);
    const maybeDbItem = await importArchiveDatabase(
      uri.toString(true),

@@ -78,15 +86,18 @@ describe('Queries', function() {
      }
      dbItem = maybeDbItem;
    } catch (e) {
      fail(e);
      fail(e as Error);
    }
  });

  afterEach(() => {
  afterEach(async () => {
    try {
      sandbox.restore();
      safeDel(qlpackFile);
      safeDel(qlFile);
      await cleanDatabases(databaseManager);
    } catch (e) {
      fail(e);
      fail(e as Error);
    }
  });

@@ -107,7 +118,7 @@ describe('Queries', function() {
      expect(result.result.resultType).to.eq(QueryResultType.SUCCESS);
    } catch (e) {
      console.error('Test Failed');
      fail(e);
      fail(e as Error);
    }
  });

@@ -131,32 +142,39 @@ describe('Queries', function() {
      expect(result.result.resultType).to.eq(QueryResultType.SUCCESS);
    } catch (e) {
      console.error('Test Failed');
      fail(e);
      fail(e as Error);
    }
  });

  it('should create a quick query', async () => {
    safeDel(qlFile);
    safeDel(qlpackFile);

    await commands.executeCommand('codeQL.quickQuery');

    // should have created the quick query file and query pack file
    expect(fs.pathExistsSync(qlFile)).to.be.true;
    expect(fs.pathExistsSync(qlpackFile)).to.be.true;

    const qlpackContents: any = await yaml.safeLoad(
    const qlpackContents: any = await yaml.load(
      fs.readFileSync(qlpackFile, 'utf8')
    );
    // Should have chosen the js libraries
    expect(qlpackContents.libraryPathDependencies[0]).to.include('javascript');
    expect(qlpackContents.dependencies['codeql/javascript-all']).to.eq('*');

    // Should also have a codeql-pack.lock.yml file
    const packFileToUse = fs.pathExistsSync(qlpackLockFile) ? qlpackLockFile : oldQlpackLockFile;
    const qlpackLock: any = await yaml.load(
      fs.readFileSync(packFileToUse, 'utf8')
    );
    expect(!!qlpackLock.dependencies['codeql/javascript-all'].version).to.be.true;
  });

  it('should avoid creating a quick query', async () => {
    fs.writeFileSync(qlpackFile, yaml.safeDump({
    fs.mkdirpSync(path.dirname(qlpackFile));
    fs.writeFileSync(qlpackFile, yaml.dump({
      name: 'quick-query',
      version: '1.0.0',
      libraryPathDependencies: ['codeql-javascript']
      dependencies: {
        'codeql/javascript-all': '*'
      }
    }));
    fs.writeFileSync(qlFile, 'xxx');
    await commands.executeCommand('codeQL.quickQuery');
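
The quick-query test switches from `yaml.safeLoad`/`yaml.safeDump` to `yaml.load`/`yaml.dump`: js-yaml 4 removed the safe variants, and `load`/`dump` use the safe schema by default. A minimal round-trip with the new API:

```ts
import * as yaml from 'js-yaml';

// js-yaml 4: load/dump replace safeLoad/safeDump and are safe by default.
const text = yaml.dump({ name: 'quick-query', dependencies: { 'codeql/javascript-all': '*' } });
const parsed = yaml.load(text) as { name: string; dependencies: Record<string, string> };
console.log(parsed.dependencies['codeql/javascript-all']); // '*'
```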
Some files were not shown because too many files have changed in this diff.