Compare commits
329 Commits
Commits included in this comparison (SHA1):

096d7719c6, 619c485224, 9367d5fb45, 50ec97ad91, fa5fcde987, 5b33333404, cf50624e4e, ccc9ed8b49,
141f5381e7, be054ca4f8, 0a06452450, b840d3f9bf, c829c30688, 7947afb1b4, c058e7a128, 1dc663339d,
351db4efc8, 12d6ea3966, e1adc7b428, dc34adadcd, 6e06381640, f55389cd26, 6d930f53ba, f7616cf685,
f55d9820bd, befc2cddd2, ef268e043f, e10d2aef8e, a97c5fe836, 9b6eddddae, ed84825e65, cb84003c31,
a1cd87aa3a, 7d3b015e20, 7d0d11f526, eb2520e7ca, 2675bf464e, b638449498, e12bf63f9a, ffcc1f82f1,
04d7b12dd8, 3e33b00a75, 12dc378fc1, bbe99f4451, 91b17f8fa6, 69f1778309, c55e801d00, b363f77a83,
f55f46f95b, 5ee2f0efe1, 1314a36ba4, 2b8b621298, aed4c9fc58, 604001dfb1, 1a03c0e4ac, a8c54b7640,
9bb60c9474, 0b2ce7a071, 44145baca7, dac7881ca3, 31bd927959, 908a862dd1, 6676ba99d0, 6d3c6e598f,
e1a10fc827, 2ebdbaafa3, a74dfea08b, 44ff380c86, 0a41713253, f5a5675da4, 7a8cf55090, 7932de3b7d,
c8ba967a54, f5d2f0e0ca, 2c7e2f4b7f, ee3ebe687b, 77024f0757, c0e39886eb, 6339e7897d, 783a8a8772,
8f2d865999, d6d0825926, 37de2e7f52, 800c9e0c93, a1bc7eb4d5, 8ff45d2aee, 8ec19777b5, 3e388fedeb,
83ffba2f08, f1c4fef8ba, eec506a209, 2ca0060c6a, 8b2d79a7f7, c4db8b6d4b, 61d4305593, 542e1d24aa,
47ec074cfb, e44835e795, 2e28146a58, 85e051a76d, 7027a61e63, e8c5b27d92, a3deec7875, 6282a462c8,
dac5952e96, ada6fcb908, 8d2f902420, fc3fe7a81e, 426cc95e9f, 9e40043fe0, 14608fe5f7, 22ed090685,
2ca4097daf, f1d16015bf, 9a81ad05ed, 76e983d19c, a3015c0fa3, 88d0bda049, d2ec54e89e, 4559c5a38d,
16bd106abc, e5dcec8d8e, ad3565d3ad, 5fe12ecd74, 318214642f, 227fe3ee6b, 978a82dd1a, 04f72a7da9,
a0954a1dc0, cc1bf74370, 2f7908773a, 0efd02979e, bd9776c4b7, 35e9da83ec, 4f5ca0bca9, 43f314b2b5,
4bdf579ce2, aba3039eef, bbff791c65, 1ed50b3081, 67336a24e7, 48174c327d, 43f2539b42, 462a7a722a,
4101bb252e, 4ff4e4827e, 8daa92ad49, 371e83bff9, 6fa0227a1e, c38e4ce265, de06ed148d, 21bcd62ba8,
76c034f79a, d8d394ce40, 213f4ce92f, 2d1726763f, abfd9b3cbd, 6114f6a7fd, 61e674e9f6, 006cc8c52a,
ffe7fdcb46, 49cceffe1b, 011782395a, 558009543f, aaef5bde2c, f52f595d56, 50196d8430, 2ecfbfbb42,
9508dffe6d, b4a72bbcab, 4ceaaf92cc, ef28c9531b, c86c602e39, 3bee2905e5, 9ac8a15cd5, 81b8104064,
65f58b1f98, 7e872aa6d6, 0383a91a68, bb6ebe5750, 71aa3d145f, 2f1f80029b, ad18cfa284, 92ed1c6ac9,
e71e04a8f1, ef127c279c, 4afac5fa4d, 29ae97aa82, 9319d7e8ef, 689db3713b, 0b9fcb884b, 23e29a1fdc,
90d636a026, 3e3e12afb9, 421f5d23ec, 0fa91f32cb, 3d21b203be, 3972b8f4c1, 2d1707db00, 72aa4f0561,
fd57cc95e9, 04c392be7e, 38da598214, 3f2c9b647c, 7d5b4369c1, aade33fa88, 2a8a90bdfc, f36048cc95,
517feeca21, 9436a49118, 0e02cb08fd, 26244efc50, 6339eeffe5, 8cc2f598eb, 46a1dd57f4, 9d99fc521e,
bcf79354ee, 27a8636bac, 92a99938c9, ed61eb0a95, 50d495b522, 526d5c2c44, 1720f9201e, e62de1ca22,
d052ddb742, af53a02ea5, 8e2d18da8c, 2c5004387d, 3fc3b259ba, cd95f68692, 59c3b1ba2f, fa85865fe5,
5575d4142c, ae6263a07f, 9af75634fa, 04b8681272, d5549f2894, b510b85ca0, 5ad754a3a2, 4f04f9db6e,
025a1a1383, f28c1f91d9, c609377a9c, 2579d12f24, c18f7953e7, 3a292b02b6, 7baf2d0a2a, 328289eb1c,
95d93eeb61, b54cc27cab, c9ca1ee7b3, 649d6d94a3, bf68d21830, 64b33b76cb, c189df3fd6, 277869ebca,
303513a566, 8712106b3d, cdb9506583, 94a311a550, 791e7e9c4d, 6cfa7e2cd3, 7196c26181, 735f177283,
f857e5ec6c, a5e02950c2, 4a928f1298, f59012862e, 5f5418a297, 548a216b56, c943c89fc6, 06de6077ba,
cef1fcc95d, 1ed8b225db, f0354c87f4, 5e06a615cd, e11aa7af18, f4ddc17851, ebce2826cb, 4c411acef4,
ddc941f464, c5ff2c6f76, 85ac16bb22, e7ee4a33c7, ac0da04542, 3337117970, 9b61ff5714, d25db48452,
251f354076, 9c6ae226fb, a502ee85d1, eec72e0cbd, 7a1acce133, 84b4bfe663, 16df990183, 969dd26041,
9df1f91318, 48ddc66d47, 85e3869607, 5bb2a763e3, 2110709d72, 493033edc0, bf8e77b9b9, c7e5581027,
c78802a1ed, 39f9c082b9, ca1ef5192d, 1d6fef9169, 81f80ddbe5, b53657344c, 95e818898e, a7e014a87e,
cca65e5a48, a75249f3e4, 053a4b0392, d1362bf44f, 580832ea7b, ddca0bb851, d9a04ea895, 48ccb27e49,
a2b5ad07ff, cc9cbf7f06, ad5c43c9ba, 9c27d01d47, 64ac33e3bb, 329fb87e12, d49e6e19a6, 7785dfead2,
29c29f9e3a
.github/dependabot.yml (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "extensions/ql-vscode"
    schedule:
      interval: "weekly"
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
  - package-ecosystem: "github-actions"
    directory: ".github"
    schedule:
      interval: "weekly"
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
.github/workflows/dependency-review.yml (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
name: 'Dependency Review'
on:
  - pull_request
  - workflow_dispatch

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v3
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v1
.github/workflows/main.yml (vendored, 8 changed lines)
@@ -22,7 +22,7 @@ jobs:

- uses: actions/setup-node@v1
with:
node-version: '14.14.0'
node-version: '16.13.0'

- name: Install dependencies
working-directory: extensions/ql-vscode
@@ -82,7 +82,7 @@ jobs:

- uses: actions/setup-node@v1
with:
node-version: '14.14.0'
node-version: '16.13.0'

- name: Install dependencies
working-directory: extensions/ql-vscode
@@ -135,7 +135,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
version: ['v2.3.3', 'v2.4.6', 'v2.5.9', 'v2.6.3', 'v2.7.6', 'v2.8.0', 'nightly']
version: ['v2.3.3', 'v2.4.6', 'v2.5.9', 'v2.6.3', 'v2.7.6', 'v2.8.5', 'v2.9.2', 'nightly']
env:
CLI_VERSION: ${{ matrix.version }}
NIGHTLY_URL: ${{ needs.find-nightly.outputs.url }}
@@ -147,7 +147,7 @@ jobs:

- uses: actions/setup-node@v1
with:
node-version: '14.14.0'
node-version: '16.13.0'

- name: Install dependencies
working-directory: extensions/ql-vscode
.github/workflows/release.yml (vendored, 2 changed lines)
@@ -22,7 +22,7 @@ jobs:

- uses: actions/setup-node@v1
with:
node-version: '10.18.1'
node-version: '16.13.0'

- name: Install dependencies
run: |
.vscode/launch.json (vendored, 8 changed lines)
@@ -12,7 +12,6 @@
// Add a reference to a workspace to open. Eg-
// "${workspaceRoot}/../vscode-codeql-starter/vscode-codeql-starter.code-workspace"
],
"stopOnEntry": false,
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -46,7 +45,6 @@
"ts-node/register",
"test/pure-tests/**/*.ts"
],
"port": 9229,
"stopOnEntry": false,
"sourceMaps": true,
"console": "integratedTerminal",
@@ -60,10 +58,10 @@
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
"--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/no-workspace/index",
"--disable-workspace-trust",
"--disable-extensions",
"--disable-gpu"
],
"stopOnEntry": false,
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -77,11 +75,11 @@
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
"--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/minimal-workspace/index",
"--disable-workspace-trust",
"--disable-extensions",
"--disable-gpu",
"${workspaceRoot}/extensions/ql-vscode/test/data"
],
"stopOnEntry": false,
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -95,6 +93,7 @@
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
"--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/cli-integration/index",
"--disable-workspace-trust",
"--disable-gpu",
"--disable-extension",
"eamodio.gitlens",
@@ -121,7 +120,6 @@
// This option overrides the CLI_VERSION option.
// "CLI_PATH": "${workspaceRoot}/../semmle-code/target/intree/codeql/codeql",
},
"stopOnEntry": false,
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -56,8 +56,6 @@ We recommend that you keep `npm run watch` running in the backgound and you only

1. on first checkout
2. whenever any of the non-TypeScript resources have changed
3. on any change to files included in one of the webviews
- **Important**: This is easy to forget. You must explicitly run `npm run build` whenever one of the files in the webview is changed. These are the files in the `src/view` and `src/compare/view` folders.

### Installing the extension

@@ -124,6 +122,7 @@ From inside of VSCode, open the `launch.json` file and in the _Launch Integratio
1. Download the VSIX from the draft GitHub release at the top of [the releases page](https://github.com/github/vscode-codeql/releases) that is created when the release build finishes.
1. Unzip the `.vsix` and inspect its `package.json` to make sure the version is what you expect,
or look at the source if there's any doubt the right code is being shipped.
1. Install the `.vsix` file into your vscode IDE and ensure the extension can load properly. Run a single command (like run query, or add database).
1. Go to the actions tab of the vscode-codeql repository and select the [Release workflow](https://github.com/github/vscode-codeql/actions?query=workflow%3ARelease).
- If there is an authentication failure when publishing, be sure to check that the authentication keys haven't expired. See below.
1. Approve the deployments of the correct Release workflow. This will automatically publish to Open VSX and VS Code Marketplace.
@@ -143,12 +142,7 @@ To regenerate the Open VSX token:
1. Go to the [Access Tokens](https://open-vsx.org/user-settings/tokens) page and generate a new token.
1. Update the secret in the `publish-open-vsx` environment in the project settings.

To regenerate the VSCode Marketplace token:

1. Follow the instructions on [getting a PAT for Azure DevOps](https://code.visualstudio.com/api/working-with-extensions/publishing-extension#get-a-personal-access-token).
1. Update the secret in the `publish-vscode-marketplace` environment in the project settings.

Not that Azure DevOps PATs expire yearly and must be regenerated.
To regenerate the VSCode Marketplace token, please see our internal documentation. Note that Azure DevOps PATs expire every 90 days and must be regenerated.

## Resources

@@ -1,9 +1,43 @@
# CodeQL for Visual Studio Code: Changelog

## 1.6.6 - 17 May 2022

No user facing changes.

## 1.6.5 - 25 April 2022

- Re-enable publishing to open-vsx. [#1285](https://github.com/github/vscode-codeql/pull/1285)

## 1.6.4 - 6 April 2022

No user facing changes.

## 1.6.3 - 4 April 2022

- Fix a bug where the AST viewer was not synchronizing its selected node when the editor selection changes. [#1230](https://github.com/github/vscode-codeql/pull/1230)
- Avoid synchronizing the `codeQL.cli.executablePath` setting. [#1252](https://github.com/github/vscode-codeql/pull/1252)
- Open the directory in the finder/explorer (instead of just highlighting it) when running the "Open query directory" command from the query history view. [#1235](https://github.com/github/vscode-codeql/pull/1235)
- Ensure query label in the query history view changes are persisted across restarts. [#1235](https://github.com/github/vscode-codeql/pull/1235)
- Prints end-of-query evaluator log summaries to the Query Server Console. [#1264](https://github.com/github/vscode-codeql/pull/1264)

## 1.6.1 - 17 March 2022

No user facing changes.

## 1.6.0 - 7 March 2022

- Fix a bug where database upgrades could not be resolved if some of the target pack's dependencies are outside of the workspace. [#1138](https://github.com/github/vscode-codeql/pull/1138)
- Open the query server logs for query errors (instead of the extension log). This will make it easier to track down query errors. [#1158](https://github.com/github/vscode-codeql/pull/1158)
- Fix a bug where queries took a long time to run if there are no folders in the workspace. [#1157](https://github.com/github/vscode-codeql/pull/1157)
- [BREAKING CHANGE] The `codeQL.runningQueries.customLogDirectory` setting is deprecated and no longer has any function. Instead, all query log files will be stored in the query history directory, next to the query results. [#1178](https://github.com/github/vscode-codeql/pull/1178)
- Add a _Open query directory_ command for query items. This command opens the directory containing all artifacts for a query. [#1179](https://github.com/github/vscode-codeql/pull/1179)
- Add options to display evaluator logs for a given query run. Some information that was previously found in the query server output may now be found here. [#1186](https://github.com/github/vscode-codeql/pull/1186)

## 1.5.11 - 10 February 2022

- Fix a bug where invoking _View AST_ from the file explorer would not view the selected file. Instead it would view the active editor. Also, prevent the _View AST_ from appearing if the current selection includes a directory or multiple files. [#1113](https://github.com/github/vscode-codeql/pull/1113)
- Add query history items as soon as a query is run, including new icons for each history item. [#1094](https://github.com/github/vscode-codeql/pull/1094)
- Save query history items across restarts. Items will be saved for 30 days and can be overwritten by setting the `codeQL.queryHistory.ttl` configuration setting. [#1130](https://github.com/github/vscode-codeql/pull/1130)
- Allow in-progress query items to be cancelled from the query history view. [#1105](https://github.com/github/vscode-codeql/pull/1105)

## 1.5.10 - 25 January 2022
@@ -30,7 +64,7 @@
- Fix a bug with importing large databases. Databases over 4GB can now be imported directly from LGTM or from a zip file. This functionality is only available when using CodeQL CLI version 2.6.0 or later. [#971](https://github.com/github/vscode-codeql/pull/971)
- Replace certain control codes (`U+0000` - `U+001F`) with their corresponding control labels (`U+2400` - `U+241F`) in the results view. [#963](https://github.com/github/vscode-codeql/pull/963)
- Allow case-insensitive project slugs for GitHub repositories when adding a CodeQL database from LGTM. [#978](https://github.com/github/vscode-codeql/pull/961)
- Add a _CodeQL: Preview Query Help_ command to generate Markdown previews of `.qhelp` query help files. This command should only be run in trusted workspaces. See https://codeql.github.com/docs/codeql-cli/testing-query-help-files for more information about query help. [#988](https://github.com/github/vscode-codeql/pull/988)
- Add a _CodeQL: Preview Query Help_ command to generate Markdown previews of `.qhelp` query help files. This command should only be run in trusted workspaces. See [the CodeQL CLI docs](https://codeql.github.com/docs/codeql-cli/testing-query-help-files) for more information about query help. [#988](https://github.com/github/vscode-codeql/pull/988)
- Make "Open Referenced File" command accessible from the active editor menu. [#989](https://github.com/github/vscode-codeql/pull/989)
- Fix a bug where result set names in the result set drop-down were disappearing when viewing a sorted table. [#1007](https://github.com/github/vscode-codeql/pull/1007)
- Allow query result locations with 0 as the end column value. These are treated as the first column in the line. [#1002](https://github.com/github/vscode-codeql/pull/1002)
@@ -1,5 +1,6 @@
import * as gulp from 'gulp';
import * as replace from 'gulp-replace';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const replace = require('gulp-replace');

/** Inject the application insights key into the telemetry file */
export function injectAppInsightsKey() {
@@ -1,5 +1,4 @@
import * as fs from 'fs-extra';
import * as jsonc from 'jsonc-parser';
import * as path from 'path';

export interface DeployedPackage {
@@ -28,7 +27,7 @@ async function copyPackage(sourcePath: string, destPath: string): Promise<void>

export async function deployPackage(packageJsonPath: string): Promise<DeployedPackage> {
try {
const packageJson: any = jsonc.parse(await fs.readFile(packageJsonPath, 'utf8'));
const packageJson: any = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));

// Default to development build; use flag --release to indicate release build.
const isDevBuild = !process.argv.includes('--release');
@@ -1,8 +1,8 @@
import * as gulp from 'gulp';
import { compileTypeScript, watchTypeScript, copyViewCss, cleanOutput } from './typescript';
import { compileTypeScript, watchTypeScript, copyViewCss, cleanOutput, watchCss } from './typescript';
import { compileTextMateGrammar } from './textmate';
import { copyTestData } from './tests';
import { compileView } from './webpack';
import { compileView, watchView } from './webpack';
import { packageExtension } from './package';
import { injectAppInsightsKey } from './appInsights';

@@ -14,5 +14,15 @@ export const buildWithoutPackage =
)
);

export { cleanOutput, compileTextMateGrammar, watchTypeScript, compileTypeScript, copyTestData, injectAppInsightsKey };
export {
cleanOutput,
compileTextMateGrammar,
watchTypeScript,
watchView,
compileTypeScript,
copyTestData,
injectAppInsightsKey,
compileView,
watchCss
};
export default gulp.series(buildWithoutPackage, injectAppInsightsKey, packageExtension);
@@ -5,7 +5,7 @@
"strict": true,
"module": "commonjs",
"target": "es2017",
"lib": ["es6"],
"lib": ["ES2021"],
"moduleResolution": "node",
"sourceMap": true,
"rootDir": ".",
@@ -16,7 +16,8 @@
"noImplicitReturns": true,
"experimentalDecorators": true,
"noUnusedLocals": true,
"noUnusedParameters": true
"noUnusedParameters": true,
"esModuleInterop": true
},
"include": ["*.ts"]
}
@@ -40,6 +40,10 @@ export function watchTypeScript() {
gulp.watch('src/**/*.ts', compileTypeScript);
}

export function watchCss() {
gulp.watch('src/**/*.css', copyViewCss);
}

/** Copy CSS files for the results view into the output directory. */
export function copyViewCss() {
return gulp.src('src/**/view/*.css')
@@ -2,7 +2,23 @@ import * as webpack from 'webpack';
import { config } from './webpack.config';

export function compileView(cb: (err?: Error) => void) {
webpack(config).run((error, stats) => {
doWebpack(config, true, cb);
}

export function watchView(cb: (err?: Error) => void) {
const watchConfig = {
...config,
watch: true,
watchOptions: {
aggregateTimeout: 200,
poll: 1000,
}
};
doWebpack(watchConfig, false, cb);
}

function doWebpack(internalConfig: webpack.Configuration, failOnError: boolean, cb: (err?: Error) => void) {
const resultCb = (error: Error | undefined, stats?: webpack.Stats) => {
if (error) {
cb(error);
}
@@ -20,11 +36,16 @@ export function compileView(cb: (err?: Error) => void) {
errors: true
}));
if (stats.hasErrors()) {
cb(new Error('Compilation errors detected.'));
return;
if (failOnError) {
cb(new Error('Compilation errors detected.'));
return;
} else {
console.error('Compilation errors detected.');
}
}
cb();
}
};

cb();
});
webpack(internalConfig, resultCb);
}
extensions/ql-vscode/media/dark/github.svg (new file, 4 lines)
@@ -0,0 +1,4 @@
<!-- From https://github.com/microsoft/vscode-icons -->
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97553 0C3.57186 0 0 3.57186 0 7.97553C0 11.4985 2.29969 14.4832 5.43119 15.5596C5.82263 15.6086 5.96942 15.3639 5.96942 15.1682C5.96942 14.9725 5.96942 14.4832 5.96942 13.7982C3.76758 14.2875 3.27829 12.7217 3.27829 12.7217C2.93578 11.792 2.39755 11.5474 2.39755 11.5474C1.66361 11.0581 2.44648 11.0581 2.44648 11.0581C3.22936 11.107 3.66972 11.8899 3.66972 11.8899C4.40367 13.1131 5.52905 12.7706 5.96942 12.5749C6.01835 12.0367 6.263 11.6942 6.45872 11.4985C4.69725 11.3028 2.83792 10.6177 2.83792 7.53517C2.83792 6.65443 3.1315 5.96942 3.66972 5.38226C3.62079 5.23547 3.32722 4.40367 3.76758 3.32722C3.76758 3.32722 4.4526 3.1315 5.96942 4.15902C6.6055 3.9633 7.29052 3.91437 7.97553 3.91437C8.66055 3.91437 9.34557 4.01223 9.98165 4.15902C11.4985 3.1315 12.1835 3.32722 12.1835 3.32722C12.6239 4.40367 12.3303 5.23547 12.2813 5.43119C12.7706 5.96942 13.1131 6.70336 13.1131 7.5841C13.1131 10.6667 11.2538 11.3028 9.49235 11.4985C9.78593 11.7431 10.0306 12.2324 10.0306 12.9664C10.0306 14.0428 10.0306 14.8746 10.0306 15.1682C10.0306 15.3639 10.1774 15.6086 10.5688 15.5596C13.7492 14.4832 16 11.4985 16 7.97553C15.9511 3.57186 12.3792 0 7.97553 0Z" fill="#C5C5C5"/>
</svg>
After: size 1.3 KiB
extensions/ql-vscode/media/globe.svg (new file, 16 lines)
@@ -0,0 +1,16 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="7.5" cy="7.5" r="7" stroke="#959DA5"/>
<mask id="mask0_394_2982" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="0" y="0" width="15" height="15">
<circle cx="7.5" cy="7.5" r="7.5" fill="#C4C4C4"/>
</mask>
<g mask="url(#mask0_394_2982)">
<path d="M14.5 7.5C14.5 9.42971 13.6822 11.1907 12.5493 12.4721C11.4035 13.7683 10.0054 14.5 8.90625 14.5C7.84644 14.5 6.81131 13.8113 6.01569 12.5383C5.22447 11.2724 4.71875 9.49235 4.71875 7.5C4.71875 5.50765 5.22447 3.72765 6.01569 2.4617C6.81131 1.1887 7.84644 0.5 8.90625 0.5C10.0054 0.5 11.4035 1.23172 12.5493 2.52786C13.6822 3.80934 14.5 5.57029 14.5 7.5Z" stroke="#959DA5"/>
</g>
<mask id="mask1_394_2982" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="1" y="0" width="16" height="15">
<circle cx="9.375" cy="7.5" r="7.5" fill="#C4C4C4"/>
</mask>
<g mask="url(#mask1_394_2982)">
<path d="M10.2812 7.5C10.2812 9.49235 9.77553 11.2724 8.98431 12.5383C8.18869 13.8113 7.15356 14.5 6.09375 14.5C4.99456 14.5 3.5965 13.7683 2.45067 12.4721C1.31781 11.1907 0.5 9.42971 0.5 7.5C0.5 5.57029 1.31781 3.80934 2.45067 2.52786C3.5965 1.23172 4.99456 0.5 6.09375 0.5C7.15356 0.5 8.18869 1.1887 8.98431 2.4617C9.77553 3.72765 10.2812 5.50765 10.2812 7.5Z" stroke="#959DA5"/>
</g>
<line y1="7.5" x2="15" y2="7.5" stroke="#959DA5"/>
</svg>
After: size 1.4 KiB
extensions/ql-vscode/media/light/github.svg (new file, 11 lines)
@@ -0,0 +1,11 @@
<!-- From https://github.com/microsoft/vscode-icons -->
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97578 0C3.57211 0 0.000244141 3.57186 0.000244141 7.97553C0.000244141 11.4985 2.29994 14.4832 5.43144 15.5596C5.82287 15.6086 5.96966 15.3639 5.96966 15.1682C5.96966 14.9725 5.96966 14.4832 5.96966 13.7982C3.76783 14.2875 3.27853 12.7217 3.27853 12.7217C2.93602 11.792 2.3978 11.5474 2.3978 11.5474C1.66385 11.0581 2.44673 11.0581 2.44673 11.0581C3.2296 11.107 3.66997 11.8899 3.66997 11.8899C4.40391 13.1131 5.5293 12.7706 5.96966 12.5749C6.01859 12.0367 6.26324 11.6942 6.45896 11.4985C4.69749 11.3028 2.83816 10.6177 2.83816 7.53517C2.83816 6.65443 3.13174 5.96942 3.66997 5.38226C3.62104 5.23547 3.32746 4.40367 3.76783 3.32722C3.76783 3.32722 4.45284 3.1315 5.96966 4.15902C6.60575 3.9633 7.29076 3.91437 7.97578 3.91437C8.66079 3.91437 9.34581 4.01223 9.98189 4.15902C11.4987 3.1315 12.1837 3.32722 12.1837 3.32722C12.6241 4.40367 12.3305 5.23547 12.2816 5.43119C12.7709 5.96942 13.1134 6.70336 13.1134 7.5841C13.1134 10.6667 11.2541 11.3028 9.4926 11.4985C9.78618 11.7431 10.0308 12.2324 10.0308 12.9664C10.0308 14.0428 10.0308 14.8746 10.0308 15.1682C10.0308 15.3639 10.1776 15.6086 10.5691 15.5596C13.7495 14.4832 16.0002 11.4985 16.0002 7.97553C15.9513 3.57186 12.3794 0 7.97578 0Z" fill="#424242"/>
</g>
<defs>
<clipPath id="clip0">
<rect width="16" height="16" fill="white" transform="translate(0.000244141)"/>
</clipPath>
</defs>
</svg>
After: size 1.5 KiB
extensions/ql-vscode/package-lock.json (generated, 4339 changed lines)
File diff suppressed because it is too large.
@@ -4,7 +4,7 @@
"description": "CodeQL for Visual Studio Code",
"author": "GitHub",
"private": true,
"version": "1.5.11",
"version": "1.6.6",
"publisher": "GitHub",
"license": "MIT",
"icon": "media/VS-marketplace-CodeQL-icon.png",
@@ -14,15 +14,14 @@
},
"engines": {
"vscode": "^1.59.0",
"node": "^14.17.1",
"npm": "^7.20.6"
"node": ">=16.11.25",
"npm": ">=7.20.6"
},
"categories": [
"Programming Languages"
],
"extensionDependencies": [
"hbenl.vscode-test-explorer",
"ms-vscode.test-adapter-converter"
"hbenl.vscode-test-explorer"
],
"capabilities": {
"untrustedWorkspaces": {
@@ -45,14 +44,17 @@
"onCommand:codeQLDatabases.chooseDatabaseFolder",
"onCommand:codeQLDatabases.chooseDatabaseArchive",
"onCommand:codeQLDatabases.chooseDatabaseInternet",
"onCommand:codeQLDatabases.chooseDatabaseGithub",
"onCommand:codeQLDatabases.chooseDatabaseLgtm",
"onCommand:codeQL.setCurrentDatabase",
"onCommand:codeQL.viewAst",
"onCommand:codeQL.viewCfg",
"onCommand:codeQL.openReferencedFile",
"onCommand:codeQL.previewQueryHelp",
"onCommand:codeQL.chooseDatabaseFolder",
"onCommand:codeQL.chooseDatabaseArchive",
"onCommand:codeQL.chooseDatabaseInternet",
"onCommand:codeQL.chooseDatabaseGithub",
"onCommand:codeQL.chooseDatabaseLgtm",
"onCommand:codeQLDatabases.chooseDatabase",
"onCommand:codeQLDatabases.setCurrentDatabase",
@@ -133,7 +135,7 @@
"title": "CodeQL",
"properties": {
"codeQL.cli.executablePath": {
"scope": "window",
"scope": "machine-overridable",
"type": "string",
"default": "",
"markdownDescription": "Path to the CodeQL executable that should be used by the CodeQL extension. The executable is named `codeql` on Linux/Mac and `codeql.exe` on Windows. If empty, the extension will look for a CodeQL executable on your shell PATH, or if CodeQL is not on your PATH, download and manage its own CodeQL executable."
@@ -207,7 +209,8 @@
null
],
"default": null,
"description": "Path to a directory where the CodeQL extension should store query server logs. If empty, the extension stores logs in a temporary workspace folder and deletes the contents after each run."
"description": "Path to a directory where the CodeQL extension should store query server logs. If empty, the extension stores logs in a temporary workspace folder and deletes the contents after each run.",
"markdownDeprecationMessage": "This property is deprecated and no longer has any effect. All query logs are stored in the query history folder next to the query results."
},
"codeQL.runningQueries.quickEvalCodelens": {
"type": "boolean",
@@ -221,9 +224,15 @@
},
"codeQL.queryHistory.format": {
"type": "string",
"default": "%q on %d - %s, %r result count [%t]",
"default": "%q on %d - %s, %r [%t]",
"markdownDescription": "Default string for how to label query history items.\n* %t is the time of the query\n* %q is the human-readable query name\n* %f is the query file name\n* %d is the database name\n* %r is the number of results\n* %s is a status string"
},
"codeQL.queryHistory.ttl": {
"type": "number",
"default": 30,
"description": "Number of days to retain queries in the query history before being automatically deleted.",
"scope": "machine"
},
"codeQL.runningTests.additionalTestArguments": {
"scope": "window",
"type": "array",
@@ -250,7 +259,7 @@
"scope": "application",
"description": "Specifies whether or not to write telemetry events to the extension log."
},
"codeQL.remoteQueries.repositoryLists": {
"codeQL.variantAnalysis.repositoryLists": {
"type": [
"object",
null
@@ -264,14 +273,14 @@
}
},
"default": null,
"markdownDescription": "[For internal use only] Lists of GitHub repositories that you want to query remotely. This should be a JSON object where each key is a user-specified name for this repository list, and the value is an array of GitHub repositories (of the form `<owner>/<repo>`)."
"markdownDescription": "[For internal use only] Lists of GitHub repositories that you want to run variant analysis against. This should be a JSON object where each key is a user-specified name for this repository list, and the value is an array of GitHub repositories (of the form `<owner>/<repo>`)."
},
"codeQL.remoteQueries.controllerRepo": {
"codeQL.variantAnalysis.controllerRepo": {
"type": "string",
"default": "",
"pattern": "^$|^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+/[a-zA-Z0-9-_]+$",
"patternErrorMessage": "Please enter a valid GitHub repository",
"markdownDescription": "[For internal use only] The name of the GitHub repository where you can view the progress and results of the \"Run Remote query\" command. The repository should be of the form `<owner>/<repo>`)."
"markdownDescription": "[For internal use only] The name of the GitHub repository where you can view the progress and results of the \"Run Variant Analysis\" command. The repository should be of the form `<owner>/<repo>`)."
}
}
},
@@ -289,12 +298,12 @@
"title": "CodeQL: Run Query on Multiple Databases"
},
{
"command": "codeQL.runRemoteQuery",
"title": "CodeQL: Run Remote Query"
"command": "codeQL.runVariantAnalysis",
"title": "CodeQL: Run Variant Analysis"
},
{
"command": "codeQL.showFakeRemoteQueryResults",
"title": "CodeQL: [Internal] Show fake remote query results"
"command": "codeQL.exportVariantAnalysisResults",
"title": "CodeQL: Export Variant Analysis Results"
},
{
"command": "codeQL.runQueries",
@@ -352,6 +361,14 @@
"dark": "media/dark/cloud-download.svg"
}
},
{
"command": "codeQLDatabases.chooseDatabaseGithub",
"title": "Download Database from GitHub",
"icon": {
"light": "media/light/github.svg",
"dark": "media/dark/github.svg"
}
},
{
"command": "codeQLDatabases.chooseDatabaseLgtm",
"title": "Download from LGTM",
@@ -368,6 +385,10 @@
"command": "codeQL.viewAst",
"title": "CodeQL: View AST"
},
{
"command": "codeQL.viewCfg",
"title": "CodeQL: View CFG"
},
{
"command": "codeQL.upgradeCurrentDatabase",
"title": "CodeQL: Upgrade Current Database"
@@ -420,6 +441,10 @@
"command": "codeQL.chooseDatabaseInternet",
"title": "CodeQL: Download Database"
},
{
"command": "codeQL.chooseDatabaseGithub",
"title": "CodeQL: Download Database from GitHub"
},
{
"command": "codeQL.chooseDatabaseLgtm",
"title": "CodeQL: Download Database from LGTM"
@@ -496,6 +521,18 @@
"command": "codeQLQueryHistory.showQueryLog",
"title": "Show Query Log"
},
{
"command": "codeQLQueryHistory.openQueryDirectory",
"title": "Open query directory"
},
{
"command": "codeQLQueryHistory.showEvalLog",
"title": "Show Evaluator Log (Raw)"
},
{
"command": "codeQLQueryHistory.showEvalLogSummary",
"title": "Show Evaluator Log (Summary)"
},
{
"command": "codeQLQueryHistory.cancel",
"title": "Cancel"
@@ -504,6 +541,10 @@
"command": "codeQLQueryHistory.showQueryText",
"title": "Show Query Text"
},
{
"command": "codeQLQueryHistory.exportResults",
"title": "Export Results"
},
{
"command": "codeQLQueryHistory.viewCsvResults",
"title": "View Results (CSV)"
@@ -528,6 +569,10 @@
"command": "codeQLQueryHistory.compareWith",
"title": "Compare Results"
},
{
"command": "codeQLQueryHistory.openOnGithub",
"title": "Open Variant Analysis on GitHub"
},
{
"command": "codeQLQueryResults.nextPathStep",
"title": "CodeQL: Show Next Step on Path"
@@ -588,6 +633,11 @@
"when": "view == codeQLDatabases",
"group": "navigation"
},
{
"command": "codeQLDatabases.chooseDatabaseGithub",
"when": "config.codeQL.canary && view == codeQLDatabases",
"group": "navigation"
},
{
"command": "codeQLDatabases.chooseDatabaseLgtm",
"when": "view == codeQLDatabases",
@@ -668,7 +718,7 @@
{
"command": "codeQLQueryHistory.removeHistoryItem",
"group": "9_qlCommands",
"when": "viewItem == interpretedResultsItem || viewItem == rawResultsItem || viewItem == cancelledResultsItem"
"when": "viewItem == interpretedResultsItem || viewItem == rawResultsItem || viewItem == remoteResultsItem || viewItem == cancelledResultsItem"
},
{
"command": "codeQLQueryHistory.setLabel",
@@ -685,11 +735,31 @@
"group": "9_qlCommands",
"when": "viewItem == rawResultsItem || viewItem == interpretedResultsItem"
},
{
"command": "codeQLQueryHistory.openQueryDirectory",
"group": "9_qlCommands",
"when": "view == codeQLQueryHistory && !hasRemoteServer"
},
{
"command": "codeQLQueryHistory.showEvalLog",
"group": "9_qlCommands",
"when": "codeql.supportsEvalLog && viewItem == rawResultsItem || codeql.supportsEvalLog && viewItem == interpretedResultsItem || codeql.supportsEvalLog && viewItem == cancelledResultsItem"
},
{
"command": "codeQLQueryHistory.showEvalLogSummary",
"group": "9_qlCommands",
"when": "codeql.supportsEvalLog && viewItem == rawResultsItem || codeql.supportsEvalLog && viewItem == interpretedResultsItem || codeql.supportsEvalLog && viewItem == cancelledResultsItem"
},
{
"command": "codeQLQueryHistory.showQueryText",
"group": "9_qlCommands",
"when": "view == codeQLQueryHistory"
},
{
"command": "codeQLQueryHistory.exportResults",
"group": "9_qlCommands",
"when": "view == codeQLQueryHistory && viewItem == remoteResultsItem"
},
{
"command": "codeQLQueryHistory.viewCsvResults",
"group": "9_qlCommands",
@@ -713,7 +783,12 @@
{
"command": "codeQLQueryHistory.cancel",
"group": "9_qlCommands",
"when": "viewItem == inProgressResultsItem"
"when": "viewItem == inProgressResultsItem || viewItem == inProgressRemoteResultsItem"
},
{
"command": "codeQLQueryHistory.openOnGithub",
"group": "9_qlCommands",
"when": "viewItem == remoteResultsItem || viewItem == inProgressRemoteResultsItem || viewItem == cancelledResultsItem"
},
{
"command": "codeQLTests.showOutputDifferences",
@@ -737,6 +812,11 @@
"group": "9_qlCommands",
"when": "resourceScheme == codeql-zip-archive && !explorerResourceIsFolder && !listMultiSelection"
},
{
"command": "codeQL.viewCfg",
"group": "9_qlCommands",
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
},
{
"command": "codeQL.runQueries",
"group": "9_qlCommands",
@@ -767,11 +847,11 @@
"when": "resourceLangId == ql && resourceExtname == .ql"
},
{
"command": "codeQL.runRemoteQuery",
"command": "codeQL.runVariantAnalysis",
"when": "config.codeQL.canary && editorLangId == ql && resourceExtname == .ql"
},
{
"command": "codeQL.showFakeRemoteQueryResults",
"command": "codeQL.exportVariantAnalysisResults",
"when": "config.codeQL.canary"
},
{
@@ -798,6 +878,14 @@
"command": "codeQL.viewAst",
"when": "resourceScheme == codeql-zip-archive"
},
{
"command": "codeQL.viewCfg",
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
},
{
"command": "codeQL.chooseDatabaseGithub",
"when": "config.codeQL.canary"
},
{
"command": "codeQLDatabases.setCurrentDatabase",
"when": "false"
@@ -842,6 +930,10 @@
"command": "codeQLDatabases.chooseDatabaseInternet",
"when": "false"
},
{
"command": "codeQLDatabases.chooseDatabaseGithub",
"when": "false"
},
{
"command": "codeQLDatabases.chooseDatabaseLgtm",
"when": "false"
@@ -866,14 +958,34 @@
"command": "codeQLQueryHistory.showQueryLog",
"when": "false"
},
{
"command": "codeQLQueryHistory.showEvalLog",
"when": "false"
},
{
"command": "codeQLQueryHistory.showEvalLogSummary",
"when": "false"
},
{
"command": "codeQLQueryHistory.openQueryDirectory",
"when": "false"
},
{
"command": "codeQLQueryHistory.cancel",
"when": "false"
},
{
"command": "codeQLQueryHistory.openOnGithub",
"when": "false"
},
{
"command": "codeQLQueryHistory.showQueryText",
"when": "false"
},
{
"command": "codeQLQueryHistory.exportResults",
"when": "false"
},
{
"command": "codeQLQueryHistory.viewCsvResults",
"when": "false"
@@ -937,13 +1049,17 @@
"when": "editorLangId == ql && resourceExtname == .ql"
},
{
"command": "codeQL.runRemoteQuery",
"command": "codeQL.runVariantAnalysis",
"when": "config.codeQL.canary && editorLangId == ql && resourceExtname == .ql"
},
{
"command": "codeQL.viewAst",
"when": "resourceScheme == codeql-zip-archive"
},
{
"command": "codeQL.viewCfg",
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
},
{
"command": "codeQL.quickEval",
"when": "editorLangId == ql"
@@ -1002,6 +1118,8 @@
"build": "gulp",
"watch": "npm-run-all -p watch:*",
"watch:extension": "tsc --watch",
"watch:webpack": "gulp watchView",
"watch:css": "gulp watchCss",
"test": "mocha --exit -r ts-node/register test/pure-tests/**/*.ts",
"preintegration": "rm -rf ./out/vscode-tests && gulp",
"integration": "node ./out/vscode-tests/run-integration-tests.js no-workspace,minimal-workspace",
@@ -1014,19 +1132,22 @@
"dependencies": {
"@octokit/rest": "^18.5.6",
"@primer/octicons-react": "^16.3.0",
"@primer/react": "^34.3.0",
"@primer/react": "^35.0.0",
"child-process-promise": "^2.2.1",
"classnames": "~2.2.6",
"fs-extra": "^9.0.1",
"d3": "^6.3.1",
"d3-graphviz": "^2.6.1",
"fs-extra": "^10.0.1",
"glob-promise": "^3.4.0",
"js-yaml": "^3.14.0",
"minimist": "~1.2.5",
"minimist": "~1.2.6",
"nanoid": "^3.2.0",
"node-fetch": "~2.6.7",
"path-browserify": "^1.0.1",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"semver": "~7.3.2",
"source-map-support": "^0.5.21",
"stream": "^0.0.2",
"stream-chain": "~2.2.4",
"stream-json": "~1.7.3",
@@ -1040,25 +1161,27 @@
"vscode-languageclient": "^6.1.3",
"vscode-test-adapter-api": "~1.7.0",
"vscode-test-adapter-util": "~0.7.0",
"zip-a-folder": "~0.0.12"
"zip-a-folder": "~1.1.3"
},
"devDependencies": {
"@types/chai": "^4.1.7",
"@types/chai-as-promised": "~7.1.2",
"@types/child-process-promise": "^2.2.1",
"@types/classnames": "~2.2.9",
"@types/d3": "^6.2.0",
"@types/d3-graphviz": "^2.6.6",
"@types/del": "^4.0.0",
"@types/fs-extra": "^9.0.6",
"@types/glob": "^7.1.1",
"@types/google-protobuf": "^3.2.7",
"@types/gulp": "^4.0.9",
"@types/gulp-replace": "0.0.31",
"@types/gulp-replace": "^1.1.0",
"@types/gulp-sourcemaps": "0.0.32",
"@types/js-yaml": "^3.12.5",
"@types/jszip": "~3.1.6",
"@types/mocha": "^9.0.0",
"@types/nanoid": "^3.0.0",
"@types/node": "^12.14.1",
"@types/node": "^16.11.25",
"@types/node-fetch": "~2.5.2",
"@types/proxyquire": "~1.3.28",
"@types/react": "^17.0.2",
@@ -1073,7 +1196,7 @@
"@types/tmp": "^0.1.0",
"@types/unzipper": "~0.10.1",
"@types/vscode": "^1.59.0",
"@types/webpack": "^4.32.1",
"@types/webpack": "^5.28.0",
"@types/xml2js": "~0.4.4",
"@typescript-eslint/eslint-plugin": "^4.26.0",
"@typescript-eslint/parser": "^4.26.0",
@@ -1087,27 +1210,26 @@
"eslint-plugin-react": "~7.19.0",
"glob": "^7.1.4",
"gulp": "^4.0.2",
"gulp-replace": "^1.0.0",
"gulp-sourcemaps": "^2.6.5",
"gulp-replace": "^1.1.3",
"gulp-sourcemaps": "^3.0.0",
"gulp-typescript": "^5.0.1",
"husky": "~4.2.5",
"jsonc-parser": "^2.3.0",
"lint-staged": "~10.2.2",
"mocha": "^9.1.3",
"mocha-sinon": "~2.1.2",
"npm-run-all": "^4.1.5",
"prettier": "~2.0.5",
"proxyquire": "~2.1.3",
"sinon": "~9.0.0",
"sinon": "~13.0.1",
"sinon-chai": "~3.5.0",
"style-loader": "~0.23.1",
"through2": "^3.0.1",
"through2": "^4.0.2",
"ts-loader": "^8.1.0",
"ts-node": "^8.3.0",
"ts-node": "^10.7.0",
"ts-protoc-gen": "^0.9.0",
"typescript": "^4.3.2",
"typescript": "^4.5.5",
"typescript-formatter": "^7.2.2",
"vsce": "^1.65.0",
"vsce": "^2.7.0",
"vscode-test": "^1.4.0",
"webpack": "^5.28.0",
"webpack-cli": "^4.6.0"
@@ -1115,11 +1237,11 @@
"husky": {
"hooks": {
"pre-commit": "npm run format-staged",
"pre-push": "npm run lint"
"pre-push": "npm run lint && scripts/forbid-mocha-only"
}
},
"lint-staged": {
"./**/*.{json,css,scss,md}": [
"./**/*.{json,css,scss}": [
"prettier --write"
],
"./**/*.{ts,tsx}": [
@@ -1128,6 +1250,6 @@
]
},
"resolutions": {
"glob-parent": "~6.0.0"
"glob-parent": "6.0.0"
}
}
extensions/ql-vscode/scripts/forbid-mocha-only (new executable file, 6 lines)
@@ -0,0 +1,6 @@
if grep -rq --include '*.test.ts' 'it.only\|describe.only' './test' './src'; then
  echo 'There is a .only() in the tests. Please remove it.'
  exit 1;
else
  exit 0;
fi
extensions/ql-vscode/src/additional-typings.d.ts (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
/**
 * The d3 library is designed to work in both the browser and
 * node. Consequently their typings files refer to both node
 * types like `Buffer` (which don't exist in the browser), and browser
 * types like `Blob` (which don't exist in node). Instead of sticking
 * all of `dom` in `compilerOptions.lib`, it suffices just to put in a
 * stub definition of the affected types so that compilation
 * succeeds.
 */

declare type RequestInit = Record<string, unknown>;
declare type ElementTagNameMap = any;
declare type NodeListOf<T> = Record<string, T>;
declare type Node = Record<string, unknown>;
declare type XMLDocument = Record<string, unknown>;
@@ -10,7 +10,8 @@ import {
TextEditorSelectionChangeEvent,
TextEditorSelectionChangeKind,
Location,
Range
Range,
Uri
} from 'vscode';
import * as path from 'path';

@@ -104,7 +105,7 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
export class AstViewer extends DisposableObject {
private treeView: TreeView<AstItem>;
private treeDataProvider: AstViewerDataProvider;
private currentFile: string | undefined;
private currentFileUri: Uri | undefined;

constructor() {
super();
@@ -125,12 +126,12 @@ export class AstViewer extends DisposableObject {
this.push(window.onDidChangeTextEditorSelection(this.updateTreeSelection, this));
}

updateRoots(roots: AstItem[], db: DatabaseItem, fileName: string) {
updateRoots(roots: AstItem[], db: DatabaseItem, fileUri: Uri) {
this.treeDataProvider.roots = roots;
this.treeDataProvider.db = db;
this.treeDataProvider.refresh();
this.treeView.message = `AST for ${path.basename(fileName)}`;
this.currentFile = fileName;
this.treeView.message = `AST for ${path.basename(fileUri.fsPath)}`;
this.currentFileUri = fileUri;
// Handle error on reveal. This could happen if
// the tree view is disposed during the reveal.
this.treeView.reveal(roots[0], { focus: false })?.then(
@@ -174,7 +175,7 @@ export class AstViewer extends DisposableObject {

if (
this.treeView.visible &&
e.textEditor.document.uri.fsPath === this.currentFile &&
e.textEditor.document.uri.fsPath === this.currentFileUri?.fsPath &&
e.selections.length === 1
) {
const selection = e.selections[0];
@@ -199,6 +200,6 @@ export class AstViewer extends DisposableObject {
this.treeDataProvider.db = undefined;
this.treeDataProvider.refresh();
this.treeView.message = undefined;
this.currentFile = undefined;
this.currentFileUri = undefined;
}
}
@@ -3,11 +3,12 @@ import * as Octokit from '@octokit/rest';

const GITHUB_AUTH_PROVIDER_ID = 'github';

// 'repo' scope should be enough for triggering workflows. For a comprehensive list, see:
// We need 'repo' scope for triggering workflows and 'gist' scope for exporting results to Gist.
// For a comprehensive list of scopes, see:
// https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps
const SCOPES = ['repo'];
const SCOPES = ['repo', 'gist'];

/**
/**
 * Handles authentication to GitHub, using the VS Code [authentication API](https://code.visualstudio.com/api/references/vscode-api#authentication).
 */
export class Credentials {
@@ -18,6 +19,15 @@ export class Credentials {
// eslint-disable-next-line @typescript-eslint/no-empty-function
private constructor() { }

/**
 * Initializes an instance of credentials with an octokit instance.
 *
 * Do not call this method until you know you actually need an instance of credentials.
 * since calling this method will require the user to log in.
 *
 * @param context The extension context.
 * @returns An instance of credentials.
 */
static async initialize(context: vscode.ExtensionContext): Promise<Credentials> {
const c = new Credentials();
c.registerListeners(context);
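The new doc comment above stresses that `Credentials.initialize` should only be called once GitHub access is actually needed, because it can prompt the user to sign in. A minimal usage sketch (not part of this diff; the command name and the import path are assumptions) looks like this:

```typescript
import * as vscode from 'vscode';
import { Credentials } from './authentication'; // assumed module path; not shown in this comparison

// Hypothetical caller that defers the GitHub sign-in until it is actually needed.
async function runRemoteWork(context: vscode.ExtensionContext): Promise<void> {
  // May prompt the user to authorize the 'repo' and 'gist' scopes requested above.
  const credentials = await Credentials.initialize(context);
  // ... use the Octokit instance that `credentials` wraps to call the GitHub API ...
  void credentials;
}
```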
extensions/ql-vscode/src/blob.d.ts (vendored, 11 lines removed)
@@ -1,11 +0,0 @@
/**
 * The npm library jszip is designed to work in both the browser and
 * node. Consequently its typings @types/jszip refers to both node
 * types like `Buffer` (which don't exist in the browser), and browser
 * types like `Blob` (which don't exist in node). Instead of sticking
 * all of `dom` in `compilerOptions.lib`, it suffices just to put in a
 * stub definition of the type `Blob` here so that compilation
 * succeeds.
 */

declare type Blob = string;
@@ -1,6 +1,7 @@
import * as semver from 'semver';
import { runCodeQlCliCommand } from './cli';
import { Logger } from './logging';
import { getErrorMessage } from './pure/helpers-pure';

/**
 * Get the version of a CodeQL CLI.
@@ -18,7 +19,7 @@ export async function getCodeQlCliVersion(codeQlPath: string, logger: Logger): P
} catch (e) {
// Failed to run the version command. This might happen if the cli version is _really_ old, or it is corrupted.
// Either way, we can't determine compatibility.
void logger.log(`Failed to run 'codeql version'. Reason: ${e.message}`);
void logger.log(`Failed to run 'codeql version'. Reason: ${getErrorMessage(e)}`);
return undefined;
}
}
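This hunk (and several in `cli.ts` below) swaps direct `e.message` and `err.stack` accesses for `getErrorMessage` and `getErrorStack` from `./pure/helpers-pure`. The helpers themselves are not shown anywhere in this comparison, so the sketch below is only a guess at their shape, inferred from how the call sites use them:

```typescript
// Hypothetical sketch of the helpers imported from src/pure/helpers-pure.ts;
// the actual implementations are not part of this diff.
export function getErrorMessage(e: unknown): string {
  return e instanceof Error ? e.message : String(e);
}

export function getErrorStack(e: unknown): string {
  return e instanceof Error ? e.stack ?? '' : '';
}
```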
@@ -1,5 +1,6 @@
import * as cpp from 'child-process-promise';
import * as child_process from 'child_process';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as sarif from 'sarif';
import { SemVer } from 'semver';
@@ -7,17 +8,17 @@ import { Readable } from 'stream';
import { StringDecoder } from 'string_decoder';
import * as tk from 'tree-kill';
import { promisify } from 'util';
import { CancellationToken, Disposable, Uri } from 'vscode';
import { CancellationToken, commands, Disposable, Uri } from 'vscode';

import { BQRSInfo, DecodedBqrsChunk } from './pure/bqrs-cli-types';
import { CliConfig } from './config';
import { DistributionProvider, FindDistributionResultKind } from './distribution';
import { assertNever } from './pure/helpers-pure';
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
import { QueryMetadata, SortDirection } from './pure/interface-types';
import { Logger, ProgressReporter } from './logging';
import { CompilationMessage } from './pure/messages';
import { sarifParser } from './sarif-parser';
import { dbSchemeToLanguage } from './helpers';
import { dbSchemeToLanguage, walkDirectory } from './helpers';

/**
 * The version of the SARIF format that we are using.
@@ -345,7 +346,7 @@ export class CodeQLCliServer implements Disposable {
stderrBuffers.length == 0
? new Error(`${description} failed: ${err}`)
: new Error(`${description} failed: ${Buffer.concat(stderrBuffers).toString('utf8')}`);
newError.stack += (err.stack || '');
newError.stack += getErrorStack(err);
throw newError;
} finally {
void this.logger.log(Buffer.concat(stderrBuffers).toString('utf8'));
@@ -403,7 +404,7 @@ export class CodeQLCliServer implements Disposable {
try {
if (cancellationToken !== undefined) {
cancellationRegistration = cancellationToken.onCancellationRequested(_e => {
tk(child.pid);
tk(child.pid || 0);
});
}
if (logger !== undefined) {
@@ -447,7 +448,7 @@ export class CodeQLCliServer implements Disposable {
try {
yield JSON.parse(event) as EventType;
} catch (err) {
throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
}
}
}
@@ -502,7 +503,7 @@ export class CodeQLCliServer implements Disposable {
try {
return JSON.parse(result) as OutputType;
} catch (err) {
throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
}
}

@@ -514,8 +515,7 @@ export class CodeQLCliServer implements Disposable {
async resolveLibraryPath(workspaces: string[], queryPath: string): Promise<QuerySetup> {
const subcommandArgs = [
'--query', queryPath,
'--additional-packs',
workspaces.join(path.delimiter)
...this.getAdditionalPacksArg(workspaces)
];
return await this.runJsonCodeQlCliCommand<QuerySetup>(['resolve', 'library-path'], subcommandArgs, 'Resolving library paths');
}
@@ -528,8 +528,7 @@ export class CodeQLCliServer implements Disposable {
const subcommandArgs = [
'--format', 'bylanguage',
queryUri.fsPath,
'--additional-packs',
workspaces.join(path.delimiter)
...this.getAdditionalPacksArg(workspaces)
];
return JSON.parse(await this.runCodeQlCliCommand(['resolve', 'queries'], subcommandArgs, 'Resolving query by language'));
}
@@ -562,6 +561,17 @@ export class CodeQLCliServer implements Disposable {
|
||||
);
|
||||
}
|
||||

/**
* Issues an internal clear-cache command to the cli server. This
* command is used to clear the qlpack cache of the server.
*
* This cache is generally cleared every 1s. This method is used
* to force an early clearing of the cache.
*/
public async clearCache(): Promise<void> {
await this.runCodeQlCliCommand(['clear-cache'], [], 'Clearing qlpack cache');
}
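
A hedged usage sketch (the `cliServer` and `workspaceFolders` names are placeholders, not taken from this diff): forcing the qlpack cache to refresh immediately instead of waiting for the periodic clear.

// After qlpack.yml files change on disk, clear the server's qlpack cache
// eagerly so the next resolve sees the new packs.
await cliServer.clearCache();
const qlpacks = await cliServer.resolveQlpacks(workspaceFolders);
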
|
||||
/**
|
||||
* Runs QL tests.
|
||||
* @param testPaths Full paths of the tests to run.
|
||||
@@ -573,7 +583,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
): AsyncGenerator<TestCompleted, void, unknown> {
|
||||
|
||||
const subcommandArgs = this.cliConfig.additionalTestArguments.concat([
|
||||
'--additional-packs', workspaces.join(path.delimiter),
|
||||
...this.getAdditionalPacksArg(workspaces),
|
||||
'--threads',
|
||||
this.cliConfig.numberTestThreads.toString(),
|
||||
...testPaths
|
||||
@@ -595,8 +605,12 @@ export class CodeQLCliServer implements Disposable {
|
||||
|
||||
/** Resolves the ML models that should be available when evaluating a query. */
|
||||
async resolveMlModels(additionalPacks: string[]): Promise<MlModelsInfo> {
|
||||
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(['resolve', 'ml-models'], ['--additional-packs',
|
||||
additionalPacks.join(path.delimiter)], 'Resolving ML models', false);
|
||||
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(
|
||||
['resolve', 'ml-models'],
|
||||
this.getAdditionalPacksArg(additionalPacks),
|
||||
'Resolving ML models',
|
||||
false
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -651,6 +665,26 @@ export class CodeQLCliServer implements Disposable {
|
||||
return await this.runCodeQlCliCommand(['generate', 'query-help'], subcommandArgs, `Generating qhelp in markdown format at ${outputDirectory}`);
|
||||
}
|
||||

/**
* Generate a summary of an evaluation log.
* @param inputPath The path of an evaluation event log.
* @param outputPath The path to write a human-readable summary of it to.
* @param endSummaryPath The path to write only the end of query part of the human-readable summary to.
*/
async generateLogSummary(
inputPath: string,
outputPath: string,
endSummaryPath: string,
): Promise<string> {
const subcommandArgs = [
'--format=text',
`--end-summary=${endSummaryPath}`,
inputPath,
outputPath
];
return await this.runCodeQlCliCommand(['generate', 'log-summary'], subcommandArgs, 'Generating log summary');
}
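
A usage sketch under assumed paths (none of these file names come from the diff):

// Turn a structured evaluator log into a human-readable summary plus a
// separate end-of-query summary file.
const summary = await cliServer.generateLogSummary(
  '/tmp/query-run/evaluator-log.jsonl',    // inputPath
  '/tmp/query-run/evaluator-log.summary',  // outputPath
  '/tmp/query-run/evaluator-log.end'       // endSummaryPath
);
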
|
||||
/**
|
||||
* Gets the results from a bqrs.
|
||||
* @param bqrsPath The path to the bqrs.
|
||||
@@ -674,20 +708,13 @@ export class CodeQLCliServer implements Disposable {
|
||||
return await this.runJsonCodeQlCliCommand<DecodedBqrsChunk>(['bqrs', 'decode'], subcommandArgs, 'Reading bqrs data');
|
||||
}
|
||||
|
||||
async runInterpretCommand(format: string, metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo) {
|
||||
async runInterpretCommand(format: string, additionalArgs: string[], metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo) {
|
||||
const args = [
|
||||
'--output', interpretedResultsPath,
|
||||
'--format', format,
|
||||
// Forward all of the query metadata.
|
||||
...Object.entries(metadata).map(([key, value]) => `-t=${key}=${value}`)
|
||||
];
|
||||
if (format == SARIF_FORMAT) {
|
||||
// TODO: This flag means that we don't group interpreted results
|
||||
// by primary location. We may want to revisit whether we call
|
||||
// interpretation with and without this flag, or do some
|
||||
// grouping client-side.
|
||||
args.push('--no-group-results');
|
||||
}
|
||||
].concat(additionalArgs);
|
||||
if (sourceInfo !== undefined) {
|
||||
args.push(
|
||||
'--source-archive', sourceInfo.sourceArchive,
|
||||
@@ -709,13 +736,47 @@ export class CodeQLCliServer implements Disposable {
|
||||
await this.runCodeQlCliCommand(['bqrs', 'interpret'], args, 'Interpreting query results');
|
||||
}
|
||||
|
||||
async interpretBqrs(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<sarif.Log> {
|
||||
await this.runInterpretCommand(SARIF_FORMAT, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
||||
async interpretBqrsSarif(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<sarif.Log> {
|
||||
const additionalArgs = [
|
||||
// TODO: This flag means that we don't group interpreted results
|
||||
// by primary location. We may want to revisit whether we call
|
||||
// interpretation with and without this flag, or do some
|
||||
// grouping client-side.
|
||||
'--no-group-results'
|
||||
];
|
||||
|
||||
await this.runInterpretCommand(SARIF_FORMAT, additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
||||
return await sarifParser(interpretedResultsPath);
|
||||
}
|
||||
|
||||
// Warning: this function is untenable for large dot files.
|
||||
async readDotFiles(dir: string): Promise<string[]> {
|
||||
const dotFiles: Promise<string>[] = [];
|
||||
for await (const file of walkDirectory(dir)) {
|
||||
if (file.endsWith('.dot')) {
|
||||
dotFiles.push(fs.readFile(file, 'utf8'));
|
||||
}
|
||||
}
|
||||
return Promise.all(dotFiles);
|
||||
}
|
||||
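
`walkDirectory` comes from helpers.ts and its implementation is not part of this comparison; an async generator along these lines would satisfy the loop above (a sketch, not the extension's actual code):

import * as fs from 'fs-extra';
import * as path from 'path';

// Recursively yield every file path under `dir`.
export async function* walkDirectory(dir: string): AsyncIterableIterator<string> {
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const resolved = path.resolve(dir, entry.name);
    if (entry.isDirectory()) {
      yield* walkDirectory(resolved);
    } else {
      yield resolved;
    }
  }
}
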
|
||||
async interpretBqrsGraph(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<string[]> {
|
||||
const additionalArgs = sourceInfo
|
||||
? ['--dot-location-url-format', 'file://' + sourceInfo.sourceLocationPrefix + '{path}:{start:line}:{start:column}:{end:line}:{end:column}']
|
||||
: [];
|
||||
|
||||
await this.runInterpretCommand('dot', additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
||||
|
||||
try {
|
||||
const dot = await this.readDotFiles(interpretedResultsPath);
|
||||
return dot;
|
||||
} catch (err) {
|
||||
throw new Error(`Reading output of interpretation failed: ${getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
async generateResultsCsv(metadata: QueryMetadata, resultsPath: string, csvPath: string, sourceInfo?: SourceInfo): Promise<void> {
|
||||
await this.runInterpretCommand(CSV_FORMAT, metadata, resultsPath, csvPath, sourceInfo);
|
||||
await this.runInterpretCommand(CSV_FORMAT, [], metadata, resultsPath, csvPath, sourceInfo);
|
||||
}
|
||||
|
||||
async sortBqrs(resultsPath: string, sortedResultsPath: string, resultSet: string, sortKeys: number[], sortDirections: SortDirection[]): Promise<void> {
|
||||
@@ -761,7 +822,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
* @returns A list of database upgrade script directories
|
||||
*/
|
||||
async resolveUpgrades(dbScheme: string, searchPath: string[], allowDowngradesIfPossible: boolean, targetDbScheme?: string): Promise<UpgradesInfo> {
|
||||
const args = ['--additional-packs', searchPath.join(path.delimiter), '--dbscheme', dbScheme];
|
||||
const args = [...this.getAdditionalPacksArg(searchPath), '--dbscheme', dbScheme];
|
||||
if (targetDbScheme) {
|
||||
args.push('--target-dbscheme', targetDbScheme);
|
||||
if (allowDowngradesIfPossible && await this.cliConstraints.supportsDowngrades()) {
|
||||
@@ -783,7 +844,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
* @returns A dictionary mapping qlpack name to the directory it comes from
|
||||
*/
|
||||
resolveQlpacks(additionalPacks: string[], searchPath?: string[]): Promise<QlpacksInfo> {
|
||||
const args = ['--additional-packs', additionalPacks.join(path.delimiter)];
|
||||
const args = this.getAdditionalPacksArg(additionalPacks);
|
||||
if (searchPath?.length) {
|
||||
args.push('--search-path', path.join(...searchPath));
|
||||
}
|
||||
@@ -829,7 +890,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
* @returns A list of query files found.
|
||||
*/
|
||||
async resolveQueriesInSuite(suite: string, additionalPacks: string[], searchPath?: string[]): Promise<string[]> {
|
||||
const args = ['--additional-packs', additionalPacks.join(path.delimiter)];
|
||||
const args = this.getAdditionalPacksArg(additionalPacks);
|
||||
if (searchPath !== undefined) {
|
||||
args.push('--search-path', path.join(...searchPath));
|
||||
}
|
||||
@@ -862,8 +923,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
'-o',
|
||||
outputPath,
|
||||
dir,
|
||||
'--additional-packs',
|
||||
workspaceFolders.join(path.delimiter)
|
||||
...this.getAdditionalPacksArg(workspaceFolders)
|
||||
];
|
||||
if (!precompile && await this.cliConstraints.supportsNoPrecompile()) {
|
||||
args.push('--no-precompile');
|
||||
@@ -900,6 +960,10 @@ export class CodeQLCliServer implements Disposable {
|
||||
public async getVersion() {
|
||||
if (!this._version) {
|
||||
this._version = await this.refreshVersion();
|
||||
// this._version is only undefined upon config change, so we reset CLI-based context key only when necessary.
|
||||
await commands.executeCommand(
|
||||
'setContext', 'codeql.supportsEvalLog', await this.cliConstraints.supportsPerQueryEvalLog()
|
||||
);
|
||||
}
|
||||
return this._version;
|
||||
}
|
||||
@@ -918,6 +982,12 @@ export class CodeQLCliServer implements Disposable {
|
||||
throw new Error('No distribution found');
|
||||
}
|
||||
}
|
||||

private getAdditionalPacksArg(paths: string[]): string[] {
return paths.length
? ['--additional-packs', paths.join(path.delimiter)]
: [];
}
}
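
Each call site that previously built `'--additional-packs', workspaces.join(path.delimiter)` by hand now goes through this helper, so an empty workspace list no longer emits the flag with an empty value. Roughly (path.delimiter is ':' on Linux/macOS and ';' on Windows):

// Expected behaviour, shown as comments:
// getAdditionalPacksArg(['/w1', '/w2'])  =>  ['--additional-packs', '/w1:/w2']
// getAdditionalPacksArg([])              =>  []   (flag omitted entirely)
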
|
||||
/**
|
||||
@@ -1004,7 +1074,7 @@ export async function runCodeQlCliCommand(
|
||||
void logger.log('CLI command succeeded.');
|
||||
return result.stdout;
|
||||
} catch (err) {
|
||||
throw new Error(`${description} failed: ${err.stderr || err}`);
|
||||
throw new Error(`${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1060,8 +1130,8 @@ class SplitBuffer {
|
||||
while (this.searchIndex <= (this.buffer.length - this.maxSeparatorLength)) {
|
||||
for (const separator of this.separators) {
|
||||
if (SplitBuffer.startsWith(this.buffer, separator, this.searchIndex)) {
|
||||
const line = this.buffer.substr(0, this.searchIndex);
|
||||
this.buffer = this.buffer.substr(this.searchIndex + separator.length);
|
||||
const line = this.buffer.slice(0, this.searchIndex);
|
||||
this.buffer = this.buffer.slice(this.searchIndex + separator.length);
|
||||
this.searchIndex = 0;
|
||||
return line;
|
||||
}
|
||||
@@ -1185,7 +1255,7 @@ export class CliVersionConstraint {
|
||||
public static CLI_VERSION_WITH_NO_PRECOMPILE = new SemVer('2.7.1');
|
||||
|
||||
/**
|
||||
* CLI version where remote queries are supported.
|
||||
* CLI version where remote queries (variant analysis) are supported.
|
||||
*/
|
||||
public static CLI_VERSION_REMOTE_QUERIES = new SemVer('2.6.3');
|
||||
|
||||
@@ -1204,6 +1274,23 @@ export class CliVersionConstraint {
|
||||
*/
|
||||
public static CLI_VERSION_WITH_PACKAGING = new SemVer('2.6.0');
|
||||
|
||||
/**
|
||||
* CLI version where the `--evaluator-log` and related options to the query server were introduced,
|
||||
* on a per-query server basis.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_STRUCTURED_EVAL_LOG = new SemVer('2.8.2');
|
||||
|
||||
/**
|
||||
* CLI version that supports rotating structured logs to produce one per query.
|
||||
*
|
||||
* Note that 2.8.4 supports generating the evaluation logs and summaries,
|
||||
* but 2.9.0 includes a new option to produce the end-of-query summary logs to
|
||||
* the query server console. For simplicity we gate all features behind 2.9.0,
|
||||
* but if a user is tied to the 2.8 release, we can enable evaluator logs
|
||||
* and summaries for them.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_PER_QUERY_EVAL_LOG = new SemVer('2.9.0');
|
||||
|
||||
constructor(private readonly cli: CodeQLCliServer) {
|
||||
/**/
|
||||
}
|
||||
@@ -1259,4 +1346,12 @@ export class CliVersionConstraint {
|
||||
async supportsPackaging() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PACKAGING);
|
||||
}
|
||||
|
||||
async supportsStructuredEvalLog() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_STRUCTURED_EVAL_LOG);
|
||||
}
|
||||
|
||||
async supportsPerQueryEvalLog() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG);
|
||||
}
|
||||
}
|
||||
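
The new constants follow the class's existing pattern of gating CLI features on a minimum version. `isVersionAtLeast` is not shown in this comparison; the check presumably looks something like the sketch below, using the `semver` package that cli.ts already imports.

// Sketch of the gating pattern (assumed, not taken from this diff):
private async isVersionAtLeast(v: SemVer): Promise<boolean> {
  return (await this.cli.getVersion()).compare(v) >= 0;
}
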
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
} from 'vscode';
|
||||
import { showAndLogErrorMessage, showAndLogWarningMessage } from './helpers';
|
||||
import { logger } from './logging';
|
||||
import { getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { telemetryListener } from './telemetry';
|
||||
|
||||
export class UserCancellationException extends Error {
|
||||
@@ -121,8 +122,9 @@ export function commandRunner(
|
||||
try {
|
||||
return await task(...args);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
const errorMessage = `${e.message || e} (${commandId})`;
|
||||
const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
|
||||
error = e instanceof Error ? e : new Error(errorMessage);
|
||||
const errorStack = getErrorStack(e);
|
||||
if (e instanceof UserCancellationException) {
|
||||
// User has cancelled this action manually
|
||||
if (e.silent) {
|
||||
@@ -132,8 +134,8 @@ export function commandRunner(
|
||||
}
|
||||
} else {
|
||||
// Include the full stack in the error log only.
|
||||
const fullMessage = e.stack
|
||||
? `${errorMessage}\n${e.stack}`
|
||||
const fullMessage = errorStack
|
||||
? `${errorMessage}\n${errorStack}`
|
||||
: errorMessage;
|
||||
void showAndLogErrorMessage(errorMessage, {
|
||||
fullMessage
|
||||
@@ -160,7 +162,8 @@ export function commandRunner(
|
||||
export function commandRunnerWithProgress<R>(
|
||||
commandId: string,
|
||||
task: ProgressTask<R>,
|
||||
progressOptions: Partial<ProgressOptions>
|
||||
progressOptions: Partial<ProgressOptions>,
|
||||
outputLogger = logger
|
||||
): Disposable {
|
||||
return commands.registerCommand(commandId, async (...args: any[]) => {
|
||||
const startTime = Date.now();
|
||||
@@ -172,21 +175,23 @@ export function commandRunnerWithProgress<R>(
|
||||
try {
|
||||
return await withProgress(progressOptionsWithDefaults, task, ...args);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
const errorMessage = `${e.message || e} (${commandId})`;
|
||||
const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
|
||||
error = e instanceof Error ? e : new Error(errorMessage);
|
||||
const errorStack = getErrorStack(e);
|
||||
if (e instanceof UserCancellationException) {
|
||||
// User has cancelled this action manually
|
||||
if (e.silent) {
|
||||
void logger.log(errorMessage);
|
||||
void outputLogger.log(errorMessage);
|
||||
} else {
|
||||
void showAndLogWarningMessage(errorMessage);
|
||||
void showAndLogWarningMessage(errorMessage, { outputLogger });
|
||||
}
|
||||
} else {
|
||||
// Include the full stack in the error log only.
|
||||
const fullMessage = e.stack
|
||||
? `${errorMessage}\n${e.stack}`
|
||||
const fullMessage = errorStack
|
||||
? `${errorMessage}\n${errorStack}`
|
||||
: errorMessage;
|
||||
void showAndLogErrorMessage(errorMessage, {
|
||||
outputLogger,
|
||||
fullMessage
|
||||
});
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
import { tmpDir } from '../run-queries';
|
||||
import { tmpDir } from '../helpers';
|
||||
import {
|
||||
FromCompareViewMessage,
|
||||
ToCompareViewMessage,
|
||||
@@ -20,11 +20,13 @@ import { DatabaseManager } from '../databases';
|
||||
import { getHtmlForWebview, jumpToLocation } from '../interface-utils';
|
||||
import { transformBqrsResultSet, RawResultSet, BQRSInfo } from '../pure/bqrs-cli-types';
|
||||
import resultsDiff from './resultsDiff';
|
||||
import { FullCompletedQueryInfo } from '../query-results';
|
||||
import { CompletedLocalQueryInfo } from '../query-results';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
import { HistoryItemLabelProvider } from '../history-item-label-provider';
|
||||
|
||||
interface ComparePair {
|
||||
from: FullCompletedQueryInfo;
|
||||
to: FullCompletedQueryInfo;
|
||||
from: CompletedLocalQueryInfo;
|
||||
to: CompletedLocalQueryInfo;
|
||||
}
|
||||
|
||||
export class CompareInterfaceManager extends DisposableObject {
|
||||
@@ -38,16 +40,17 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
private databaseManager: DatabaseManager,
|
||||
private cliServer: CodeQLCliServer,
|
||||
private logger: Logger,
|
||||
private labelProvider: HistoryItemLabelProvider,
|
||||
private showQueryResultsCallback: (
|
||||
item: FullCompletedQueryInfo
|
||||
item: CompletedLocalQueryInfo
|
||||
) => Promise<void>
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
async showResults(
|
||||
from: FullCompletedQueryInfo,
|
||||
to: FullCompletedQueryInfo,
|
||||
from: CompletedLocalQueryInfo,
|
||||
to: CompletedLocalQueryInfo,
|
||||
selectedResultSetName?: string
|
||||
) {
|
||||
this.comparePair = { from, to };
|
||||
@@ -70,7 +73,7 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
try {
|
||||
rows = this.compareResults(fromResultSet, toResultSet);
|
||||
} catch (e) {
|
||||
message = e.message;
|
||||
message = getErrorMessage(e);
|
||||
}
|
||||
|
||||
await this.postMessage({
|
||||
@@ -80,12 +83,12 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
// since we split the description into several rows
|
||||
// only run interpolation if the label is user-defined
|
||||
// otherwise we will wind up with duplicated rows
|
||||
name: from.getShortLabel(),
|
||||
name: this.labelProvider.getShortLabel(from),
|
||||
status: from.completedQuery.statusString,
|
||||
time: from.startTime,
|
||||
},
|
||||
toQuery: {
|
||||
name: to.getShortLabel(),
|
||||
name: this.labelProvider.getShortLabel(to),
|
||||
status: to.completedQuery.statusString,
|
||||
time: to.startTime,
|
||||
},
|
||||
@@ -188,8 +191,8 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
}
|
||||
|
||||
private async findCommonResultSetNames(
|
||||
from: FullCompletedQueryInfo,
|
||||
to: FullCompletedQueryInfo,
|
||||
from: CompletedLocalQueryInfo,
|
||||
to: CompletedLocalQueryInfo,
|
||||
selectedResultSetName: string | undefined
|
||||
): Promise<[string[], string, RawResultSet, RawResultSet]> {
|
||||
const fromSchemas = await this.cliServer.bqrsInfo(
|
||||
|
||||
@@ -4,10 +4,7 @@
|
||||
"moduleResolution": "node",
|
||||
"target": "es6",
|
||||
"outDir": "out",
|
||||
"lib": [
|
||||
"es6",
|
||||
"dom"
|
||||
],
|
||||
"lib": ["ES2021", "dom"],
|
||||
"jsx": "react",
|
||||
"sourceMap": true,
|
||||
"rootDir": "..",
|
||||
@@ -17,7 +14,5 @@
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"experimentalDecorators": true
|
||||
},
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import { DisposableObject } from './pure/disposable-object';
|
||||
import { workspace, Event, EventEmitter, ConfigurationChangeEvent, ConfigurationTarget } from 'vscode';
|
||||
import { DistributionManager } from './distribution';
|
||||
import { logger } from './logging';
|
||||
import { ONE_DAY_IN_MS } from './pure/helpers-pure';
|
||||
|
||||
/** Helper class to look up a labelled (and possibly nested) setting. */
|
||||
export class Setting {
|
||||
@@ -54,8 +55,11 @@ const DISTRIBUTION_SETTING = new Setting('cli', ROOT_SETTING);
|
||||
export const CUSTOM_CODEQL_PATH_SETTING = new Setting('executablePath', DISTRIBUTION_SETTING);
|
||||
const INCLUDE_PRERELEASE_SETTING = new Setting('includePrerelease', DISTRIBUTION_SETTING);
|
||||
const PERSONAL_ACCESS_TOKEN_SETTING = new Setting('personalAccessToken', DISTRIBUTION_SETTING);
|
||||
|
||||
// Query History configuration
|
||||
const QUERY_HISTORY_SETTING = new Setting('queryHistory', ROOT_SETTING);
|
||||
const QUERY_HISTORY_FORMAT_SETTING = new Setting('format', QUERY_HISTORY_SETTING);
|
||||
const QUERY_HISTORY_TTL = new Setting('format', QUERY_HISTORY_SETTING);
|
||||
|
||||
/** When these settings change, the distribution should be updated. */
|
||||
const DISTRIBUTION_CHANGE_SETTINGS = [CUSTOM_CODEQL_PATH_SETTING, INCLUDE_PRERELEASE_SETTING, PERSONAL_ACCESS_TOKEN_SETTING];
|
||||
@@ -71,7 +75,6 @@ export interface DistributionConfig {
|
||||
}
|
||||
|
||||
// Query server configuration
|
||||
|
||||
const RUNNING_QUERIES_SETTING = new Setting('runningQueries', ROOT_SETTING);
|
||||
const NUMBER_OF_THREADS_SETTING = new Setting('numberOfThreads', RUNNING_QUERIES_SETTING);
|
||||
const SAVE_CACHE_SETTING = new Setting('saveCache', RUNNING_QUERIES_SETTING);
|
||||
@@ -91,7 +94,10 @@ export const PAGE_SIZE = new Setting('pageSize', RESULTS_DISPLAY_SETTING);
|
||||
const CUSTOM_LOG_DIRECTORY_SETTING = new Setting('customLogDirectory', RUNNING_QUERIES_SETTING);
|
||||
|
||||
/** When these settings change, the running query server should be restarted. */
|
||||
const QUERY_SERVER_RESTARTING_SETTINGS = [NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING, DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING];
|
||||
const QUERY_SERVER_RESTARTING_SETTINGS = [
|
||||
NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING,
|
||||
DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING,
|
||||
];
|
||||
|
||||
export interface QueryServerConfig {
|
||||
codeQlPath: string;
|
||||
@@ -106,10 +112,11 @@ export interface QueryServerConfig {
|
||||
}
|
||||
|
||||
/** When these settings change, the query history should be refreshed. */
|
||||
const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING];
|
||||
const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING, QUERY_HISTORY_TTL];
|
||||
|
||||
export interface QueryHistoryConfig {
|
||||
format: string;
|
||||
ttlInMillis: number;
|
||||
onDidChangeConfiguration: Event<void>;
|
||||
}
|
||||
|
||||
@@ -251,6 +258,13 @@ export class QueryHistoryConfigListener extends ConfigListener implements QueryH
|
||||
public get format(): string {
|
||||
return QUERY_HISTORY_FORMAT_SETTING.getValue<string>();
|
||||
}
|
||||

/**
* The configuration value is in days, but it is returned in milliseconds to make it easier to use.
*/
public get ttlInMillis(): number {
return (QUERY_HISTORY_TTL.getValue<number>() || 30) * ONE_DAY_IN_MS;
}
}
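
As a quick sanity check of the conversion (the exact value of ONE_DAY_IN_MS is assumed here to be one day in milliseconds):

const ONE_DAY_IN_MS = 24 * 60 * 60 * 1000;      // 86_400_000, assumed helper value
const defaultTtlInMillis = 30 * ONE_DAY_IN_MS;  // 2_592_000_000 ms for the 30-day default
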
|
||||
export class CliConfigListener extends ConfigListener implements CliConfig {
|
||||
@@ -308,11 +322,11 @@ export function isCanary() {
|
||||
*/
|
||||
export const NO_CACHE_AST_VIEWER = new Setting('disableCache', AST_VIEWER_SETTING);
|
||||
|
||||
// Settings for remote queries
|
||||
const REMOTE_QUERIES_SETTING = new Setting('remoteQueries', ROOT_SETTING);
|
||||
// Settings for variant analysis
|
||||
const REMOTE_QUERIES_SETTING = new Setting('variantAnalysis', ROOT_SETTING);
|
||||
|
||||
/**
|
||||
* Lists of GitHub repositories that you want to query remotely via the "Run Remote query" command.
|
||||
* Lists of GitHub repositories that you want to query remotely via the "Run Variant Analysis" command.
|
||||
* Note: This command is only available for internal users.
|
||||
*
|
||||
* This setting should be a JSON object where each key is a user-specified name (string),
|
||||
@@ -329,7 +343,7 @@ export async function setRemoteRepositoryLists(lists: Record<string, string[]> |
|
||||
}
|
||||
|
||||
/**
|
||||
* The name of the "controller" repository that you want to use with the "Run Remote query" command.
|
||||
* The name of the "controller" repository that you want to use with the "Run Variant Analysis" command.
|
||||
* Note: This command is only available for internal users.
|
||||
*
|
||||
* This setting should be a GitHub repository of the form `<owner>/<repo>`.
|
||||
@@ -345,13 +359,16 @@ export async function setRemoteControllerRepo(repo: string | undefined) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether to insecurely load ML models from CodeQL packs.
|
||||
*
|
||||
* This setting is for internal users only.
|
||||
* The branch of "github/codeql-variant-analysis-action" to use with the "Run Variant Analysis" command.
|
||||
* Default value is "main".
|
||||
* Note: This command is only available for internal users.
|
||||
*/
|
||||
const SHOULD_INSECURELY_LOAD_MODELS_FROM_PACKS =
|
||||
new Setting('shouldInsecurelyLoadModelsFromPacks', RUNNING_QUERIES_SETTING);
|
||||
const ACTION_BRANCH = new Setting('actionBranch', REMOTE_QUERIES_SETTING);
|
||||
|
||||
export function shouldInsecurelyLoadMlModelsFromPacks(): boolean {
|
||||
return SHOULD_INSECURELY_LOAD_MODELS_FROM_PACKS.getValue<boolean>();
|
||||
export function getActionBranch(): string {
|
||||
return ACTION_BRANCH.getValue<string>() || 'main';
|
||||
}
|
||||
|
||||
export function isIntegrationTestMode() {
|
||||
return process.env.INTEGRATION_TEST_MODE === 'true';
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import { DecodedBqrsChunk, BqrsId, EntityValue } from '../pure/bqrs-cli-types';
|
||||
import { DatabaseItem } from '../databases';
|
||||
import { ChildAstItem, AstItem } from '../astViewer';
|
||||
import fileRangeFromURI from './fileRangeFromURI';
|
||||
import { Uri } from 'vscode';
|
||||
|
||||
/**
|
||||
* A class that wraps a tree of QL results from a query that
|
||||
@@ -17,7 +18,7 @@ export default class AstBuilder {
|
||||
queryResults: QueryWithResults,
|
||||
private cli: CodeQLCliServer,
|
||||
public db: DatabaseItem,
|
||||
public fileName: string
|
||||
public fileName: Uri
|
||||
) {
|
||||
this.bqrsPath = queryResults.query.resultsPaths.resultsPath;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ export enum KeyType {
|
||||
DefinitionQuery = 'DefinitionQuery',
|
||||
ReferenceQuery = 'ReferenceQuery',
|
||||
PrintAstQuery = 'PrintAstQuery',
|
||||
PrintCfgQuery = 'PrintCfgQuery',
|
||||
}
|
||||
|
||||
export function tagOfKeyType(keyType: KeyType): string {
|
||||
@@ -12,6 +13,8 @@ export function tagOfKeyType(keyType: KeyType): string {
|
||||
return 'ide-contextual-queries/local-references';
|
||||
case KeyType.PrintAstQuery:
|
||||
return 'ide-contextual-queries/print-ast';
|
||||
case KeyType.PrintCfgQuery:
|
||||
return 'ide-contextual-queries/print-cfg';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,6 +26,8 @@ export function nameOfKeyType(keyType: KeyType): string {
|
||||
return 'references';
|
||||
case KeyType.PrintAstQuery:
|
||||
return 'print AST';
|
||||
case KeyType.PrintCfgQuery:
|
||||
return 'print CFG';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,6 +37,7 @@ export function kindOfKeyType(keyType: KeyType): string {
|
||||
case KeyType.ReferenceQuery:
|
||||
return 'definitions';
|
||||
case KeyType.PrintAstQuery:
|
||||
case KeyType.PrintCfgQuery:
|
||||
return 'graph';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ export interface FullLocationLink extends LocationLink {
|
||||
* @param dbm The database manager
|
||||
* @param uriString The selected source file and location
|
||||
* @param keyType The contextual query type to run
|
||||
* @param queryStorageDir The directory to store the query results
|
||||
* @param progress A progress callback
|
||||
* @param token A CancellationToken
|
||||
* @param filter A function that will filter extraneous results
|
||||
@@ -38,6 +39,7 @@ export async function getLocationsForUriString(
|
||||
dbm: DatabaseManager,
|
||||
uriString: string,
|
||||
keyType: KeyType,
|
||||
queryStorageDir: string,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
filter: (src: string, dest: string) => boolean
|
||||
@@ -69,6 +71,7 @@ export async function getLocationsForUriString(
|
||||
qs,
|
||||
db,
|
||||
initialInfo,
|
||||
queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
templates
|
||||
|
||||
@@ -10,7 +10,6 @@ import {
|
||||
TextDocument,
|
||||
Uri
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
import { decodeSourceArchiveUri, encodeArchiveBasePath, zipArchiveScheme } from '../archive-filesystem-provider';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
@@ -42,6 +41,7 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<LocationLink[]>(this.getDefinitions.bind(this));
|
||||
}
|
||||
@@ -69,6 +69,7 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
|
||||
this.dbm,
|
||||
uriString,
|
||||
KeyType.DefinitionQuery,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
(src, _dest) => src === uriString
|
||||
@@ -84,6 +85,7 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<FullLocationLink[]>(this.getReferences.bind(this));
|
||||
}
|
||||
@@ -116,6 +118,7 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
|
||||
this.dbm,
|
||||
uriString,
|
||||
KeyType.DefinitionQuery,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
(src, _dest) => src === uriString
|
||||
@@ -136,6 +139,7 @@ export class TemplatePrintAstProvider {
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<QueryWithDb>(this.getAst.bind(this));
|
||||
}
|
||||
@@ -155,7 +159,7 @@ export class TemplatePrintAstProvider {
|
||||
return new AstBuilder(
|
||||
query, this.cli,
|
||||
this.dbm.findDatabaseItem(dbUri)!,
|
||||
path.basename(fileUri.fsPath),
|
||||
fileUri,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -216,6 +220,7 @@ export class TemplatePrintAstProvider {
|
||||
this.qs,
|
||||
db,
|
||||
initialInfo,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
templates
|
||||
@@ -224,3 +229,62 @@ export class TemplatePrintAstProvider {
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class TemplatePrintCfgProvider {
|
||||
private cache: CachedOperation<[Uri, messages.TemplateDefinitions] | undefined>;
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private dbm: DatabaseManager,
|
||||
) {
|
||||
this.cache = new CachedOperation<[Uri, messages.TemplateDefinitions] | undefined>(this.getCfgUri.bind(this));
|
||||
}
|
||||
|
||||
async provideCfgUri(document?: TextDocument): Promise<[Uri, messages.TemplateDefinitions] | undefined> {
|
||||
if (!document) {
|
||||
return;
|
||||
}
|
||||
return await this.cache.get(document.uri.toString());
|
||||
}
|
||||
|
||||
private async getCfgUri(uriString: string): Promise<[Uri, messages.TemplateDefinitions]> {
|
||||
const uri = Uri.parse(uriString, true);
|
||||
if (uri.scheme !== zipArchiveScheme) {
|
||||
throw new Error('CFG Viewing is only available for databases with zipped source archives.');
|
||||
}
|
||||
|
||||
const zippedArchive = decodeSourceArchiveUri(uri);
|
||||
const sourceArchiveUri = encodeArchiveBasePath(zippedArchive.sourceArchiveZipPath);
|
||||
const db = this.dbm.findDatabaseItemBySourceArchive(sourceArchiveUri);
|
||||
|
||||
if (!db) {
|
||||
throw new Error('Can\'t infer database from the provided source.');
|
||||
}
|
||||
|
||||
const qlpack = await qlpackOfDatabase(this.cli, db);
|
||||
if (!qlpack) {
|
||||
throw new Error('Can\'t infer qlpack from database source archive.');
|
||||
}
|
||||
const queries = await resolveQueries(this.cli, qlpack, KeyType.PrintCfgQuery);
|
||||
if (queries.length > 1) {
|
||||
throw new Error(`Found multiple Print CFG queries. Can't continue. Make sure there is exactly one query with the tag ${KeyType.PrintCfgQuery}`);
|
||||
}
|
||||
if (queries.length === 0) {
|
||||
throw new Error(`Did not find any Print CFG queries. Can't continue. Make sure there is exactly one query with the tag ${KeyType.PrintCfgQuery}`);
|
||||
}
|
||||
|
||||
const queryUri = Uri.file(queries[0]);
|
||||
|
||||
const templates: messages.TemplateDefinitions = {
|
||||
[TEMPLATE_NAME]: {
|
||||
values: {
|
||||
tuples: [[{
|
||||
stringValue: zippedArchive.pathWithinSourceArchive
|
||||
}]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return [queryUri, templates];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,9 @@ import {
|
||||
ProgressCallback,
|
||||
} from './commandRunner';
|
||||
import { logger } from './logging';
|
||||
import { tmpDir } from './run-queries';
|
||||
import { tmpDir } from './helpers';
|
||||
import { Credentials } from './authentication';
|
||||
import { REPO_REGEX, getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
/**
|
||||
* Prompts a user to fetch a database from a remote location. Database is assumed to be an archive file.
|
||||
@@ -46,8 +48,10 @@ export async function promptImportInternetDatabase(
|
||||
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{},
|
||||
databaseManager,
|
||||
storagePath,
|
||||
undefined,
|
||||
progress,
|
||||
token,
|
||||
cli
|
||||
@@ -61,6 +65,82 @@ export async function promptImportInternetDatabase(
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompts a user to fetch a database from GitHub.
|
||||
* User enters a GitHub repository and then the user is asked which language
|
||||
* to download (if there is more than one)
|
||||
*
|
||||
* @param databaseManager the DatabaseManager
|
||||
* @param storagePath where to store the unzipped database.
|
||||
*/
|
||||
export async function promptImportGithubDatabase(
|
||||
databaseManager: DatabaseManager,
|
||||
storagePath: string,
|
||||
credentials: Credentials,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
cli?: CodeQLCliServer
|
||||
): Promise<DatabaseItem | undefined> {
|
||||
progress({
|
||||
message: 'Choose repository',
|
||||
step: 1,
|
||||
maxStep: 2
|
||||
});
|
||||
const githubRepo = await window.showInputBox({
|
||||
title: 'Enter a GitHub repository URL or "name with owner" (e.g. https://github.com/github/codeql or github/codeql)',
|
||||
placeHolder: 'https://github.com/<owner>/<repo> or <owner>/<repo>',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (!githubRepo) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!looksLikeGithubRepo(githubRepo)) {
|
||||
throw new Error(`Invalid GitHub repository: ${githubRepo}`);
|
||||
}
|
||||
|
||||
const result = await convertGithubNwoToDatabaseUrl(githubRepo, credentials, progress);
|
||||
if (!result) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { databaseUrl, name, owner } = result;
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
/**
|
||||
* The 'token' property of the token object returned by `octokit.auth()`.
|
||||
* The object is undocumented, but looks something like this:
|
||||
* {
|
||||
* token: 'xxxx',
|
||||
* tokenType: 'oauth',
|
||||
* type: 'token',
|
||||
* }
|
||||
* We only need the actual token string.
|
||||
*/
|
||||
const octokitToken = (await octokit.auth() as { token: string })?.token;
|
||||
if (!octokitToken) {
|
||||
// Just print a generic error message for now. Ideally we could show more debugging info, like the
|
||||
// octokit object, but that would expose a user token.
|
||||
throw new Error('Unable to get GitHub token.');
|
||||
}
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{ 'Accept': 'application/zip', 'Authorization': `Bearer ${octokitToken}` },
|
||||
databaseManager,
|
||||
storagePath,
|
||||
`${owner}/${name}`,
|
||||
progress,
|
||||
token,
|
||||
cli
|
||||
);
|
||||
if (item) {
|
||||
await commands.executeCommand('codeQLDatabases.focus');
|
||||
void showAndLogInformationMessage('Database downloaded and imported successfully.');
|
||||
return item;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompts a user to fetch a database from lgtm.
|
||||
* User enters a project url and then the user is asked which language
|
||||
@@ -90,12 +170,14 @@ export async function promptImportLgtmDatabase(
|
||||
}
|
||||
|
||||
if (looksLikeLgtmUrl(lgtmUrl)) {
|
||||
const databaseUrl = await convertToDatabaseUrl(lgtmUrl, progress);
|
||||
const databaseUrl = await convertLgtmUrlToDatabaseUrl(lgtmUrl, progress);
|
||||
if (databaseUrl) {
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{},
|
||||
databaseManager,
|
||||
storagePath,
|
||||
undefined,
|
||||
progress,
|
||||
token,
|
||||
cli
|
||||
@@ -140,8 +222,10 @@ export async function importArchiveDatabase(
|
||||
try {
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{},
|
||||
databaseManager,
|
||||
storagePath,
|
||||
undefined,
|
||||
progress,
|
||||
token,
|
||||
cli
|
||||
@@ -152,7 +236,7 @@ export async function importArchiveDatabase(
|
||||
}
|
||||
return item;
|
||||
} catch (e) {
|
||||
if (e.message.includes('unexpected end of file')) {
|
||||
if (getErrorMessage(e).includes('unexpected end of file')) {
|
||||
throw new Error('Database is corrupt or too large. Try unzipping outside of VS Code and importing the unzipped folder instead.');
|
||||
} else {
|
||||
// delegate
|
||||
@@ -166,15 +250,19 @@ export async function importArchiveDatabase(
|
||||
* or in the local filesystem.
|
||||
*
|
||||
* @param databaseUrl URL from which to grab the database
|
||||
* @param requestHeaders Headers to send with the request
|
||||
* @param databaseManager the DatabaseManager
|
||||
* @param storagePath where to store the unzipped database.
|
||||
* @param nameOverride a name for the database that overrides the default
|
||||
* @param progress callback to send progress messages to
|
||||
* @param token cancellation token
|
||||
*/
|
||||
async function databaseArchiveFetcher(
|
||||
databaseUrl: string,
|
||||
requestHeaders: { [key: string]: string },
|
||||
databaseManager: DatabaseManager,
|
||||
storagePath: string,
|
||||
nameOverride: string | undefined,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
cli?: CodeQLCliServer,
|
||||
@@ -193,7 +281,7 @@ async function databaseArchiveFetcher(
|
||||
if (isFile(databaseUrl)) {
|
||||
await readAndUnzip(databaseUrl, unzipPath, cli, progress);
|
||||
} else {
|
||||
await fetchAndUnzip(databaseUrl, unzipPath, cli, progress);
|
||||
await fetchAndUnzip(databaseUrl, requestHeaders, unzipPath, cli, progress);
|
||||
}
|
||||
|
||||
progress({
|
||||
@@ -216,7 +304,7 @@ async function databaseArchiveFetcher(
|
||||
});
|
||||
await ensureZippedSourceLocation(dbPath);
|
||||
|
||||
const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath));
|
||||
const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath), nameOverride);
|
||||
await databaseManager.setCurrentDatabaseItem(item);
|
||||
return item;
|
||||
} else {
|
||||
@@ -292,6 +380,7 @@ async function readAndUnzip(
|
||||
|
||||
async function fetchAndUnzip(
|
||||
databaseUrl: string,
|
||||
requestHeaders: { [key: string]: string },
|
||||
unzipPath: string,
|
||||
cli?: CodeQLCliServer,
|
||||
progress?: ProgressCallback
|
||||
@@ -310,7 +399,10 @@ async function fetchAndUnzip(
|
||||
step: 1,
|
||||
});
|
||||
|
||||
const response = await checkForFailingResponse(await fetch(databaseUrl), 'Error downloading database');
|
||||
const response = await checkForFailingResponse(
|
||||
await fetch(databaseUrl, { headers: requestHeaders }),
|
||||
'Error downloading database'
|
||||
);
|
||||
const archiveFileStream = fs.createWriteStream(archivePath);
|
||||
|
||||
const contentLength = response.headers.get('content-length');
|
||||
@@ -325,7 +417,6 @@ async function fetchAndUnzip(
|
||||
|
||||
await readAndUnzip(Uri.file(archivePath).toString(true), unzipPath, cli, progress);
|
||||
|
||||
|
||||
// remove archivePath eagerly since these archives can be large.
|
||||
await fs.remove(archivePath);
|
||||
}
|
||||
@@ -381,6 +472,89 @@ export async function findDirWithFile(
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* The URL pattern is https://github.com/{owner}/{name}/{subpages}.
|
||||
*
|
||||
* This function accepts any URL that matches the pattern above. It also accepts just the
|
||||
* name with owner (NWO): `<owner>/<repo>`.
|
||||
*
|
||||
* @param githubRepo The GitHub repository URL or NWO
|
||||
*
|
||||
* @return true if this looks like a valid GitHub repository URL or NWO
|
||||
*/
|
||||
export function looksLikeGithubRepo(
|
||||
githubRepo: string | undefined
|
||||
): githubRepo is string {
|
||||
if (!githubRepo) {
|
||||
return false;
|
||||
}
|
||||
if (REPO_REGEX.test(githubRepo) || convertGitHubUrlToNwo(githubRepo)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a GitHub repository URL to the corresponding NWO.
|
||||
* @param githubUrl The GitHub repository URL
|
||||
* @return The corresponding NWO, or undefined if the URL is not valid
|
||||
*/
|
||||
function convertGitHubUrlToNwo(githubUrl: string): string | undefined {
|
||||
try {
|
||||
const uri = Uri.parse(githubUrl, true);
|
||||
if (uri.scheme !== 'https') {
|
||||
return;
|
||||
}
|
||||
if (uri.authority !== 'github.com' && uri.authority !== 'www.github.com') {
|
||||
return;
|
||||
}
|
||||
const paths = uri.path.split('/').filter((segment: string) => segment);
|
||||
const nwo = `${paths[0]}/${paths[1]}`;
|
||||
if (REPO_REGEX.test(nwo)) {
|
||||
return nwo;
|
||||
}
|
||||
return;
|
||||
} catch (e) {
|
||||
// Ignore the error here, since we catch failures at a higher level.
|
||||
// In particular: returning undefined leads to an error in 'promptImportGithubDatabase'.
|
||||
return;
|
||||
}
|
||||
}
|
||||
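
Roughly what this conversion accepts and rejects, assuming REPO_REGEX matches the usual `<owner>/<repo>` shape (illustrative inputs, not from the diff):

// convertGitHubUrlToNwo('https://github.com/github/codeql')           => 'github/codeql'
// convertGitHubUrlToNwo('https://github.com/github/codeql/tree/main') => 'github/codeql'
// convertGitHubUrlToNwo('https://gitlab.com/some/repo')               => undefined (wrong host)
// convertGitHubUrlToNwo('github/codeql')                              => undefined (no https scheme; handled by REPO_REGEX instead)
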
|
||||
export async function convertGithubNwoToDatabaseUrl(
|
||||
githubRepo: string,
|
||||
credentials: Credentials,
|
||||
progress: ProgressCallback): Promise<{
|
||||
databaseUrl: string,
|
||||
owner: string,
|
||||
name: string
|
||||
} | undefined> {
|
||||
try {
|
||||
const nwo = convertGitHubUrlToNwo(githubRepo) || githubRepo;
|
||||
const [owner, repo] = nwo.split('/');
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
const response = await octokit.request('GET /repos/:owner/:repo/code-scanning/codeql/databases', { owner, repo });
|
||||
|
||||
const languages = response.data.map((db: any) => db.language);
|
||||
|
||||
const language = await promptForLanguage(languages, progress);
|
||||
if (!language) {
|
||||
return;
|
||||
}
|
||||
|
||||
return {
|
||||
databaseUrl: `https://api.github.com/repos/${owner}/${repo}/code-scanning/codeql/databases/${language}`,
|
||||
owner,
|
||||
name: repo
|
||||
};
|
||||
|
||||
} catch (e) {
|
||||
void logger.log(`Error: ${getErrorMessage(e)}`);
|
||||
throw new Error(`Unable to get database for '${githubRepo}'`);
|
||||
}
|
||||
}
|
||||
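
For instance, picking the `javascript` database of `github/codeql` would produce a result along these lines (illustrative values only):

// {
//   databaseUrl: 'https://api.github.com/repos/github/codeql/code-scanning/codeql/databases/javascript',
//   owner: 'github',
//   name: 'codeql'
// }
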
|
||||
/**
|
||||
* The URL pattern is https://lgtm.com/projects/{provider}/{org}/{name}/{irrelevant-subpages}.
|
||||
* There are several possibilities for the provider: in addition to GitHub.com (g),
|
||||
@@ -416,7 +590,7 @@ export function looksLikeLgtmUrl(lgtmUrl: string | undefined): lgtmUrl is string
|
||||
return false;
|
||||
}
|
||||
|
||||
const paths = uri.path.split('/').filter((segment) => segment);
|
||||
const paths = uri.path.split('/').filter((segment: string) => segment);
|
||||
return paths.length >= 4 && paths[0] === 'projects';
|
||||
} catch (e) {
|
||||
return false;
|
||||
@@ -434,7 +608,7 @@ function convertRawLgtmSlug(maybeSlug: string): string | undefined {
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
function extractProjectSlug(lgtmUrl: string): string | undefined {
|
||||
// Only matches the '/g/' provider (github)
|
||||
const re = new RegExp('https://lgtm.com/projects/g/(.*[^/])');
|
||||
@@ -446,7 +620,7 @@ function extractProjectSlug(lgtmUrl: string): string | undefined {
|
||||
}
|
||||
|
||||
// exported for testing
|
||||
export async function convertToDatabaseUrl(
|
||||
export async function convertLgtmUrlToDatabaseUrl(
|
||||
lgtmUrl: string,
|
||||
progress: ProgressCallback) {
|
||||
try {
|
||||
@@ -467,7 +641,9 @@ export async function convertToDatabaseUrl(
|
||||
}
|
||||
}
|
||||
|
||||
const language = await promptForLanguage(projectJson, progress);
|
||||
const languages = projectJson?.languages?.map((lang: { language: string }) => lang.language) || [];
|
||||
|
||||
const language = await promptForLanguage(languages, progress);
|
||||
if (!language) {
|
||||
return;
|
||||
}
|
||||
@@ -479,7 +655,7 @@ export async function convertToDatabaseUrl(
|
||||
language,
|
||||
].join('/')}`;
|
||||
} catch (e) {
|
||||
void logger.log(`Error: ${e.message}`);
|
||||
void logger.log(`Error: ${getErrorMessage(e)}`);
|
||||
throw new Error(`Invalid LGTM URL: ${lgtmUrl}`);
|
||||
}
|
||||
}
|
||||
@@ -487,7 +663,7 @@ export async function convertToDatabaseUrl(
|
||||
async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
|
||||
const uri = Uri.parse(lgtmUrl, true);
|
||||
const paths = ['api', 'v1.0'].concat(
|
||||
uri.path.split('/').filter((segment) => segment)
|
||||
uri.path.split('/').filter((segment: string) => segment)
|
||||
).slice(0, 6);
|
||||
const projectUrl = `https://lgtm.com/${paths.join('/')}`;
|
||||
const projectResponse = await fetch(projectUrl);
|
||||
@@ -495,7 +671,7 @@ async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
|
||||
}
|
||||
|
||||
async function promptForLanguage(
|
||||
projectJson: any,
|
||||
languages: string[],
|
||||
progress: ProgressCallback
|
||||
): Promise<string | undefined> {
|
||||
progress({
|
||||
@@ -503,17 +679,19 @@ async function promptForLanguage(
|
||||
step: 2,
|
||||
maxStep: 2
|
||||
});
|
||||
if (!projectJson?.languages?.length) {
|
||||
return;
|
||||
if (!languages.length) {
|
||||
throw new Error('No databases found');
|
||||
}
|
||||
if (projectJson.languages.length === 1) {
|
||||
return projectJson.languages[0].language;
|
||||
if (languages.length === 1) {
|
||||
return languages[0];
|
||||
}
|
||||
|
||||
return await window.showQuickPick(
|
||||
projectJson.languages.map((lang: { language: string }) => lang.language), {
|
||||
placeHolder: 'Select the database language to download:'
|
||||
}
|
||||
languages,
|
||||
{
|
||||
placeHolder: 'Select the database language to download:',
|
||||
ignoreFocusOut: true,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -33,11 +33,13 @@ import * as qsClient from './queryserver-client';
|
||||
import { upgradeDatabaseExplicit } from './upgrades';
|
||||
import {
|
||||
importArchiveDatabase,
|
||||
promptImportGithubDatabase,
|
||||
promptImportInternetDatabase,
|
||||
promptImportLgtmDatabase,
|
||||
} from './databaseFetcher';
|
||||
import { CancellationToken } from 'vscode';
|
||||
import { asyncFilter } from './pure/helpers-pure';
|
||||
import { asyncFilter, getErrorMessage } from './pure/helpers-pure';
|
||||
import { Credentials } from './authentication';
|
||||
|
||||
type ThemableIconPath = { light: string; dark: string } | string;
|
||||
|
||||
@@ -219,7 +221,8 @@ export class DatabaseUI extends DisposableObject {
|
||||
private databaseManager: DatabaseManager,
|
||||
private readonly queryServer: qsClient.QueryServerClient | undefined,
|
||||
private readonly storagePath: string,
|
||||
readonly extensionPath: string
|
||||
readonly extensionPath: string,
|
||||
private readonly getCredentials: () => Promise<Credentials>
|
||||
) {
|
||||
super();
|
||||
|
||||
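
On the extension.ts side, DatabaseUI now receives a credentials factory so the new GitHub import command can authenticate lazily. A sketch of the wiring, with assumed names (`queryServerClient`, `storagePath`) and the assumption that Credentials exposes an initialize(ctx) factory:

const databaseUI = new DatabaseUI(
  databaseManager,
  queryServerClient,
  storagePath,
  ctx.extensionPath,
  () => Credentials.initialize(ctx)   // assumed factory; only invoked when the GitHub import command runs
);
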
@@ -291,6 +294,20 @@ export class DatabaseUI extends DisposableObject {
|
||||
}
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunnerWithProgress(
|
||||
'codeQLDatabases.chooseDatabaseGithub',
|
||||
async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const credentials = await this.getCredentials();
|
||||
await this.handleChooseDatabaseGithub(credentials, progress, token);
|
||||
},
|
||||
{
|
||||
title: 'Adding database from GitHub',
|
||||
})
|
||||
);
|
||||
this.push(
|
||||
commandRunnerWithProgress(
|
||||
'codeQLDatabases.chooseDatabaseLgtm',
|
||||
@@ -376,7 +393,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
try {
|
||||
return await this.chooseAndSetDatabase(true, progress, token);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
@@ -444,7 +461,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
try {
|
||||
return await this.chooseAndSetDatabase(false, progress, token);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
@@ -462,6 +479,21 @@ export class DatabaseUI extends DisposableObject {
|
||||
);
|
||||
};
|
||||
|
||||
handleChooseDatabaseGithub = async (
|
||||
credentials: Credentials,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
): Promise<DatabaseItem | undefined> => {
|
||||
return await promptImportGithubDatabase(
|
||||
this.databaseManager,
|
||||
this.storagePath,
|
||||
credentials,
|
||||
progress,
|
||||
token,
|
||||
this.queryServer?.cliServer
|
||||
);
|
||||
};
|
||||
|
||||
handleChooseDatabaseLgtm = async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
@@ -590,8 +622,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
} catch (e) {
|
||||
// rethrow and let this be handled by default error handling.
|
||||
throw new Error(
|
||||
`Could not set database to ${path.basename(uri.fsPath)}. Reason: ${e.message
|
||||
}`
|
||||
`Could not set database to ${path.basename(uri.fsPath)}. Reason: ${getErrorMessage(e)}`
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -19,6 +19,7 @@ import { DisposableObject } from './pure/disposable-object';
|
||||
import { Logger, logger } from './logging';
|
||||
import { registerDatabases, Dataset, deregisterDatabases } from './pure/messages';
|
||||
import { QueryServerClient } from './queryserver-client';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
/**
|
||||
* databases.ts
|
||||
@@ -147,7 +148,7 @@ export async function findSourceArchive(
|
||||
}
|
||||
|
||||
async function resolveDatabase(
|
||||
databasePath: string
|
||||
databasePath: string,
|
||||
): Promise<DatabaseContents> {
|
||||
|
||||
const name = path.basename(databasePath);
|
||||
@@ -169,7 +170,9 @@ async function getDbSchemeFiles(dbDirectory: string): Promise<string[]> {
|
||||
return await glob('*.dbscheme', { cwd: dbDirectory });
|
||||
}
|
||||
|
||||
async function resolveDatabaseContents(uri: vscode.Uri): Promise<DatabaseContents> {
|
||||
async function resolveDatabaseContents(
|
||||
uri: vscode.Uri,
|
||||
): Promise<DatabaseContents> {
|
||||
if (uri.scheme !== 'file') {
|
||||
throw new Error(`Database URI scheme '${uri.scheme}' not supported; only 'file' URIs are supported.`);
|
||||
}
|
||||
@@ -359,7 +362,7 @@ export class DatabaseItemImpl implements DatabaseItem {
|
||||
}
|
||||
catch (e) {
|
||||
this._contents = undefined;
|
||||
this._error = e;
|
||||
this._error = e instanceof Error ? e : new Error(String(e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@@ -568,14 +571,15 @@ export class DatabaseManager extends DisposableObject {
|
||||
progress: ProgressCallback,
|
||||
token: vscode.CancellationToken,
|
||||
uri: vscode.Uri,
|
||||
displayName?: string
|
||||
): Promise<DatabaseItem> {
|
||||
const contents = await resolveDatabaseContents(uri);
|
||||
// Ignore the source archive for QLTest databases by default.
|
||||
const isQLTestDatabase = path.extname(uri.fsPath) === '.testproj';
|
||||
const fullOptions: FullDatabaseOptions = {
|
||||
ignoreSourceArchive: isQLTestDatabase,
|
||||
// displayName is only set if a user explicitly renames a database
|
||||
displayName: undefined,
|
||||
// If a displayName is not passed in, the basename of folder containing the database is used.
|
||||
displayName,
|
||||
dateAdded: Date.now(),
|
||||
language: await this.getPrimaryLanguage(uri.fsPath)
|
||||
};
|
||||
@@ -726,7 +730,7 @@ export class DatabaseManager extends DisposableObject {
|
||||
}
|
||||
} catch (e) {
|
||||
// database list had an unexpected type - nothing to be done?
|
||||
void showAndLogErrorMessage(`Database list loading failed: ${e.message}`);
|
||||
void showAndLogErrorMessage(`Database list loading failed: ${getErrorMessage(e)}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -841,7 +845,7 @@ export class DatabaseManager extends DisposableObject {
|
||||
void logger.log('Deleting database from filesystem.');
|
||||
fs.remove(item.databaseUri.fsPath).then(
|
||||
() => void logger.log(`Deleted '${item.databaseUri.fsPath}'`),
|
||||
e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${e.message}`));
|
||||
e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${getErrorMessage(e)}`));
|
||||
}
|
||||
|
||||
// note that we use undefined as the item in order to reset the entire tree
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import 'source-map-support/register';
|
||||
import {
|
||||
CancellationToken,
|
||||
CancellationTokenSource,
|
||||
@@ -19,6 +20,7 @@ import {
|
||||
} from 'vscode';
|
||||
import { LanguageClient } from 'vscode-languageclient';
|
||||
import * as os from 'os';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import { testExplorerExtensionId, TestHub } from 'vscode-test-adapter-api';
|
||||
@@ -41,7 +43,8 @@ import { DatabaseUI } from './databases-ui';
|
||||
import {
|
||||
TemplateQueryDefinitionProvider,
|
||||
TemplateQueryReferenceProvider,
|
||||
TemplatePrintAstProvider
|
||||
TemplatePrintAstProvider,
|
||||
TemplatePrintCfgProvider
|
||||
} from './contextual/templateProvider';
|
||||
import {
|
||||
DEFAULT_DISTRIBUTION_VERSION_RANGE,
|
||||
@@ -53,17 +56,26 @@ import {
|
||||
GithubApiError,
|
||||
GithubRateLimitedError
|
||||
} from './distribution';
|
||||
import * as helpers from './helpers';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import {
|
||||
findLanguage,
|
||||
tmpDirDisposal,
|
||||
showBinaryChoiceDialog,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogWarningMessage,
|
||||
showAndLogInformationMessage,
|
||||
showInformationMessageWithAction,
|
||||
tmpDir
|
||||
} from './helpers';
|
||||
import { asError, assertNever, getErrorMessage } from './pure/helpers-pure';
|
||||
import { spawnIdeServer } from './ide-server';
|
||||
import { InterfaceManager } from './interface';
|
||||
import { WebviewReveal } from './interface-utils';
|
||||
import { ideServerLogger, logger, queryServerLogger } from './logging';
|
||||
import { QueryHistoryManager } from './query-history';
|
||||
import { FullCompletedQueryInfo, FullQueryInfo } from './query-results';
|
||||
import { CompletedLocalQueryInfo, LocalQueryInfo } from './query-results';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import { displayQuickQuery } from './quick-query';
|
||||
import { compileAndRunQueryAgainstDatabase, createInitialQueryInfo, tmpDirDisposal } from './run-queries';
|
||||
import { compileAndRunQueryAgainstDatabase, createInitialQueryInfo } from './run-queries';
|
||||
import { QLTestAdapterFactory } from './test-adapter';
|
||||
import { TestUIService } from './test-ui';
|
||||
import { CompareInterfaceManager } from './compare/compare-interface';
|
||||
@@ -80,13 +92,12 @@ import { CodeQlStatusBarHandler } from './status-bar';
|
||||
|
||||
import { Credentials } from './authentication';
|
||||
import { RemoteQueriesManager } from './remote-queries/remote-queries-manager';
|
||||
import { RemoteQuery } from './remote-queries/remote-query';
|
||||
import { RemoteQueryResult } from './remote-queries/remote-query-result';
|
||||
import { URLSearchParams } from 'url';
|
||||
import { RemoteQueriesInterfaceManager } from './remote-queries/remote-queries-interface';
|
||||
import * as sampleData from './remote-queries/sample-data';
|
||||
import { handleDownloadPacks, handleInstallPackDependencies } from './packaging';
|
||||
import { AnalysesResultsManager } from './remote-queries/analyses-results-manager';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||
import { exportRemoteQueryResults } from './remote-queries/export-results';
|
||||
|
||||
/**
|
||||
* extension.ts
|
||||
@@ -188,7 +199,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
const shouldUpdateOnNextActivationKey = 'shouldUpdateOnNextActivation';
|
||||
|
||||
registerErrorStubs([checkForUpdatesCommand], command => (async () => {
|
||||
void helpers.showAndLogErrorMessage(`Can't execute ${command}: waiting to finish loading CodeQL CLI.`);
|
||||
void showAndLogErrorMessage(`Can't execute ${command}: waiting to finish loading CodeQL CLI.`);
|
||||
}));
|
||||
|
||||
interface DistributionUpdateConfig {
|
||||
@@ -200,7 +211,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
async function installOrUpdateDistributionWithProgressTitle(progressTitle: string, config: DistributionUpdateConfig): Promise<void> {
|
||||
const minSecondsSinceLastUpdateCheck = config.isUserInitiated ? 0 : 86400;
|
||||
const noUpdatesLoggingFunc = config.shouldDisplayMessageWhenNoUpdates ?
|
||||
helpers.showAndLogInformationMessage : async (message: string) => void logger.log(message);
|
||||
showAndLogInformationMessage : async (message: string) => void logger.log(message);
|
||||
const result = await distributionManager.checkForUpdatesToExtensionManagedDistribution(minSecondsSinceLastUpdateCheck);
|
||||
|
||||
// We do want to auto update if there is no distribution at all
|
||||
@@ -222,7 +233,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
const updateAvailableMessage = `Version "${result.updatedRelease.name}" of the CodeQL CLI is now available. ` +
|
||||
'Do you wish to upgrade?';
|
||||
await ctx.globalState.update(shouldUpdateOnNextActivationKey, true);
|
||||
if (await helpers.showInformationMessageWithAction(updateAvailableMessage, 'Restart and Upgrade')) {
|
||||
if (await showInformationMessageWithAction(updateAvailableMessage, 'Restart and Upgrade')) {
|
||||
await commands.executeCommand('workbench.action.reloadWindow');
|
||||
}
|
||||
} else {
|
||||
@@ -235,7 +246,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
distributionManager.installExtensionManagedDistributionRelease(result.updatedRelease, progress));
|
||||
|
||||
await ctx.globalState.update(shouldUpdateOnNextActivationKey, false);
|
||||
void helpers.showAndLogInformationMessage(`CodeQL CLI updated to version "${result.updatedRelease.name}".`);
|
||||
void showAndLogInformationMessage(`CodeQL CLI updated to version "${result.updatedRelease.name}".`);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
@@ -262,7 +273,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
// Don't rethrow the exception, because if the config is changed, we want to be able to retry installing
|
||||
// or updating the distribution.
|
||||
const alertFunction = (codeQlInstalled && !config.isUserInitiated) ?
|
||||
helpers.showAndLogWarningMessage : helpers.showAndLogErrorMessage;
|
||||
showAndLogWarningMessage : showAndLogErrorMessage;
|
||||
const taskDescription = (willUpdateCodeQl ? 'update' :
|
||||
codeQlInstalled ? 'check for updates to' : 'install') + ' CodeQL CLI';
|
||||
|
||||
@@ -297,20 +308,20 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
}
|
||||
})();
|
||||
|
||||
void helpers.showAndLogWarningMessage(
|
||||
void showAndLogWarningMessage(
|
||||
`The current version of the CodeQL CLI (${result.version.raw}) ` +
|
||||
`is incompatible with this extension. ${fixGuidanceMessage}`
|
||||
);
|
||||
break;
|
||||
}
|
||||
case FindDistributionResultKind.UnknownCompatibilityDistribution:
|
||||
void helpers.showAndLogWarningMessage(
|
||||
void showAndLogWarningMessage(
|
||||
'Compatibility with the configured CodeQL CLI could not be determined. ' +
|
||||
'You may experience problems using the extension.'
|
||||
);
|
||||
break;
|
||||
case FindDistributionResultKind.NoDistribution:
|
||||
void helpers.showAndLogErrorMessage('The CodeQL CLI could not be found.');
|
||||
void showAndLogErrorMessage('The CodeQL CLI could not be found.');
|
||||
break;
|
||||
default:
|
||||
assertNever(result);
|
||||
@@ -337,7 +348,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
} else if (distributionResult.kind === FindDistributionResultKind.NoDistribution) {
|
||||
registerErrorStubs([checkForUpdatesCommand], command => async () => {
|
||||
const installActionName = 'Install CodeQL CLI';
|
||||
const chosenAction = await void helpers.showAndLogErrorMessage(`Can't execute ${command}: missing CodeQL CLI.`, {
|
||||
const chosenAction = await void showAndLogErrorMessage(`Can't execute ${command}: missing CodeQL CLI.`, {
|
||||
items: [installActionName]
|
||||
});
|
||||
if (chosenAction === installActionName) {
|
||||
@@ -425,7 +436,8 @@ async function activateWithInstalledDistribution(
|
||||
dbm,
|
||||
qs,
|
||||
getContextStoragePath(ctx),
|
||||
ctx.extensionPath
|
||||
ctx.extensionPath,
|
||||
() => Credentials.initialize(ctx),
|
||||
);
|
||||
databaseUI.init();
|
||||
ctx.subscriptions.push(databaseUI);
|
||||
@@ -433,21 +445,34 @@ async function activateWithInstalledDistribution(
|
||||
void logger.log('Initializing query history manager.');
|
||||
const queryHistoryConfigurationListener = new QueryHistoryConfigListener();
|
||||
ctx.subscriptions.push(queryHistoryConfigurationListener);
|
||||
const showResults = async (item: FullCompletedQueryInfo) =>
|
||||
const showResults = async (item: CompletedLocalQueryInfo) =>
|
||||
showResultsForCompletedQuery(item, WebviewReveal.Forced);
|
||||
const queryStorageDir = path.join(ctx.globalStorageUri.fsPath, 'queries');
|
||||
await fs.ensureDir(queryStorageDir);
|
||||
const labelProvider = new HistoryItemLabelProvider(queryHistoryConfigurationListener);
|
||||
|
||||
void logger.log('Initializing query history.');
|
||||
const qhm = new QueryHistoryManager(
|
||||
qs,
|
||||
dbm,
|
||||
ctx.extensionPath,
|
||||
queryStorageDir,
|
||||
ctx,
|
||||
queryHistoryConfigurationListener,
|
||||
showResults,
|
||||
async (from: FullCompletedQueryInfo, to: FullCompletedQueryInfo) =>
|
||||
labelProvider,
|
||||
async (from: CompletedLocalQueryInfo, to: CompletedLocalQueryInfo) =>
|
||||
showResultsForComparison(from, to),
|
||||
);
|
||||
|
||||
qhm.onWillOpenQueryItem(async item => {
|
||||
if (item.t === 'local' && item.completed) {
|
||||
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.Forced);
|
||||
}
|
||||
});
|
||||
|
||||
ctx.subscriptions.push(qhm);
|
||||
|
||||
void logger.log('Initializing results panel interface.');
|
||||
const intm = new InterfaceManager(ctx, dbm, cliServer, queryServerLogger);
|
||||
const intm = new InterfaceManager(ctx, dbm, cliServer, queryServerLogger, labelProvider);
|
||||
ctx.subscriptions.push(intm);
|
||||
|
||||
void logger.log('Initializing compare panel interface.');
|
||||
@@ -456,6 +481,7 @@ async function activateWithInstalledDistribution(
|
||||
dbm,
|
||||
cliServer,
|
||||
queryServerLogger,
|
||||
labelProvider,
|
||||
showResults
|
||||
);
|
||||
ctx.subscriptions.push(cmpm);
|
||||
@@ -464,18 +490,18 @@ async function activateWithInstalledDistribution(
|
||||
archiveFilesystemProvider.activate(ctx);
|
||||
|
||||
async function showResultsForComparison(
|
||||
from: FullCompletedQueryInfo,
|
||||
to: FullCompletedQueryInfo
|
||||
from: CompletedLocalQueryInfo,
|
||||
to: CompletedLocalQueryInfo
|
||||
): Promise<void> {
|
||||
try {
|
||||
await cmpm.showResults(from, to);
|
||||
} catch (e) {
|
||||
void helpers.showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
|
||||
async function showResultsForCompletedQuery(
|
||||
query: FullCompletedQueryInfo,
|
||||
query: CompletedLocalQueryInfo,
|
||||
forceReveal: WebviewReveal
|
||||
): Promise<void> {
|
||||
await intm.showResults(query, forceReveal, false);
|
||||
@@ -505,7 +531,7 @@ async function activateWithInstalledDistribution(
|
||||
token.onCancellationRequested(() => source.cancel());
|
||||
|
||||
const initialInfo = await createInitialQueryInfo(selectedQuery, databaseInfo, quickEval, range);
|
||||
const item = new FullQueryInfo(initialInfo, queryHistoryConfigurationListener, source);
|
||||
const item = new LocalQueryInfo(initialInfo, source);
|
||||
qhm.addQuery(item);
|
||||
try {
|
||||
const completedQueryInfo = await compileAndRunQueryAgainstDatabase(
|
||||
@@ -513,18 +539,23 @@ async function activateWithInstalledDistribution(
|
||||
qs,
|
||||
databaseItem,
|
||||
initialInfo,
|
||||
queryStorageDir,
|
||||
progress,
|
||||
source.token,
|
||||
undefined,
|
||||
item,
|
||||
);
|
||||
item.completeThisQuery(completedQueryInfo);
|
||||
await showResultsForCompletedQuery(item as FullCompletedQueryInfo, WebviewReveal.NotForced);
|
||||
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.NotForced);
|
||||
// Note we must update the query history view after showing results as the
|
||||
// display and sorting might depend on the number of results
|
||||
} catch (e) {
|
||||
item.failureReason = e.message;
|
||||
const err = asError(e);
|
||||
err.message = `Error running query: ${err.message}`;
|
||||
item.failureReason = err.message;
|
||||
throw e;
|
||||
} finally {
|
||||
qhm.refreshTreeView();
|
||||
await qhm.refreshTreeView();
|
||||
source.dispose();
|
||||
}
|
||||
}
|
||||
@@ -546,11 +577,11 @@ async function activateWithInstalledDistribution(
|
||||
try {
|
||||
await cliServer.generateQueryHelp(pathToQhelp, absolutePathToMd);
|
||||
await commands.executeCommand('markdown.showPreviewToSide', uri);
|
||||
} catch (err) {
|
||||
const errorMessage = err.message.includes('Generating qhelp in markdown') ? (
|
||||
} catch (e) {
|
||||
const errorMessage = getErrorMessage(e).includes('Generating qhelp in markdown') ? (
|
||||
`Could not generate markdown from ${pathToQhelp}: Bad formatting in .qhelp file.`
|
||||
) : `Could not open a preview of the generated file (${absolutePathToMd}).`;
|
||||
void helpers.showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${err}` });
|
||||
void showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${e}` });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -567,7 +598,7 @@ async function activateWithInstalledDistribution(
|
||||
const uri = Uri.file(resolved.resolvedPath);
|
||||
await window.showTextDocument(uri, { preview: false });
|
||||
} else {
|
||||
void helpers.showAndLogErrorMessage(
|
||||
void showAndLogErrorMessage(
|
||||
'Jumping from a .qlref file to the .ql file it references is not '
|
||||
+ 'supported with the CLI version you are running.\n'
|
||||
+ `Please upgrade your CLI to version ${CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_QLREF
|
||||
@@ -621,7 +652,10 @@ async function activateWithInstalledDistribution(
|
||||
{
|
||||
title: 'Running query',
|
||||
cancellable: true
|
||||
}
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
interface DatabaseQuickPickItem extends QuickPickItem {
|
||||
@@ -637,15 +671,15 @@ async function activateWithInstalledDistribution(
|
||||
) => {
|
||||
let filteredDBs = dbm.databaseItems;
|
||||
if (filteredDBs.length === 0) {
|
||||
void helpers.showAndLogErrorMessage('No databases found. Please add a suitable database to your workspace.');
|
||||
void showAndLogErrorMessage('No databases found. Please add a suitable database to your workspace.');
|
||||
return;
|
||||
}
|
||||
// If possible, only show databases with the right language (otherwise show all databases).
|
||||
const queryLanguage = await helpers.findLanguage(cliServer, uri);
|
||||
const queryLanguage = await findLanguage(cliServer, uri);
|
||||
if (queryLanguage) {
|
||||
filteredDBs = dbm.databaseItems.filter(db => db.language === queryLanguage);
|
||||
if (filteredDBs.length === 0) {
|
||||
void helpers.showAndLogErrorMessage(`No databases found for language ${queryLanguage}. Please add a suitable database to your workspace.`);
|
||||
void showAndLogErrorMessage(`No databases found for language ${queryLanguage}. Please add a suitable database to your workspace.`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -670,19 +704,19 @@ async function activateWithInstalledDistribution(
|
||||
for (const item of quickpick) {
|
||||
try {
|
||||
await compileAndRunQuery(false, uri, progress, token, item.databaseItem);
|
||||
} catch (error) {
|
||||
} catch (e) {
|
||||
skippedDatabases.push(item.label);
|
||||
errors.push(error.message);
|
||||
errors.push(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
if (skippedDatabases.length > 0) {
|
||||
void logger.log(`Errors:\n${errors.join('\n')}`);
|
||||
void helpers.showAndLogWarningMessage(
|
||||
void showAndLogWarningMessage(
|
||||
`The following databases were skipped:\n${skippedDatabases.join('\n')}.\nFor details about the errors, see the logs.`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
void helpers.showAndLogErrorMessage('No databases selected.');
|
||||
void showAndLogErrorMessage('No databases selected.');
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -709,7 +743,7 @@ async function activateWithInstalledDistribution(
|
||||
// files may be hidden from the user.
|
||||
if (dirFound) {
|
||||
const fileString = files.map(file => path.basename(file)).join(', ');
|
||||
const res = await helpers.showBinaryChoiceDialog(
|
||||
const res = await showBinaryChoiceDialog(
|
||||
`You are about to run ${files.length} queries: ${fileString} Do you want to continue?`
|
||||
);
|
||||
if (!res) {
|
||||
@@ -753,7 +787,11 @@ async function activateWithInstalledDistribution(
|
||||
{
|
||||
title: 'Running queries',
|
||||
cancellable: true
|
||||
})
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress(
|
||||
@@ -766,7 +804,10 @@ async function activateWithInstalledDistribution(
|
||||
{
|
||||
title: 'Running query',
|
||||
cancellable: true
|
||||
})
|
||||
},
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
@@ -781,7 +822,11 @@ async function activateWithInstalledDistribution(
|
||||
{
|
||||
title: 'Running query',
|
||||
cancellable: true
|
||||
})
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
@@ -792,18 +837,27 @@ async function activateWithInstalledDistribution(
|
||||
displayQuickQuery(ctx, cliServer, databaseUI, progress, token),
|
||||
{
|
||||
title: 'Run Quick Query'
|
||||
}
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
|
||||
void logger.log('Initializing remote queries interface.');
|
||||
const rqm = new RemoteQueriesManager(ctx, cliServer, logger);
|
||||
void logger.log('Initializing variant analysis results view.');
|
||||
const rqm = new RemoteQueriesManager(ctx, cliServer, qhm, queryStorageDir, logger);
|
||||
ctx.subscriptions.push(rqm);
|
||||
|
||||
// wait until after the remote queries manager is initialized to read the query history
|
||||
// since the rqm is notified of queries being added.
|
||||
await qhm.readQueryHistory();
|
||||
|
||||
|
||||
registerRemoteQueryTextProvider();
|
||||
|
||||
// The "runRemoteQuery" command is internal-only.
|
||||
// The "runVariantAnalysis" command is internal-only.
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.runRemoteQuery', async (
|
||||
commandRunnerWithProgress('codeQL.runVariantAnalysis', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
uri: Uri | undefined
|
||||
@@ -820,19 +874,19 @@ async function activateWithInstalledDistribution(
|
||||
token
|
||||
);
|
||||
} else {
|
||||
throw new Error('Remote queries require the CodeQL Canary version to run.');
|
||||
throw new Error('Variant analysis requires the CodeQL Canary version to run.');
|
||||
}
|
||||
}, {
|
||||
title: 'Run Remote Query',
|
||||
title: 'Run Variant Analysis',
|
||||
cancellable: true
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.monitorRemoteQuery', async (
|
||||
query: RemoteQuery,
|
||||
queryItem: RemoteQueryHistoryItem,
|
||||
token: CancellationToken) => {
|
||||
await rqm.monitorRemoteQuery(query, token);
|
||||
await rqm.monitorRemoteQuery(queryItem, token);
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
@@ -843,15 +897,10 @@ async function activateWithInstalledDistribution(
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.showFakeRemoteQueryResults', async () => {
|
||||
const analysisResultsManager = new AnalysesResultsManager(ctx, logger);
|
||||
const rqim = new RemoteQueriesInterfaceManager(ctx, logger, analysisResultsManager);
|
||||
await rqim.showResults(sampleData.sampleRemoteQuery, sampleData.sampleRemoteQueryResult);
|
||||
|
||||
await rqim.setAnalysisResults(sampleData.sampleAnalysesResultsStage1);
|
||||
await rqim.setAnalysisResults(sampleData.sampleAnalysesResultsStage2);
|
||||
await rqim.setAnalysisResults(sampleData.sampleAnalysesResultsStage3);
|
||||
}));
|
||||
commandRunner('codeQL.exportVariantAnalysisResults', async () => {
|
||||
await exportRemoteQueryResults(qhm, rqm, ctx);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
@@ -873,7 +922,7 @@ async function activateWithInstalledDistribution(
|
||||
token: CancellationToken
|
||||
) => {
|
||||
await qs.restartQueryServer(progress, token);
|
||||
void helpers.showAndLogInformationMessage('CodeQL Query Server restarted.', {
|
||||
void showAndLogInformationMessage('CodeQL Query Server restarted.', {
|
||||
outputLogger: queryServerLogger,
|
||||
});
|
||||
}, {
|
||||
@@ -899,6 +948,18 @@ async function activateWithInstalledDistribution(
|
||||
title: 'Choose a Database from an Archive'
|
||||
})
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.chooseDatabaseGithub', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
await databaseUI.handleChooseDatabaseGithub(credentials, progress, token);
|
||||
},
|
||||
{
|
||||
title: 'Adding database from GitHub',
|
||||
})
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.chooseDatabaseLgtm', (
|
||||
progress: ProgressCallback,
|
||||
@@ -929,7 +990,7 @@ async function activateWithInstalledDistribution(
|
||||
commandRunner('codeQL.copyVersion', async () => {
|
||||
const text = `CodeQL extension version: ${extension?.packageJSON.version} \nCodeQL CLI version: ${await getCliVersion()} \nPlatform: ${os.platform()} ${os.arch()}`;
|
||||
await env.clipboard.writeText(text);
|
||||
void helpers.showAndLogInformationMessage(text);
|
||||
void showAndLogInformationMessage(text);
|
||||
}));
|
||||
|
||||
const getCliVersion = async () => {
|
||||
@@ -951,7 +1012,7 @@ async function activateWithInstalledDistribution(
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
const octokit = await credentials.getOctokit();
|
||||
const userInfo = await octokit.users.getAuthenticated();
|
||||
void helpers.showAndLogInformationMessage(`Authenticated to GitHub as user: ${userInfo.data.login}`);
|
||||
void showAndLogInformationMessage(`Authenticated to GitHub as user: ${userInfo.data.login}`);
|
||||
}
|
||||
}));
|
||||
|
||||
@@ -986,18 +1047,24 @@ async function activateWithInstalledDistribution(
|
||||
|
||||
// Jump-to-definition and find-references
|
||||
void logger.log('Registering jump-to-definition handlers.');
|
||||
|
||||
// Store contextual queries in a temporary folder so that they are removed
|
||||
// when the application closes. There is no need for the user to interact with them.
|
||||
const contextualQueryStorageDir = path.join(tmpDir.name, 'contextual-query-storage');
|
||||
await fs.ensureDir(contextualQueryStorageDir);
|
||||
languages.registerDefinitionProvider(
|
||||
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
|
||||
new TemplateQueryDefinitionProvider(cliServer, qs, dbm)
|
||||
new TemplateQueryDefinitionProvider(cliServer, qs, dbm, contextualQueryStorageDir)
|
||||
);
|
||||
|
||||
languages.registerReferenceProvider(
|
||||
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
|
||||
new TemplateQueryReferenceProvider(cliServer, qs, dbm)
|
||||
new TemplateQueryReferenceProvider(cliServer, qs, dbm, contextualQueryStorageDir)
|
||||
);
|
||||
|
||||
const astViewer = new AstViewer();
|
||||
const templateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm);
|
||||
const printAstTemplateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm, contextualQueryStorageDir);
|
||||
const cfgTemplateProvider = new TemplatePrintCfgProvider(cliServer, dbm);
|
||||
|
||||
ctx.subscriptions.push(astViewer);
|
||||
ctx.subscriptions.push(commandRunnerWithProgress('codeQL.viewAst', async (
|
||||
@@ -1005,7 +1072,7 @@ async function activateWithInstalledDistribution(
|
||||
token: CancellationToken,
|
||||
selectedFile: Uri
|
||||
) => {
|
||||
const ast = await templateProvider.provideAst(
|
||||
const ast = await printAstTemplateProvider.provideAst(
|
||||
progress,
|
||||
token,
|
||||
selectedFile ?? window.activeTextEditor?.document.uri,
|
||||
@@ -1018,6 +1085,25 @@ async function activateWithInstalledDistribution(
|
||||
title: 'Calculate AST'
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress(
|
||||
'codeQL.viewCfg',
|
||||
async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const res = await cfgTemplateProvider.provideCfgUri(window.activeTextEditor?.document);
|
||||
if (res) {
|
||||
await compileAndRunQuery(false, res[0], progress, token, undefined);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: 'Calculating Control Flow Graph',
|
||||
cancellable: true
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
await commands.executeCommand('codeQLDatabases.removeOrphanedDatabases');
|
||||
|
||||
void logger.log('Successfully finished extension initialization.');
|
||||
@@ -1036,13 +1122,10 @@ async function activateWithInstalledDistribution(
|
||||
}
|
||||
|
||||
function getContextStoragePath(ctx: ExtensionContext) {
|
||||
return ctx.storagePath || ctx.globalStoragePath;
|
||||
return ctx.storageUri?.fsPath || ctx.globalStorageUri.fsPath;
|
||||
}
|
||||
|
||||
async function initializeLogging(ctx: ExtensionContext): Promise<void> {
|
||||
const storagePath = getContextStoragePath(ctx);
|
||||
await logger.setLogStoragePath(storagePath, false);
|
||||
await ideServerLogger.setLogStoragePath(storagePath, false);
|
||||
ctx.subscriptions.push(logger);
|
||||
ctx.subscriptions.push(queryServerLogger);
|
||||
ctx.subscriptions.push(ideServerLogger);
|
||||
|
||||
@@ -2,6 +2,7 @@ import * as fs from 'fs-extra';
|
||||
import * as glob from 'glob-promise';
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as path from 'path';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import {
|
||||
ExtensionContext,
|
||||
Uri,
|
||||
@@ -14,6 +15,17 @@ import { UserCancellationException } from './commandRunner';
|
||||
import { logger } from './logging';
|
||||
import { QueryMetadata } from './pure/interface-types';
|
||||
|
||||
// Shared temporary folder for the extension.
export const tmpDir = tmp.dirSync({ prefix: 'queries_', keep: false, unsafeCleanup: true });
export const upgradesTmpDir = path.join(tmpDir.name, 'upgrades');
fs.ensureDirSync(upgradesTmpDir);

export const tmpDirDisposal = {
  dispose: () => {
    tmpDir.removeCallback();
  }
};
|
||||
|
||||
/**
|
||||
* Show an error message and log it to the console
|
||||
*
|
||||
@@ -64,9 +76,10 @@ export async function showAndLogWarningMessage(message: string, {
|
||||
*/
|
||||
export async function showAndLogInformationMessage(message: string, {
|
||||
outputLogger = logger,
|
||||
items = [] as string[]
|
||||
items = [] as string[],
|
||||
fullMessage = ''
|
||||
} = {}): Promise<string | undefined> {
|
||||
return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage);
|
||||
return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage, fullMessage);
|
||||
}
|
||||
|
||||
type ShowMessageFn = (message: string, ...items: string[]) => Thenable<string | undefined>;
|
||||
@@ -533,3 +546,38 @@ export async function tryGetQueryMetadata(cliServer: CodeQLCliServer, queryPath:
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a file in the query directory that indicates when this query was created.
|
||||
* This is important for keeping track of when queries should be removed.
|
||||
*
|
||||
* @param storagePath The directory that will contain all files relevant to a query result.
|
||||
* It does not need to exist.
|
||||
*/
|
||||
export async function createTimestampFile(storagePath: string) {
|
||||
const timestampPath = path.join(storagePath, 'timestamp');
|
||||
await fs.ensureDir(storagePath);
|
||||
await fs.writeFile(timestampPath, Date.now().toString(), 'utf8');
|
||||
}
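A possible consumer, sketched under the assumption that staleness is judged by file age (the helper name and threshold are illustrative, not part of this change):

async function isQueryDirStale(storagePath: string, maxAgeMs: number): Promise<boolean> {
  try {
    // Read the timestamp written by createTimestampFile above.
    const raw = await fs.readFile(path.join(storagePath, 'timestamp'), 'utf8');
    return Date.now() - Number(raw) > maxAgeMs;
  } catch {
    return true; // treat missing or unreadable timestamps as stale
  }
}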
|
||||
|
||||
|
||||
/**
|
||||
* Recursively walk a directory and return the full path to all files found.
|
||||
* Symbolic links are ignored.
|
||||
*
|
||||
* @param dir the directory to walk
|
||||
*
|
||||
* @return An iterator of the full path to all files recursively found in the directory.
|
||||
*/
|
||||
export async function* walkDirectory(dir: string): AsyncIterableIterator<string> {
|
||||
const seenFiles = new Set<string>();
|
||||
for await (const d of await fs.opendir(dir)) {
|
||||
const entry = path.join(dir, d.name);
|
||||
seenFiles.add(entry);
|
||||
if (d.isDirectory()) {
|
||||
yield* walkDirectory(entry);
|
||||
} else if (d.isFile()) {
|
||||
yield entry;
|
||||
}
|
||||
}
|
||||
}
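As a usage sketch, a caller might collect every .ql file under a folder (the helper name is illustrative):

async function findQlFiles(root: string): Promise<string[]> {
  const qlFiles: string[] = [];
  for await (const file of walkDirectory(root)) {
    if (file.endsWith('.ql')) {
      qlFiles.push(file);
    }
  }
  return qlFiles;
}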
|
||||
|
||||
extensions/ql-vscode/src/history-item-label-provider.ts (new file, 82 lines)
@@ -0,0 +1,82 @@
import { env } from 'vscode';
import * as path from 'path';
import { QueryHistoryConfig } from './config';
import { LocalQueryInfo, QueryHistoryInfo } from './query-results';
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';

interface InterpolateReplacements {
  t: string; // Start time
  q: string; // Query name
  d: string; // Database/Controller repo name
  r: string; // Result count/Empty
  s: string; // Status
  f: string; // Query file name
  '%': '%'; // Percent sign
}

export class HistoryItemLabelProvider {
  constructor(private config: QueryHistoryConfig) {
    /**/
  }

  getLabel(item: QueryHistoryInfo) {
    const replacements = item.t === 'local'
      ? this.getLocalInterpolateReplacements(item)
      : this.getRemoteInterpolateReplacements(item);

    const rawLabel = item.userSpecifiedLabel ?? (this.config.format || '%q');

    return this.interpolate(rawLabel, replacements);
  }

  /**
   * If there is a user-specified label for this query, interpolate and use that.
   * Otherwise, use the raw name of this query.
   *
   * @returns the name of the query, unless there is a custom label for this query.
   */
  getShortLabel(item: QueryHistoryInfo): string {
    return item.userSpecifiedLabel
      ? this.getLabel(item)
      : item.t === 'local'
        ? item.getQueryName()
        : item.remoteQuery.queryName;
  }

  private interpolate(rawLabel: string, replacements: InterpolateReplacements): string {
    return rawLabel.replace(/%(.)/g, (match, key: keyof InterpolateReplacements) => {
      const replacement = replacements[key];
      return replacement !== undefined ? replacement : match;
    });
  }

  private getLocalInterpolateReplacements(item: LocalQueryInfo): InterpolateReplacements {
    const { resultCount = 0, statusString = 'in progress' } = item.completedQuery || {};
    return {
      t: item.startTime,
      q: item.getQueryName(),
      d: item.initialInfo.databaseInfo.name,
      r: `${resultCount} results`,
      s: statusString,
      f: item.getQueryFileName(),
      '%': '%',
    };
  }

  private getRemoteInterpolateReplacements(item: RemoteQueryHistoryItem): InterpolateReplacements {
    return {
      t: new Date(item.remoteQuery.executionStartTime).toLocaleString(env.language),
      q: item.remoteQuery.queryName,

      // There is no database name for remote queries. Instead use the controller repository name.
      d: `${item.remoteQuery.controllerRepository.owner}/${item.remoteQuery.controllerRepository.name}`,

      // There is no synchronous way to get the results count.
      r: '',
      s: item.status,
      f: path.basename(item.remoteQuery.queryFilePath),
      '%': '%'
    };
  }
}
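A standalone sketch of the interpolation rule implemented by interpolate() above; the format string and values are illustrative only:

const replacements: Record<string, string> = {
  q: 'UnsafeDeserialization.ql', // %q - query name
  d: 'my-database',              // %d - database name
  r: '12 results',               // %r - result count
  '%': '%'                       // %% - literal percent sign
};
// Unknown directives are left untouched, just as in interpolate() above.
const label = '%q on %d (%r)'.replace(/%(.)/g, (match, key) => replacements[key] ?? match);
// label === 'UnsafeDeserialization.ql on my-database (12 results)'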
|
||||
@@ -134,7 +134,7 @@ export function getHtmlForWebview(
|
||||
: stylesheetWebviewUris.map(uri => createStylesLinkWithNonce(nonce, uri));
|
||||
|
||||
const styleSrc = allowInlineStyles
|
||||
? 'https://*.vscode-webview.net/ vscode-file: \'unsafe-inline\''
|
||||
? `${webview.cspSource} vscode-file: 'unsafe-inline'`
|
||||
: `'nonce-${nonce}'`;
|
||||
|
||||
/*
|
||||
|
||||
@@ -14,8 +14,8 @@ import {
|
||||
import * as cli from './cli';
|
||||
import { CodeQLCliServer } from './cli';
|
||||
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from './databases';
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { showAndLogErrorMessage, tmpDir } from './helpers';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import {
|
||||
FromResultsViewMsg,
|
||||
Interpretation,
|
||||
@@ -27,13 +27,14 @@ import {
|
||||
InterpretedResultsSortState,
|
||||
SortDirection,
|
||||
ALERTS_TABLE_NAME,
|
||||
GRAPH_TABLE_NAME,
|
||||
RawResultsSortState,
|
||||
} from './pure/interface-types';
|
||||
import { Logger } from './logging';
|
||||
import * as messages from './pure/messages';
|
||||
import { commandRunner } from './commandRunner';
|
||||
import { CompletedQueryInfo, interpretResults } from './query-results';
|
||||
import { QueryEvaluationInfo, tmpDir } from './run-queries';
|
||||
import { CompletedQueryInfo, interpretResultsSarif, interpretGraphResults } from './query-results';
|
||||
import { QueryEvaluationInfo } from './run-queries';
|
||||
import { parseSarifLocation, parseSarifPlainTextMessage } from './pure/sarif-utils';
|
||||
import {
|
||||
WebviewReveal,
|
||||
@@ -47,7 +48,8 @@ import {
|
||||
import { getDefaultResultSetName, ParsedResultSets } from './pure/interface-types';
|
||||
import { RawResultSet, transformBqrsResultSet, ResultSetSchema } from './pure/bqrs-cli-types';
|
||||
import { PAGE_SIZE } from './config';
|
||||
import { FullCompletedQueryInfo } from './query-results';
|
||||
import { CompletedLocalQueryInfo } from './query-results';
|
||||
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||
|
||||
/**
|
||||
* interface.ts
|
||||
@@ -88,16 +90,40 @@ function sortInterpretedResults(
|
||||
}
|
||||
}
|
||||
|
||||
function numPagesOfResultSet(resultSet: RawResultSet): number {
|
||||
return Math.ceil(resultSet.schema.rows / PAGE_SIZE.getValue<number>());
|
||||
function interpretedPageSize(interpretation: Interpretation | undefined): number {
|
||||
if (interpretation?.data.t == 'GraphInterpretationData') {
|
||||
// Graph views always have one result per page.
|
||||
return 1;
|
||||
}
|
||||
return PAGE_SIZE.getValue<number>();
|
||||
}
|
||||
|
||||
function numPagesOfResultSet(resultSet: RawResultSet, interpretation?: Interpretation): number {
|
||||
const pageSize = interpretedPageSize(interpretation);
|
||||
|
||||
const n = interpretation?.data.t == 'GraphInterpretationData'
|
||||
? interpretation.data.dot.length
|
||||
: resultSet.schema.rows;
|
||||
|
||||
return Math.ceil(n / pageSize);
|
||||
}
|
||||
|
||||
function numInterpretedPages(interpretation: Interpretation | undefined): number {
|
||||
return Math.ceil((interpretation?.sarif.runs[0].results?.length || 0) / PAGE_SIZE.getValue<number>());
|
||||
if (!interpretation) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const pageSize = interpretedPageSize(interpretation);
|
||||
|
||||
const n = interpretation.data.t == 'GraphInterpretationData'
|
||||
? interpretation.data.dot.length
|
||||
: interpretation.data.runs[0].results?.length || 0;
|
||||
|
||||
return Math.ceil(n / pageSize);
|
||||
}
|
||||
|
||||
export class InterfaceManager extends DisposableObject {
|
||||
private _displayedQuery?: FullCompletedQueryInfo;
|
||||
private _displayedQuery?: CompletedLocalQueryInfo;
|
||||
private _interpretation?: Interpretation;
|
||||
private _panel: vscode.WebviewPanel | undefined;
|
||||
private _panelLoaded = false;
|
||||
@@ -111,7 +137,8 @@ export class InterfaceManager extends DisposableObject {
|
||||
public ctx: vscode.ExtensionContext,
|
||||
private databaseManager: DatabaseManager,
|
||||
public cliServer: CodeQLCliServer,
|
||||
public logger: Logger
|
||||
public logger: Logger,
|
||||
private labelProvider: HistoryItemLabelProvider
|
||||
) {
|
||||
super();
|
||||
this.push(this._diagnosticCollection);
|
||||
@@ -181,6 +208,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
() => {
|
||||
this._panel = undefined;
|
||||
this._displayedQuery = undefined;
|
||||
this._panelLoaded = false;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
@@ -305,7 +333,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
await this.changeInterpretedSortState(msg.sortState);
|
||||
break;
|
||||
case 'changePage':
|
||||
if (msg.selectedTable === ALERTS_TABLE_NAME) {
|
||||
if (msg.selectedTable === ALERTS_TABLE_NAME || msg.selectedTable === GRAPH_TABLE_NAME) {
|
||||
await this.showPageOfInterpretedResults(msg.pageNumber);
|
||||
}
|
||||
else {
|
||||
@@ -327,8 +355,8 @@ export class InterfaceManager extends DisposableObject {
|
||||
assertNever(msg);
|
||||
}
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message, {
|
||||
fullMessage: e.stack
|
||||
void showAndLogErrorMessage(getErrorMessage(e), {
|
||||
fullMessage: getErrorStack(e)
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -357,7 +385,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
* history entry.
|
||||
*/
|
||||
public async showResults(
|
||||
fullQuery: FullCompletedQueryInfo,
|
||||
fullQuery: CompletedLocalQueryInfo,
|
||||
forceReveal: WebviewReveal,
|
||||
shouldKeepOldResultsWhileRendering = false
|
||||
): Promise<void> {
|
||||
@@ -390,7 +418,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
// more asynchronous message to not so abruptly interrupt
|
||||
// user's workflow by immediately revealing the panel.
|
||||
const showButton = 'View Results';
|
||||
const queryName = fullQuery.getShortLabel();
|
||||
const queryName = this.labelProvider.getShortLabel(fullQuery);
|
||||
const resultPromise = vscode.window.showInformationMessage(
|
||||
`Finished running query ${queryName.length > 0 ? ` "${queryName}"` : ''
|
||||
}.`,
|
||||
@@ -438,7 +466,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
const parsedResultSets: ParsedResultSets = {
|
||||
pageNumber: 0,
|
||||
pageSize,
|
||||
numPages: numPagesOfResultSet(resultSet),
|
||||
numPages: numPagesOfResultSet(resultSet, this._interpretation),
|
||||
numInterpretedPages: numInterpretedPages(this._interpretation),
|
||||
resultSet: { ...resultSet, t: 'RawResultSet' },
|
||||
selectedTable: undefined,
|
||||
@@ -457,7 +485,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
database: fullQuery.initialInfo.databaseInfo,
|
||||
shouldKeepOldResultsWhileRendering,
|
||||
metadata: fullQuery.completedQuery.query.metadata,
|
||||
queryName: fullQuery.label,
|
||||
queryName: this.labelProvider.getLabel(fullQuery),
|
||||
queryPath: fullQuery.initialInfo.queryPath
|
||||
});
|
||||
}
|
||||
@@ -474,7 +502,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
if (this._interpretation === undefined) {
|
||||
throw new Error('Trying to show interpreted results but interpretation was undefined');
|
||||
}
|
||||
if (this._interpretation.sarif.runs[0].results === undefined) {
|
||||
if (this._interpretation.data.t === 'SarifInterpretationData' && this._interpretation.data.runs[0].results === undefined) {
|
||||
throw new Error('Trying to show interpreted results but results were undefined');
|
||||
}
|
||||
|
||||
@@ -488,9 +516,9 @@ export class InterfaceManager extends DisposableObject {
|
||||
metadata: this._displayedQuery.completedQuery.query.metadata,
|
||||
pageNumber,
|
||||
resultSetNames,
|
||||
pageSize: PAGE_SIZE.getValue(),
|
||||
pageSize: interpretedPageSize(this._interpretation),
|
||||
numPages: numInterpretedPages(this._interpretation),
|
||||
queryName: this._displayedQuery.label,
|
||||
queryName: this.labelProvider.getLabel(this._displayedQuery),
|
||||
queryPath: this._displayedQuery.initialInfo.queryPath
|
||||
});
|
||||
}
|
||||
@@ -575,7 +603,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
database: results.initialInfo.databaseInfo,
|
||||
shouldKeepOldResultsWhileRendering: false,
|
||||
metadata: results.completedQuery.query.metadata,
|
||||
queryName: results.label,
|
||||
queryName: this.labelProvider.getLabel(results),
|
||||
queryPath: results.initialInfo.queryPath
|
||||
});
|
||||
}
|
||||
@@ -591,28 +619,45 @@ export class InterfaceManager extends DisposableObject {
|
||||
void this.logger.log('No results path. Cannot display interpreted results.');
|
||||
return undefined;
|
||||
}
|
||||
let data;
|
||||
let numTotalResults;
|
||||
if (metadata?.kind === GRAPH_TABLE_NAME) {
|
||||
data = await interpretGraphResults(
|
||||
this.cliServer,
|
||||
metadata,
|
||||
resultsPaths,
|
||||
sourceInfo
|
||||
);
|
||||
numTotalResults = data.dot.length;
|
||||
} else {
|
||||
const sarif = await interpretResultsSarif(
|
||||
this.cliServer,
|
||||
metadata,
|
||||
resultsPaths,
|
||||
sourceInfo
|
||||
);
|
||||
|
||||
const sarif = await interpretResults(
|
||||
this.cliServer,
|
||||
metadata,
|
||||
resultsPaths,
|
||||
sourceInfo
|
||||
);
|
||||
sarif.runs.forEach(run => {
|
||||
if (run.results) {
|
||||
sortInterpretedResults(run.results, sortState);
|
||||
}
|
||||
});
|
||||
|
||||
sarif.runs.forEach(run => {
|
||||
if (run.results !== undefined) {
|
||||
sortInterpretedResults(run.results, sortState);
|
||||
}
|
||||
});
|
||||
sarif.sortState = sortState;
|
||||
data = sarif;
|
||||
|
||||
const numTotalResults = sarif.runs[0]?.results?.length || 0;
|
||||
numTotalResults = (() => {
|
||||
return sarif.runs?.[0]?.results
|
||||
? sarif.runs[0].results.length
|
||||
: 0;
|
||||
})();
|
||||
}
|
||||
|
||||
const interpretation: Interpretation = {
|
||||
sarif,
|
||||
data,
|
||||
sourceLocationPrefix,
|
||||
numTruncatedResults: 0,
|
||||
numTotalResults,
|
||||
sortState,
|
||||
numTotalResults
|
||||
};
|
||||
this._interpretation = interpretation;
|
||||
return interpretation;
|
||||
@@ -621,7 +666,6 @@ export class InterfaceManager extends DisposableObject {
|
||||
private getPageOfInterpretedResults(
|
||||
pageNumber: number
|
||||
): Interpretation {
|
||||
|
||||
function getPageOfRun(run: Sarif.Run): Sarif.Run {
|
||||
return {
|
||||
...run, results: run.results?.slice(
|
||||
@@ -631,16 +675,24 @@ export class InterfaceManager extends DisposableObject {
|
||||
};
|
||||
}
|
||||
|
||||
if (this._interpretation === undefined) {
|
||||
const interp = this._interpretation;
|
||||
if (interp === undefined) {
|
||||
throw new Error('Tried to get interpreted results before interpretation finished');
|
||||
}
|
||||
if (this._interpretation.sarif.runs.length !== 1) {
|
||||
void this.logger.log(`Warning: SARIF file had ${this._interpretation.sarif.runs.length} runs, expected 1`);
|
||||
|
||||
if (interp.data.t !== 'SarifInterpretationData')
|
||||
return interp;
|
||||
|
||||
if (interp.data.runs.length !== 1) {
|
||||
void this.logger.log(`Warning: SARIF file had ${interp.data.runs.length} runs, expected 1`);
|
||||
}
|
||||
const interp = this._interpretation;
|
||||
|
||||
return {
|
||||
...interp,
|
||||
sarif: { ...interp.sarif, runs: [getPageOfRun(interp.sarif.runs[0])] },
|
||||
data: {
|
||||
...interp.data,
|
||||
runs: [getPageOfRun(interp.data.runs[0])]
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -679,7 +731,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
// If interpretation fails, accept the error and continue
|
||||
// trying to render uninterpreted results anyway.
|
||||
void showAndLogErrorMessage(
|
||||
`Showing raw results instead of interpreted ones due to an error. ${e.message}`
|
||||
`Showing raw results instead of interpreted ones due to an error. ${getErrorMessage(e)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -718,9 +770,8 @@ export class InterfaceManager extends DisposableObject {
|
||||
try {
|
||||
await this.showProblemResultsAsDiagnostics(interpretation, database);
|
||||
} catch (e) {
|
||||
const msg = e instanceof Error ? e.message : e.toString();
|
||||
void this.logger.log(
|
||||
`Exception while computing problem results as diagnostics: ${msg}`
|
||||
`Exception while computing problem results as diagnostics: ${getErrorMessage(e)}`
|
||||
);
|
||||
this._diagnosticCollection.clear();
|
||||
}
|
||||
@@ -730,9 +781,12 @@ export class InterfaceManager extends DisposableObject {
|
||||
interpretation: Interpretation,
|
||||
databaseItem: DatabaseItem
|
||||
): Promise<void> {
|
||||
const { sarif, sourceLocationPrefix } = interpretation;
|
||||
const { data, sourceLocationPrefix } = interpretation;
|
||||
|
||||
if (!sarif.runs || !sarif.runs[0].results) {
|
||||
if (data.t !== 'SarifInterpretationData')
|
||||
return;
|
||||
|
||||
if (!data.runs || !data.runs[0].results) {
|
||||
void this.logger.log(
|
||||
'Didn\'t find a run in the sarif results. Error processing sarif?'
|
||||
);
|
||||
@@ -741,7 +795,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
|
||||
const diagnostics: [Uri, ReadonlyArray<Diagnostic>][] = [];
|
||||
|
||||
for (const result of sarif.runs[0].results) {
|
||||
for (const result of data.runs[0].results) {
|
||||
const message = result.message.text;
|
||||
if (message === undefined) {
|
||||
void this.logger.log('Sarif had result without plaintext message');
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { window as Window, OutputChannel, Progress, Disposable } from 'vscode';
|
||||
import { window as Window, OutputChannel, Progress } from 'vscode';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
@@ -26,18 +26,6 @@ export interface Logger {
|
||||
* @param location log to remove
|
||||
*/
|
||||
removeAdditionalLogLocation(location: string | undefined): void;
|
||||
|
||||
/**
|
||||
* The base location where all side log files are stored.
|
||||
*/
|
||||
getBaseLocation(): string | undefined;
|
||||
|
||||
/**
|
||||
* Sets the location where logs are stored.
|
||||
* @param storagePath The path where logs are stored.
|
||||
* @param isCustomLogDirectory Whether the logs are stored in a custom, user-specified directory.
|
||||
*/
|
||||
setLogStoragePath(storagePath: string, isCustomLogDirectory: boolean): Promise<void>;
|
||||
}
|
||||
|
||||
export type ProgressReporter = Progress<{ message: string }>;
|
||||
@@ -46,27 +34,15 @@ export type ProgressReporter = Progress<{ message: string }>;
|
||||
export class OutputChannelLogger extends DisposableObject implements Logger {
|
||||
public readonly outputChannel: OutputChannel;
|
||||
private readonly additionalLocations = new Map<string, AdditionalLogLocation>();
|
||||
private additionalLogLocationPath: string | undefined;
|
||||
isCustomLogDirectory: boolean;
|
||||
|
||||
constructor(private title: string) {
|
||||
constructor(title: string) {
|
||||
super();
|
||||
this.outputChannel = Window.createOutputChannel(title);
|
||||
this.push(this.outputChannel);
|
||||
this.isCustomLogDirectory = false;
|
||||
}
|
||||
|
||||
async setLogStoragePath(storagePath: string, isCustomLogDirectory: boolean): Promise<void> {
|
||||
this.additionalLogLocationPath = path.join(storagePath, this.title);
|
||||
|
||||
this.isCustomLogDirectory = isCustomLogDirectory;
|
||||
|
||||
if (!this.isCustomLogDirectory) {
|
||||
// clear out any old state from previous runs
|
||||
await fs.remove(this.additionalLogLocationPath);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is asynchronous and will only resolve once the message is written
|
||||
* to the side log (if required). It is not necessary to await the results of this
|
||||
@@ -84,8 +60,11 @@ export class OutputChannelLogger extends DisposableObject implements Logger {
|
||||
this.outputChannel.append(message);
|
||||
}
|
||||
|
||||
if (this.additionalLogLocationPath && options.additionalLogLocation) {
|
||||
const logPath = path.join(this.additionalLogLocationPath, options.additionalLogLocation);
|
||||
if (options.additionalLogLocation) {
|
||||
if (!path.isAbsolute(options.additionalLogLocation)) {
|
||||
throw new Error(`Additional Log Location must be an absolute path: ${options.additionalLogLocation}`);
|
||||
}
|
||||
const logPath = options.additionalLogLocation;
|
||||
let additional = this.additionalLocations.get(logPath);
|
||||
if (!additional) {
|
||||
const msg = `| Log being saved to ${logPath} |`;
|
||||
@@ -93,9 +72,8 @@ export class OutputChannelLogger extends DisposableObject implements Logger {
|
||||
this.outputChannel.appendLine(separator);
|
||||
this.outputChannel.appendLine(msg);
|
||||
this.outputChannel.appendLine(separator);
|
||||
additional = new AdditionalLogLocation(logPath, !this.isCustomLogDirectory);
|
||||
additional = new AdditionalLogLocation(logPath);
|
||||
this.additionalLocations.set(logPath, additional);
|
||||
this.track(additional);
|
||||
}
|
||||
|
||||
await additional.log(message, options);
|
||||
@@ -115,26 +93,15 @@ export class OutputChannelLogger extends DisposableObject implements Logger {
|
||||
}
|
||||
|
||||
removeAdditionalLogLocation(location: string | undefined): void {
|
||||
if (this.additionalLogLocationPath && location) {
|
||||
const logPath = location.startsWith(this.additionalLogLocationPath)
|
||||
? location
|
||||
: path.join(this.additionalLogLocationPath, location);
|
||||
const additional = this.additionalLocations.get(logPath);
|
||||
if (additional) {
|
||||
this.disposeAndStopTracking(additional);
|
||||
this.additionalLocations.delete(logPath);
|
||||
}
|
||||
if (location) {
|
||||
this.additionalLocations.delete(location);
|
||||
}
|
||||
}
|
||||
|
||||
getBaseLocation() {
|
||||
return this.additionalLogLocationPath;
|
||||
}
|
||||
}
|
||||
|
||||
class AdditionalLogLocation extends Disposable {
|
||||
constructor(private location: string, private shouldDeleteLogs: boolean) {
|
||||
super(() => { /**/ });
|
||||
class AdditionalLogLocation {
|
||||
constructor(private location: string) {
|
||||
/**/
|
||||
}
|
||||
|
||||
async log(message: string, options = {} as LogOptions): Promise<void> {
|
||||
@@ -147,12 +114,6 @@ class AdditionalLogLocation extends Disposable {
|
||||
encoding: 'utf8'
|
||||
});
|
||||
}
|
||||
|
||||
async dispose(): Promise<void> {
|
||||
if (this.shouldDeleteLogs) {
|
||||
await fs.remove(this.location);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** The global logger for the extension. */
|
||||
|
||||
@@ -79,11 +79,11 @@ export interface WholeFileLocation {
|
||||
|
||||
export type ResolvableLocationValue = WholeFileLocation | LineColumnLocation;
|
||||
|
||||
export type UrlValue = ResolvableLocationValue | string;
|
||||
export type UrlValue = ResolvableLocationValue | string;
|
||||
|
||||
export type ColumnValue = EntityValue | number | string | boolean;
|
||||
export type CellValue = EntityValue | number | string | boolean;
|
||||
|
||||
export type ResultRow = ColumnValue[];
|
||||
export type ResultRow = CellValue[];
|
||||
|
||||
export interface RawResultSet {
|
||||
readonly schema: ResultSetSchema;
|
||||
@@ -104,6 +104,6 @@ export function transformBqrsResultSet(
|
||||
}
|
||||
|
||||
export interface DecodedBqrsChunk {
|
||||
tuples: ColumnValue[][];
|
||||
tuples: CellValue[][];
|
||||
next?: number;
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
LineColumnLocation,
|
||||
WholeFileLocation
|
||||
} from './bqrs-cli-types';
|
||||
import { createRemoteFileRef } from './location-link-utils';
|
||||
|
||||
/**
|
||||
* The CodeQL filesystem libraries use this pattern in `getURL()` predicates
|
||||
@@ -93,3 +94,30 @@ export function isWholeFileLoc(loc: UrlValue): loc is WholeFileLocation {
|
||||
export function isStringLoc(loc: UrlValue): loc is string {
|
||||
return typeof loc === 'string';
|
||||
}
|
||||
|
||||
export function tryGetRemoteLocation(
|
||||
loc: UrlValue | undefined,
|
||||
fileLinkPrefix: string
|
||||
): string | undefined {
|
||||
const resolvableLocation = tryGetResolvableLocation(loc);
|
||||
if (!resolvableLocation) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Remote locations have the following format:
|
||||
// file:/home/runner/work/<repo>/<repo>/relative/path/to/file
|
||||
// So we need to drop the first 6 parts of the path.
|
||||
|
||||
// TODO: We can make this more robust to other path formats.
|
||||
const locationParts = resolvableLocation.uri.split('/');
|
||||
const trimmedLocation = locationParts.slice(6, locationParts.length).join('/');
|
||||
|
||||
const fileLink = {
|
||||
fileLinkPrefix,
|
||||
filePath: trimmedLocation,
|
||||
};
|
||||
return createRemoteFileRef(
|
||||
fileLink,
|
||||
resolvableLocation.startLine,
|
||||
resolvableLocation.endLine);
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
|
||||
/**
|
||||
* helpers-pure.ts
|
||||
* ------------
|
||||
@@ -29,3 +30,27 @@ export const asyncFilter = async function <T>(arr: T[], predicate: (arg0: T) =>
|
||||
const results = await Promise.all(arr.map(predicate));
|
||||
return arr.filter((_, index) => results[index]);
|
||||
};
|
||||
|
||||
export const ONE_DAY_IN_MS = 24 * 60 * 60 * 1000;
export const ONE_HOUR_IN_MS = 1000 * 60 * 60;
export const TWO_HOURS_IN_MS = 1000 * 60 * 60 * 2;
export const THREE_HOURS_IN_MS = 1000 * 60 * 60 * 3;

/**
 * This regex matches strings of the form `owner/repo` where:
 * - `owner` is made up of alphanumeric characters or single hyphens, starting and ending in an alphanumeric character
 * - `repo` is made up of alphanumeric characters, hyphens, or underscores
 */
export const REPO_REGEX = /^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+\/[a-zA-Z0-9-_]+$/;

export function getErrorMessage(e: any) {
  return e instanceof Error ? e.message : String(e);
}

export function getErrorStack(e: any) {
  return e instanceof Error ? e.stack ?? '' : '';
}

export function asError(e: any): Error {
  return e instanceof Error ? e : new Error(String(e));
}
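A minimal usage sketch of these helpers, mirroring the catch blocks rewritten elsewhere in this change (the throwing operation is hypothetical):

async function doSomethingThatMayThrow(): Promise<void> {
  throw 'a non-Error value'; // illustrates why asError/getErrorMessage are needed
}

async function runWithReporting(): Promise<void> {
  try {
    await doSomethingThatMayThrow();
  } catch (e) {
    const err = asError(e);                      // normalize non-Error throw values
    console.error(`Operation failed: ${getErrorMessage(e)}`);
    console.debug(getErrorStack(e));             // '' when no stack is available
    throw err;
  }
}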
|
||||
|
||||
@@ -10,15 +10,17 @@ import { RawResultSet, ResultRow, ResultSetSchema, Column, ResolvableLocationVal
|
||||
|
||||
export const SELECT_TABLE_NAME = '#select';
|
||||
export const ALERTS_TABLE_NAME = 'alerts';
|
||||
export const GRAPH_TABLE_NAME = 'graph';
|
||||
|
||||
export type RawTableResultSet = { t: 'RawResultSet' } & RawResultSet;
|
||||
export type PathTableResultSet = {
|
||||
t: 'SarifResultSet';
|
||||
export type InterpretedResultSet<T> = {
|
||||
t: 'InterpretedResultSet';
|
||||
readonly schema: ResultSetSchema;
|
||||
name: string;
|
||||
} & Interpretation;
|
||||
interpretation: InterpretationT<T>;
|
||||
};
|
||||
|
||||
export type ResultSet = RawTableResultSet | PathTableResultSet;
|
||||
export type ResultSet = RawTableResultSet | InterpretedResultSet<InterpretationData>;
|
||||
|
||||
/**
|
||||
* Only ever show this many rows in a raw result table.
|
||||
@@ -46,18 +48,31 @@ export interface PreviousExecution {
|
||||
durationSeconds: number;
|
||||
}
|
||||
|
||||
export interface Interpretation {
|
||||
sourceLocationPrefix: string;
|
||||
numTruncatedResults: number;
|
||||
numTotalResults: number;
|
||||
export type SarifInterpretationData = {
|
||||
t: 'SarifInterpretationData';
|
||||
/**
|
||||
* sortState being undefined means don't sort, just present results in the order
|
||||
* they appear in the sarif file.
|
||||
*/
|
||||
sortState?: InterpretedResultsSortState;
|
||||
sarif: sarif.Log;
|
||||
} & sarif.Log;
|
||||
|
||||
export type GraphInterpretationData = {
|
||||
t: 'GraphInterpretationData';
|
||||
dot: string[];
|
||||
};
|
||||
|
||||
export type InterpretationData = SarifInterpretationData | GraphInterpretationData;
|
||||
|
||||
export interface InterpretationT<T> {
|
||||
sourceLocationPrefix: string;
|
||||
numTruncatedResults: number;
|
||||
numTotalResults: number;
|
||||
data: T;
|
||||
}
|
||||
|
||||
export type Interpretation = InterpretationT<InterpretationData>;
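A hedged sketch of how a consumer narrows the new discriminated union (the helper name is illustrative):

function countInterpretedResults(interp: Interpretation): number {
  if (interp.data.t === 'GraphInterpretationData') {
    return interp.data.dot.length;               // one rendered graph per page
  }
  // SarifInterpretationData extends sarif.Log, so runs are available directly.
  return interp.data.runs[0]?.results?.length ?? 0;
}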
|
||||
|
||||
export interface ResultsPaths {
|
||||
resultsPath: string;
|
||||
interpretedResultsPath: string;
|
||||
@@ -357,8 +372,9 @@ export function getDefaultResultSetName(
|
||||
// Choose first available result set from the array
|
||||
return [
|
||||
ALERTS_TABLE_NAME,
|
||||
GRAPH_TABLE_NAME,
|
||||
SELECT_TABLE_NAME,
|
||||
resultSetNames[0],
|
||||
resultSetNames[0]
|
||||
].filter((resultSetName) => resultSetNames.includes(resultSetName))[0];
|
||||
}
|
||||
|
||||
@@ -379,7 +395,7 @@ export type FromRemoteQueriesMessage =
|
||||
| OpenVirtualFileMsg
|
||||
| RemoteQueryDownloadAnalysisResultsMessage
|
||||
| RemoteQueryDownloadAllAnalysesResultsMessage
|
||||
| RemoteQueryViewAnalysisResultsMessage;
|
||||
| RemoteQueryExportResultsMessage;
|
||||
|
||||
export type ToRemoteQueriesMessage =
|
||||
| SetRemoteQueryResultMessage
|
||||
@@ -414,7 +430,6 @@ export interface RemoteQueryDownloadAllAnalysesResultsMessage {
|
||||
analysisSummaries: AnalysisSummary[];
|
||||
}
|
||||
|
||||
export interface RemoteQueryViewAnalysisResultsMessage {
|
||||
t: 'remoteQueryViewAnalysisResults';
|
||||
analysisSummary: AnalysisSummary
|
||||
export interface RemoteQueryExportResultsMessage {
|
||||
t: 'remoteQueryExportResults';
|
||||
}
|
||||
|
||||
15
extensions/ql-vscode/src/pure/location-link-utils.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { FileLink } from '../remote-queries/shared/analysis-result';
|
||||
|
||||
export function createRemoteFileRef(
|
||||
fileLink: FileLink,
|
||||
startLine?: number,
|
||||
endLine?: number
|
||||
): string {
|
||||
if (startLine && endLine) {
|
||||
return `${fileLink.fileLinkPrefix}/${fileLink.filePath}#L${startLine}-L${endLine}`;
|
||||
} else if (startLine) {
|
||||
return `${fileLink.fileLinkPrefix}/${fileLink.filePath}#L${startLine}`;
|
||||
} else {
|
||||
return `${fileLink.fileLinkPrefix}/${fileLink.filePath}`;
|
||||
}
|
||||
}
|
||||
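A rough usage sketch for the new helper; the FileLink values are made up and the import path assumes the module lives under src/pure:

import { createRemoteFileRef } from './pure/location-link-utils';

const fileLink = {
  fileLinkPrefix: 'https://github.com/github/codeql/blob/abc123',
  filePath: 'cpp/ql/src/Example.ql'
};

createRemoteFileRef(fileLink, 10, 12); // '.../Example.ql#L10-L12'
createRemoteFileRef(fileLink, 10);     // '.../Example.ql#L10'
createRemoteFileRef(fileLink);         // '.../Example.ql' — no line anchor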
@@ -646,6 +646,35 @@ export interface ClearCacheParams {
|
||||
*/
|
||||
dryRun: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters to start a new structured log
|
||||
*/
|
||||
export interface StartLogParams {
|
||||
/**
|
||||
* The dataset for which we want to start a new structured log
|
||||
*/
|
||||
db: Dataset;
|
||||
/**
|
||||
* The path where we want to place the new structured log
|
||||
*/
|
||||
logPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters to terminate a structured log
|
||||
*/
|
||||
export interface EndLogParams {
|
||||
/**
|
||||
* The dataset for which we want to terminate the log
|
||||
*/
|
||||
db: Dataset;
|
||||
/**
|
||||
* The path of the log to terminate; this is a no-op if we aren't logging here
|
||||
*/
|
||||
logPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for trimming the cache of a dataset
|
||||
*/
|
||||
@@ -682,6 +711,26 @@ export interface ClearCacheResult {
|
||||
deletionMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of starting a new structured log.
|
||||
*/
|
||||
export interface StartLogResult {
|
||||
/**
|
||||
* A user-friendly message saying what happened.
|
||||
*/
|
||||
outcomeMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of terminating a structured log.
|
||||
*/
|
||||
export interface EndLogResult {
|
||||
/**
|
||||
* A user-friendly message saying what happened.
|
||||
*/
|
||||
outcomeMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for running a set of queries
|
||||
*/
|
||||
@@ -1018,6 +1067,16 @@ export const compileUpgrade = new rpc.RequestType<WithProgressId<CompileUpgradeP
|
||||
*/
|
||||
export const compileUpgradeSequence = new rpc.RequestType<WithProgressId<CompileUpgradeSequenceParams>, CompileUpgradeSequenceResult, void, void>('compilation/compileUpgradeSequence');
|
||||
|
||||
/**
|
||||
* Start a new structured log in the evaluator, terminating the previous one if it exists
|
||||
*/
|
||||
export const startLog = new rpc.RequestType<WithProgressId<StartLogParams>, StartLogResult, void, void>('evaluation/startLog');
|
||||
|
||||
/**
|
||||
* Terminate a structured log in the evaluator. This is a no-op if we aren't logging to the given location
|
||||
*/
|
||||
export const endLog = new rpc.RequestType<WithProgressId<EndLogParams>, EndLogResult, void, void>('evaluation/endLog');
|
||||
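A hypothetical call-site sketch showing how these request types might be sent from the query server client; `qs`, `db`, and the log path are assumptions rather than code from this change:

// Inside an async function, with `qs` a connected QueryServerClient and `db` a registered Dataset.
const started = await qs.sendRequest(messages.startLog, {
  db,
  logPath: '/tmp/structured-evaluator-log.jsonl'
});
void logger.log(started.outcomeMessage);

// ... evaluate one or more queries ...

const ended = await qs.sendRequest(messages.endLog, {
  db,
  logPath: '/tmp/structured-evaluator-log.jsonl'
});
void logger.log(ended.outcomeMessage);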
|
||||
/**
|
||||
* Clear the cache of a dataset
|
||||
*/
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import * as Sarif from 'sarif';
|
||||
import { HighlightedRegion } from '../remote-queries/shared/analysis-result';
|
||||
import { ResolvableLocationValue } from './bqrs-cli-types';
|
||||
|
||||
export interface SarifLink {
|
||||
@@ -127,35 +128,111 @@ export function parseSarifLocation(
|
||||
userVisibleFile
|
||||
} as ParsedSarifLocation;
|
||||
} else {
|
||||
const region = physicalLocation.region;
|
||||
// We assume that the SARIF we're given always has startLine
|
||||
// This is not mandated by the SARIF spec, but should be true of
|
||||
// SARIF output by our own tools.
|
||||
const startLine = region.startLine!;
|
||||
|
||||
// These defaults are from SARIF 2.1.0 spec, section 3.30.2, "Text Regions"
|
||||
// https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Ref493492556
|
||||
const endLine = region.endLine === undefined ? startLine : region.endLine;
|
||||
const startColumn = region.startColumn === undefined ? 1 : region.startColumn;
|
||||
|
||||
// We also assume that our tools will always supply `endColumn` field, which is
|
||||
// fortunate, since the SARIF spec says that it defaults to the end of the line, whose
|
||||
// length we don't know at this point in the code.
|
||||
//
|
||||
// It is off by one with respect to the way vscode counts columns in selections.
|
||||
const endColumn = region.endColumn! - 1;
|
||||
const region = parseSarifRegion(physicalLocation.region);
|
||||
|
||||
return {
|
||||
uri: effectiveLocation,
|
||||
userVisibleFile,
|
||||
startLine,
|
||||
startColumn,
|
||||
endLine,
|
||||
endColumn,
|
||||
...region
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export function parseSarifRegion(
|
||||
region: Sarif.Region
|
||||
): {
|
||||
startLine: number,
|
||||
endLine: number,
|
||||
startColumn: number,
|
||||
endColumn: number
|
||||
} {
|
||||
// The SARIF we're given should have a startLine, but we
|
||||
// fall back to 1, just in case something has gone wrong.
|
||||
const startLine = region.startLine ?? 1;
|
||||
|
||||
// These defaults are from SARIF 2.1.0 spec, section 3.30.2, "Text Regions"
|
||||
// https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Ref493492556
|
||||
const endLine = region.endLine === undefined ? startLine : region.endLine;
|
||||
const startColumn = region.startColumn === undefined ? 1 : region.startColumn;
|
||||
|
||||
// Our tools should always supply `endColumn` field, which is fortunate, since
|
||||
// the SARIF spec says that it defaults to the end of the line, whose
|
||||
// length we don't know at this point in the code. We fall back to 1,
|
||||
// just in case something has gone wrong.
|
||||
//
|
||||
// It is off by one with respect to the way vscode counts columns in selections.
|
||||
const endColumn = (region.endColumn ?? 1) - 1;
|
||||
|
||||
return {
|
||||
startLine,
|
||||
startColumn,
|
||||
endLine,
|
||||
endColumn
|
||||
};
|
||||
}
|
||||
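A quick sketch of the defaults described above (illustrative only; the import path assumes pure/sarif-utils.ts):

import { parseSarifRegion } from './pure/sarif-utils';

// Only startLine present: endLine defaults to startLine, startColumn to 1,
// and the missing endColumn falls back to 1, so the adjusted endColumn is 0.
parseSarifRegion({ startLine: 42 });
// => { startLine: 42, endLine: 42, startColumn: 1, endColumn: 0 }

// Fully specified region: endColumn is shifted by one to match VS Code selections.
parseSarifRegion({ startLine: 3, endLine: 5, startColumn: 2, endColumn: 10 });
// => { startLine: 3, endLine: 5, startColumn: 2, endColumn: 9 }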
|
||||
export function isNoLocation(loc: ParsedSarifLocation): loc is NoLocation {
|
||||
return 'hint' in loc;
|
||||
}
|
||||
|
||||
// Some helpers for highlighting specific regions from a SARIF code snippet
|
||||
|
||||
/**
|
||||
* Checks whether a particular line (determined by its line number in the original file)
|
||||
* is part of the highlighted region of a SARIF code snippet.
|
||||
*/
|
||||
export function shouldHighlightLine(
|
||||
lineNumber: number,
|
||||
highlightedRegion: HighlightedRegion
|
||||
): boolean {
|
||||
if (lineNumber < highlightedRegion.startLine) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (highlightedRegion.endLine == undefined) {
|
||||
return lineNumber == highlightedRegion.startLine;
|
||||
}
|
||||
|
||||
return lineNumber <= highlightedRegion.endLine;
|
||||
}
|
||||
|
||||
/**
|
||||
* A line of code split into: plain text before the highlighted section, the highlighted
|
||||
* text itself, and plain text after the highlighted section.
|
||||
*/
|
||||
export interface PartiallyHighlightedLine {
|
||||
plainSection1: string;
|
||||
highlightedSection: string;
|
||||
plainSection2: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Splits a line of code into the highlighted and non-highlighted sections.
|
||||
*/
|
||||
export function parseHighlightedLine(
|
||||
line: string,
|
||||
lineNumber: number,
|
||||
highlightedRegion: HighlightedRegion
|
||||
): PartiallyHighlightedLine {
|
||||
const isSingleLineHighlight = highlightedRegion.endLine === undefined;
|
||||
const isFirstHighlightedLine = lineNumber === highlightedRegion.startLine;
|
||||
const isLastHighlightedLine = lineNumber === highlightedRegion.endLine;
|
||||
|
||||
const highlightStartColumn = isSingleLineHighlight
|
||||
? highlightedRegion.startColumn
|
||||
: isFirstHighlightedLine
|
||||
? highlightedRegion.startColumn
|
||||
: 0;
|
||||
|
||||
const highlightEndColumn = isSingleLineHighlight
|
||||
? highlightedRegion.endColumn
|
||||
: isLastHighlightedLine
|
||||
? highlightedRegion.endColumn
|
||||
: line.length + 1;
|
||||
|
||||
const plainSection1 = line.substring(0, highlightStartColumn - 1);
|
||||
const highlightedSection = line.substring(highlightStartColumn - 1, highlightEndColumn - 1);
|
||||
const plainSection2 = line.substring(highlightEndColumn - 1, line.length);
|
||||
|
||||
return { plainSection1, highlightedSection, plainSection2 };
|
||||
}
|
||||
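An illustrative split, assuming HighlightedRegion allows endLine to be omitted for single-line highlights (the line of code is invented):

const line = 'const answer = 42;';
const highlightedRegion = { startLine: 7, startColumn: 7, endColumn: 13 }; // highlights 'answer'

parseHighlightedLine(line, 7, highlightedRegion);
// => { plainSection1: 'const ', highlightedSection: 'answer', plainSection2: ' = 42;' }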
|
||||
135
extensions/ql-vscode/src/query-history-scrubber.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import { Disposable, ExtensionContext } from 'vscode';
|
||||
import { logger } from './logging';
|
||||
|
||||
const LAST_SCRUB_TIME_KEY = 'lastScrubTime';
|
||||
|
||||
type Counter = {
|
||||
increment: () => void;
|
||||
};
|
||||
|
||||
/**
|
||||
* Registers an interval timer that will periodically check for queries old enough
|
||||
* to be deleted.
|
||||
*
|
||||
* Note that this scrubber will clean all queries from all workspaces. It should not
|
||||
* run too often and it should only run from one workspace at a time.
|
||||
*
|
||||
* Generally, `wakeInterval` should be significantly shorter than `throttleTime`.
|
||||
*
|
||||
* @param wakeInterval How often to check to see if the job should run.
|
||||
* @param throttleTime How often to actually run the job.
|
||||
* @param maxQueryTime The maximum age of a query before it is ready for deletion.
|
||||
* @param queryDirectory The directory containing all queries.
|
||||
* @param ctx The extension context.
|
||||
*/
|
||||
export function registerQueryHistoryScubber(
|
||||
wakeInterval: number,
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
ctx: ExtensionContext,
|
||||
|
||||
// optional counter to keep track of how many times the scrubber has run
|
||||
counter?: Counter
|
||||
): Disposable {
|
||||
const deregister = setInterval(scrubQueries, wakeInterval, throttleTime, maxQueryTime, queryDirectory, ctx, counter);
|
||||
|
||||
return {
|
||||
dispose: () => {
|
||||
clearInterval(deregister);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function scrubQueries(
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
ctx: ExtensionContext,
|
||||
counter?: Counter
|
||||
) {
|
||||
const lastScrubTime = ctx.globalState.get<number>(LAST_SCRUB_TIME_KEY);
|
||||
const now = Date.now();
|
||||
|
||||
// If we have never scrubbed before, or if the last scrub was more than `throttleTime` ago,
|
||||
// then scrub again.
|
||||
if (lastScrubTime === undefined || now - lastScrubTime >= throttleTime) {
|
||||
await ctx.globalState.update(LAST_SCRUB_TIME_KEY, now);
|
||||
|
||||
let scrubCount = 0; // total number of directories deleted
|
||||
try {
|
||||
counter?.increment();
|
||||
void logger.log('Scrubbing query directory. Removing old queries.');
|
||||
if (!(await fs.pathExists(queryDirectory))) {
|
||||
void logger.log(`Cannot scrub. Query directory does not exist: ${queryDirectory}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const baseNames = await fs.readdir(queryDirectory);
|
||||
const errors: string[] = [];
|
||||
for (const baseName of baseNames) {
|
||||
const dir = path.join(queryDirectory, baseName);
|
||||
const scrubResult = await scrubDirectory(dir, now, maxQueryTime);
|
||||
if (scrubResult.errorMsg) {
|
||||
errors.push(scrubResult.errorMsg);
|
||||
}
|
||||
if (scrubResult.deleted) {
|
||||
scrubCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length) {
|
||||
throw new Error(os.EOL + errors.join(os.EOL));
|
||||
}
|
||||
} catch (e) {
|
||||
void logger.log(`Error while scrubbing queries: ${e}`);
|
||||
} finally {
|
||||
void logger.log(`Scrubbed ${scrubCount} old queries.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function scrubDirectory(dir: string, now: number, maxQueryTime: number): Promise<{
|
||||
errorMsg?: string,
|
||||
deleted: boolean
|
||||
}> {
|
||||
const timestampFile = path.join(dir, 'timestamp');
|
||||
try {
|
||||
let deleted = true;
|
||||
if (!(await fs.stat(dir)).isDirectory()) {
|
||||
void logger.log(` ${dir} is not a directory. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (!(await fs.pathExists(timestampFile))) {
|
||||
void logger.log(` ${dir} has no timestamp file. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (!(await fs.stat(timestampFile)).isFile()) {
|
||||
void logger.log(` ${timestampFile} is not a file. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else {
|
||||
const timestampText = await fs.readFile(timestampFile, 'utf8');
|
||||
const timestamp = parseInt(timestampText, 10);
|
||||
|
||||
if (Number.isNaN(timestamp)) {
|
||||
void logger.log(` ${dir} has invalid timestamp '${timestampText}'. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (now - timestamp > maxQueryTime) {
|
||||
void logger.log(` ${dir} is older than ${maxQueryTime / 1000} seconds. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else {
|
||||
void logger.log(` ${dir} is not older than ${maxQueryTime / 1000} seconds. Keeping.`);
|
||||
deleted = false;
|
||||
}
|
||||
}
|
||||
return {
|
||||
deleted
|
||||
};
|
||||
} catch (err) {
|
||||
return {
|
||||
errorMsg: ` Could not delete '${dir}': ${err}`,
|
||||
deleted: false
|
||||
};
|
||||
}
|
||||
}
|
||||
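A hedged registration sketch; the intervals and `queryStorageDir` are illustrative values, not the ones the extension actually uses:

import { registerQueryHistoryScubber } from './query-history-scrubber';

const ONE_HOUR = 1000 * 60 * 60;
const TWO_HOURS = 2 * ONE_HOUR;
const TWO_WEEKS = 14 * 24 * ONE_HOUR;

ctx.subscriptions.push(
  registerQueryHistoryScubber(
    ONE_HOUR,        // wakeInterval: check hourly whether the job should run
    TWO_HOURS,       // throttleTime: actually scrub at most once every two hours
    TWO_WEEKS,       // maxQueryTime: queries older than two weeks are deleted
    queryStorageDir, // assumed directory containing all local query results
    ctx
  )
);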
File diff suppressed because it is too large
@@ -3,7 +3,6 @@ import { CancellationTokenSource, env } from 'vscode';
|
||||
import { QueryWithResults, QueryEvaluationInfo } from './run-queries';
|
||||
import * as messages from './pure/messages';
|
||||
import * as cli from './cli';
|
||||
import * as sarif from 'sarif';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import {
|
||||
@@ -11,11 +10,21 @@ import {
|
||||
SortedResultSetInfo,
|
||||
QueryMetadata,
|
||||
InterpretedResultsSortState,
|
||||
ResultsPaths
|
||||
ResultsPaths,
|
||||
SarifInterpretationData,
|
||||
GraphInterpretationData
|
||||
} from './pure/interface-types';
|
||||
import { QueryHistoryConfig } from './config';
|
||||
import { DatabaseInfo } from './pure/interface-types';
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
|
||||
/**
|
||||
* query-results.ts
|
||||
* ----------------
|
||||
*
|
||||
* A collection of classes and functions that collectively
|
||||
* manage query results.
|
||||
*/
|
||||
|
||||
/**
|
||||
* A description of the information about a query
|
||||
@@ -33,12 +42,6 @@ export interface InitialQueryInfo {
|
||||
readonly id: string; // unique id for this query.
|
||||
}
|
||||
|
||||
export enum QueryStatus {
|
||||
InProgress = 'InProgress',
|
||||
Completed = 'Completed',
|
||||
Failed = 'Failed',
|
||||
}
|
||||
|
||||
export class CompletedQueryInfo implements QueryWithResults {
|
||||
readonly query: QueryEvaluationInfo;
|
||||
readonly result: messages.EvaluationResult;
|
||||
@@ -148,19 +151,39 @@ export class CompletedQueryInfo implements QueryWithResults {
|
||||
|
||||
|
||||
/**
|
||||
* Call cli command to interpret results.
|
||||
* Call cli command to interpret SARIF results.
|
||||
*/
|
||||
export async function interpretResults(
|
||||
server: cli.CodeQLCliServer,
|
||||
export async function interpretResultsSarif(
|
||||
cli: cli.CodeQLCliServer,
|
||||
metadata: QueryMetadata | undefined,
|
||||
resultsPaths: ResultsPaths,
|
||||
sourceInfo?: cli.SourceInfo
|
||||
): Promise<sarif.Log> {
|
||||
): Promise<SarifInterpretationData> {
|
||||
const { resultsPath, interpretedResultsPath } = resultsPaths;
|
||||
if (await fs.pathExists(interpretedResultsPath)) {
|
||||
return JSON.parse(await fs.readFile(interpretedResultsPath, 'utf8'));
|
||||
return { ...JSON.parse(await fs.readFile(interpretedResultsPath, 'utf8')), t: 'SarifInterpretationData' };
|
||||
}
|
||||
return await server.interpretBqrs(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
|
||||
const res = await cli.interpretBqrsSarif(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
|
||||
return { ...res, t: 'SarifInterpretationData' };
|
||||
}
|
||||
|
||||
/**
|
||||
* Call cli command to interpret graph results.
|
||||
*/
|
||||
export async function interpretGraphResults(
|
||||
cli: cli.CodeQLCliServer,
|
||||
metadata: QueryMetadata | undefined,
|
||||
resultsPaths: ResultsPaths,
|
||||
sourceInfo?: cli.SourceInfo
|
||||
): Promise<GraphInterpretationData> {
|
||||
const { resultsPath, interpretedResultsPath } = resultsPaths;
|
||||
if (await fs.pathExists(interpretedResultsPath)) {
|
||||
const dot = await cli.readDotFiles(interpretedResultsPath);
|
||||
return { dot, t: 'GraphInterpretationData' };
|
||||
}
|
||||
|
||||
const dot = await cli.interpretBqrsGraph(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
|
||||
return { dot, t: 'GraphInterpretationData' };
|
||||
}
|
||||
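A hedged sketch of how a caller might pick between the two interpreters; the dispatch shown here is illustrative and assumes QueryMetadata exposes a `kind` field:

async function interpret(
  cliServer: cli.CodeQLCliServer,
  metadata: QueryMetadata | undefined,
  resultsPaths: ResultsPaths,
  sourceInfo?: cli.SourceInfo
): Promise<SarifInterpretationData | GraphInterpretationData> {
  // `@kind graph` queries produce DOT output; everything else is rendered from SARIF.
  return metadata?.kind === 'graph'
    ? await interpretGraphResults(cliServer, metadata, resultsPaths, sourceInfo)
    : await interpretResultsSarif(cliServer, metadata, resultsPaths, sourceInfo);
}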
|
||||
export function ensureMetadataIsComplete(metadata: QueryMetadata | undefined) {
|
||||
@@ -178,131 +201,49 @@ export function ensureMetadataIsComplete(metadata: QueryMetadata | undefined) {
|
||||
return metadata;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Used in Interface and Compare-Interface for queries that we know have been completed.
|
||||
*/
|
||||
export type FullCompletedQueryInfo = FullQueryInfo & {
|
||||
export type CompletedLocalQueryInfo = LocalQueryInfo & {
|
||||
completedQuery: CompletedQueryInfo
|
||||
};
|
||||
|
||||
export class FullQueryInfo {
|
||||
export type QueryHistoryInfo = LocalQueryInfo | RemoteQueryHistoryItem;
|
||||
|
||||
static async slurp(fsPath: string, config: QueryHistoryConfig): Promise<FullQueryInfo[]> {
|
||||
try {
|
||||
const data = await fs.readFile(fsPath, 'utf8');
|
||||
const queries = JSON.parse(data);
|
||||
return queries.map((q: FullQueryInfo) => {
|
||||
|
||||
// Need to explicitly set prototype since reading in from JSON will not
|
||||
// do this automatically. Note that we can't call the constructor here since
|
||||
// the constructor invokes extra logic that we don't want to do.
|
||||
Object.setPrototypeOf(q, FullQueryInfo.prototype);
|
||||
|
||||
// The config object is a global, so we need to set it explicitly
|
||||
// and ensure it is not serialized to JSON.
|
||||
q.setConfig(config);
|
||||
|
||||
// Date instances are serialized as strings. Need to
|
||||
// convert them back to Date instances.
|
||||
(q.initialInfo as any).start = new Date(q.initialInfo.start);
|
||||
if (q.completedQuery) {
|
||||
// Again, need to explicitly set prototypes.
|
||||
Object.setPrototypeOf(q.completedQuery, CompletedQueryInfo.prototype);
|
||||
Object.setPrototypeOf(q.completedQuery.query, QueryEvaluationInfo.prototype);
|
||||
// slurped queries do not need to be disposed
|
||||
q.completedQuery.dispose = () => { /**/ };
|
||||
}
|
||||
return q;
|
||||
});
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage('Error loading query history.', {
|
||||
fullMessage: ['Error loading query history.', e.stack].join('\n'),
|
||||
});
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save the query history to disk. It is not necessary that the parent directory
|
||||
* exists, but if it does, it must be writable. An existing file will be overwritten.
|
||||
*
|
||||
* Any errors will be rethrown.
|
||||
*
|
||||
* @param queries the list of queries to save.
|
||||
* @param fsPath the path to save the queries to.
|
||||
*/
|
||||
static async splat(queries: FullQueryInfo[], fsPath: string): Promise<void> {
|
||||
try {
|
||||
const data = JSON.stringify(queries, null, 2);
|
||||
await fs.mkdirp(path.dirname(fsPath));
|
||||
await fs.writeFile(fsPath, data);
|
||||
} catch (e) {
|
||||
throw new Error(`Error saving query history to ${fsPath}: ${e.message}`);
|
||||
}
|
||||
}
|
||||
export class LocalQueryInfo {
|
||||
readonly t = 'local';
|
||||
|
||||
public failureReason: string | undefined;
|
||||
public completedQuery: CompletedQueryInfo | undefined;
|
||||
private config: QueryHistoryConfig | undefined;
|
||||
public evalLogLocation: string | undefined;
|
||||
public evalLogSummaryLocation: string | undefined;
|
||||
|
||||
/**
|
||||
* Note that in the {@link FullQueryInfo.slurp} method, we create a FullQueryInfo instance
|
||||
* Note that in the {@link slurpQueryHistory} method, we create a FullQueryInfo instance
|
||||
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||
*/
|
||||
constructor(
|
||||
public readonly initialInfo: InitialQueryInfo,
|
||||
config: QueryHistoryConfig,
|
||||
private readonly source?: CancellationTokenSource
|
||||
) {
|
||||
this.setConfig(config);
|
||||
}
|
||||
private cancellationSource?: CancellationTokenSource // used to cancel in progress queries
|
||||
) { /**/ }
|
||||
|
||||
cancel() {
|
||||
this.source?.cancel();
|
||||
this.cancellationSource?.cancel();
|
||||
// query is no longer in progress, can delete the cancellation token source
|
||||
this.cancellationSource?.dispose();
|
||||
delete this.cancellationSource;
|
||||
}
|
||||
|
||||
get startTime() {
|
||||
return this.initialInfo.start.toLocaleString(env.language);
|
||||
}
|
||||
|
||||
interpolate(template: string): string {
|
||||
const { resultCount = 0, statusString = 'in progress' } = this.completedQuery || {};
|
||||
const replacements: { [k: string]: string } = {
|
||||
t: this.startTime,
|
||||
q: this.getQueryName(),
|
||||
d: this.initialInfo.databaseInfo.name,
|
||||
r: resultCount.toString(),
|
||||
s: statusString,
|
||||
f: this.getQueryFileName(),
|
||||
'%': '%',
|
||||
};
|
||||
return template.replace(/%(.)/g, (match, key) => {
|
||||
const replacement = replacements[key];
|
||||
return replacement !== undefined ? replacement : match;
|
||||
});
|
||||
get userSpecifiedLabel() {
|
||||
return this.initialInfo.userSpecifiedLabel;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a label for this query that includes interpolated values.
|
||||
*/
|
||||
get label(): string {
|
||||
return this.interpolate(
|
||||
this.initialInfo.userSpecifiedLabel ?? this.config?.format ?? ''
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Avoids getting the default label for the query.
|
||||
* If there is a custom label for this query, interpolate and use that.
|
||||
* Otherwise, use the name of the query.
|
||||
*
|
||||
* @returns the name of the query, unless there is a custom label for this query.
|
||||
*/
|
||||
getShortLabel(): string {
|
||||
return this.initialInfo.userSpecifiedLabel
|
||||
? this.interpolate(this.initialInfo.userSpecifiedLabel)
|
||||
: this.getQueryName();
|
||||
set userSpecifiedLabel(label: string | undefined) {
|
||||
this.initialInfo.userSpecifiedLabel = label;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -336,12 +277,16 @@ export class FullQueryInfo {
|
||||
}
|
||||
}
|
||||
|
||||
isCompleted(): boolean {
|
||||
get completed(): boolean {
|
||||
return !!this.completedQuery;
|
||||
}
|
||||
|
||||
completeThisQuery(info: QueryWithResults) {
|
||||
this.completedQuery = new CompletedQueryInfo(info);
|
||||
|
||||
// dispose of the cancellation token source and also ensure the source is not serialized as JSON
|
||||
this.cancellationSource?.dispose();
|
||||
delete this.cancellationSource;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -361,21 +306,4 @@ export class FullQueryInfo {
|
||||
return QueryStatus.Failed;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The `config` property must not be serialized since it contains a listener
|
||||
* for global configuration changes. Instead, it should be set when the query
|
||||
* is deserialized.
|
||||
*
|
||||
* @param config the global query history config object
|
||||
*/
|
||||
private setConfig(config: QueryHistoryConfig) {
|
||||
// avoid serializing config property
|
||||
Object.defineProperty(this, 'config', {
|
||||
enumerable: false,
|
||||
writable: false,
|
||||
configurable: true,
|
||||
value: config
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
94
extensions/ql-vscode/src/query-serialization.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
import { asyncFilter, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { CompletedQueryInfo, LocalQueryInfo, QueryHistoryInfo } from './query-results';
|
||||
import { QueryEvaluationInfo } from './run-queries';
|
||||
|
||||
export async function slurpQueryHistory(fsPath: string): Promise<QueryHistoryInfo[]> {
|
||||
try {
|
||||
if (!(await fs.pathExists(fsPath))) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const data = await fs.readFile(fsPath, 'utf8');
|
||||
const obj = JSON.parse(data);
|
||||
if (obj.version !== 1) {
|
||||
void showAndLogErrorMessage(`Unsupported query history format: v${obj.version}. `);
|
||||
return [];
|
||||
}
|
||||
|
||||
const queries = obj.queries;
|
||||
const parsedQueries = queries.map((q: QueryHistoryInfo) => {
|
||||
|
||||
// Need to explicitly set prototype since reading in from JSON will not
|
||||
// do this automatically. Note that we can't call the constructor here since
|
||||
// the constructor invokes extra logic that we don't want to do.
|
||||
if (q.t === 'local') {
|
||||
Object.setPrototypeOf(q, LocalQueryInfo.prototype);
|
||||
|
||||
// Date instances are serialized as strings. Need to
|
||||
// convert them back to Date instances.
|
||||
(q.initialInfo as any).start = new Date(q.initialInfo.start);
|
||||
if (q.completedQuery) {
|
||||
// Again, need to explicitly set prototypes.
|
||||
Object.setPrototypeOf(q.completedQuery, CompletedQueryInfo.prototype);
|
||||
Object.setPrototypeOf(q.completedQuery.query, QueryEvaluationInfo.prototype);
|
||||
// slurped queries do not need to be disposed
|
||||
q.completedQuery.dispose = () => { /**/ };
|
||||
}
|
||||
} else if (q.t === 'remote') {
|
||||
// noop
|
||||
}
|
||||
return q;
|
||||
});
|
||||
|
||||
// filter out queries that have been deleted on disk
|
||||
// most likely another workspace has deleted them because the
|
||||
// queries aged out.
|
||||
return asyncFilter(parsedQueries, async (q) => {
|
||||
if (q.t === 'remote') {
|
||||
// the slurper doesn't know where the remote queries are stored
|
||||
// so we need to assume here that they exist. Later, we check to
|
||||
// see if they exist on disk.
|
||||
return true;
|
||||
}
|
||||
const resultsPath = q.completedQuery?.query.resultsPaths.resultsPath;
|
||||
return !!resultsPath && await fs.pathExists(resultsPath);
|
||||
});
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage('Error loading query history.', {
|
||||
fullMessage: ['Error loading query history.', getErrorStack(e)].join('\n'),
|
||||
});
|
||||
// since the query history is invalid, it should be deleted so this error does not happen on next startup.
|
||||
await fs.remove(fsPath);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save the query history to disk. It is not necessary that the parent directory
|
||||
* exists, but if it does, it must be writable. An existing file will be overwritten.
|
||||
*
|
||||
* Any errors will be rethrown.
|
||||
*
|
||||
* @param queries the list of queries to save.
|
||||
* @param fsPath the path to save the queries to.
|
||||
*/
|
||||
export async function splatQueryHistory(queries: QueryHistoryInfo[], fsPath: string): Promise<void> {
|
||||
try {
|
||||
if (!(await fs.pathExists(fsPath))) {
|
||||
await fs.mkdir(path.dirname(fsPath), { recursive: true });
|
||||
}
|
||||
// remove incomplete local queries since they cannot be recreated on restart
|
||||
const filteredQueries = queries.filter(q => q.t === 'local' ? q.completedQuery !== undefined : true);
|
||||
const data = JSON.stringify({
|
||||
version: 1,
|
||||
queries: filteredQueries
|
||||
}, null, 2);
|
||||
await fs.writeFile(fsPath, data);
|
||||
} catch (e) {
|
||||
throw new Error(`Error saving query history to ${fsPath}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
}
|
||||
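A minimal round-trip sketch for the new serialization module; the path and the `queryHistory` array are assumptions:

import { slurpQueryHistory, splatQueryHistory } from './query-serialization';

// (inside an async function)
const historyPath = '/path/to/workspace-storage/query-history.json';

// Persist the in-memory history; incomplete local queries are dropped on save.
await splatQueryHistory(queryHistory, historyPath);

// Later, e.g. on activation, read it back. Entries whose results have been
// scrubbed from disk are silently filtered out.
const restored = await slurpQueryHistory(historyPath);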
5
extensions/ql-vscode/src/query-status.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
export enum QueryStatus {
|
||||
InProgress = 'InProgress',
|
||||
Completed = 'Completed',
|
||||
Failed = 'Failed',
|
||||
}
|
||||
@@ -1,5 +1,7 @@
|
||||
import * as cp from 'child_process';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { Disposable, CancellationToken, commands } from 'vscode';
|
||||
import { createMessageConnection, MessageConnection, RequestType } from 'vscode-jsonrpc';
|
||||
@@ -9,8 +11,6 @@ import { Logger, ProgressReporter } from './logging';
|
||||
import { completeQuery, EvaluationResult, progress, ProgressMessage, WithProgressId } from './pure/messages';
|
||||
import * as messages from './pure/messages';
|
||||
import { ProgressCallback, ProgressTask } from './commandRunner';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as helpers from './helpers';
|
||||
|
||||
type ServerOpts = {
|
||||
logger: Logger;
|
||||
@@ -68,7 +68,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
this.queryServerStartListeners.push(e);
|
||||
}
|
||||
|
||||
public activeQueryName: string | undefined;
|
||||
public activeQueryLogFile: string | undefined;
|
||||
|
||||
constructor(
|
||||
readonly config: QueryServerConfig,
|
||||
@@ -89,26 +89,6 @@ export class QueryServerClient extends DisposableObject {
|
||||
this.evaluationResultCallbacks = {};
|
||||
}
|
||||
|
||||
async initLogger() {
|
||||
let storagePath = this.opts.contextStoragePath;
|
||||
let isCustomLogDirectory = false;
|
||||
if (this.config.customLogDirectory) {
|
||||
try {
|
||||
if (!(await fs.pathExists(this.config.customLogDirectory))) {
|
||||
await fs.mkdir(this.config.customLogDirectory);
|
||||
}
|
||||
void this.logger.log(`Saving query server logs to user-specified directory: ${this.config.customLogDirectory}.`);
|
||||
storagePath = this.config.customLogDirectory;
|
||||
isCustomLogDirectory = true;
|
||||
} catch (e) {
|
||||
void helpers.showAndLogErrorMessage(`${this.config.customLogDirectory} is not a valid directory. Logs will be stored in a temporary workspace directory instead.`);
|
||||
}
|
||||
}
|
||||
|
||||
await this.logger.setLogStoragePath(storagePath, isCustomLogDirectory);
|
||||
|
||||
}
|
||||
|
||||
get logger(): Logger {
|
||||
return this.opts.logger;
|
||||
}
|
||||
@@ -150,7 +130,6 @@ export class QueryServerClient extends DisposableObject {
|
||||
|
||||
/** Starts a new query server process, sending progress messages to the given reporter. */
|
||||
private async startQueryServerImpl(progressReporter: ProgressReporter): Promise<void> {
|
||||
await this.initLogger();
|
||||
const ramArgs = await this.cliServer.resolveRam(this.config.queryMemoryMb, progressReporter);
|
||||
const args = ['--threads', this.config.numThreads.toString()].concat(ramArgs);
|
||||
|
||||
@@ -167,16 +146,29 @@ export class QueryServerClient extends DisposableObject {
|
||||
args.push('--require-db-registration');
|
||||
}
|
||||
|
||||
if (await this.cliServer.cliConstraints.supportsOldEvalStats()) {
|
||||
if (await this.cliServer.cliConstraints.supportsOldEvalStats() && !(await this.cliServer.cliConstraints.supportsPerQueryEvalLog())) {
|
||||
args.push('--old-eval-stats');
|
||||
}
|
||||
|
||||
if (await this.cliServer.cliConstraints.supportsStructuredEvalLog()) {
|
||||
const structuredLogFile = `${this.opts.contextStoragePath}/structured-evaluator-log.json`;
|
||||
await fs.ensureFile(structuredLogFile);
|
||||
|
||||
args.push('--evaluator-log');
|
||||
args.push(structuredLogFile);
|
||||
|
||||
// We hard-code the verbosity level to 5 and minify to false.
|
||||
// This will be the behavior of the per-query structured logging in the CLI after 2.8.3.
|
||||
args.push('--evaluator-log-level');
|
||||
args.push('5');
|
||||
}
|
||||
|
||||
if (this.config.debug) {
|
||||
args.push('--debug', '--tuple-counting');
|
||||
}
|
||||
|
||||
if (cli.shouldDebugQueryServer()) {
|
||||
args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=y,suspend=n,quiet=y');
|
||||
args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=n,suspend=y,quiet=y');
|
||||
}
|
||||
|
||||
const child = cli.spawnServer(
|
||||
@@ -187,7 +179,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
this.logger,
|
||||
data => this.logger.log(data.toString(), {
|
||||
trailingNewline: false,
|
||||
additionalLogLocation: this.activeQueryName
|
||||
additionalLogLocation: this.activeQueryLogFile
|
||||
}),
|
||||
undefined, // no listener for stdout
|
||||
progressReporter
|
||||
@@ -198,10 +190,6 @@ export class QueryServerClient extends DisposableObject {
|
||||
if (!(res.runId in this.evaluationResultCallbacks)) {
|
||||
void this.logger.log(`No callback associated with run id ${res.runId}, continuing without executing any callback`);
|
||||
} else {
|
||||
const baseLocation = this.logger.getBaseLocation();
|
||||
if (baseLocation && this.activeQueryName) {
|
||||
res.logFileLocation = path.join(baseLocation, this.activeQueryName);
|
||||
}
|
||||
this.evaluationResultCallbacks[res.runId](res);
|
||||
}
|
||||
return {};
|
||||
@@ -234,7 +222,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
}
|
||||
|
||||
get serverProcessPid(): number {
|
||||
return this.serverProcess!.child.pid;
|
||||
return this.serverProcess!.child.pid || 0;
|
||||
}
|
||||
|
||||
async sendRequest<P, R, E, RO>(type: RequestType<WithProgressId<P>, R, E, RO>, parameter: P, token?: CancellationToken, progress?: (res: ProgressMessage) => void): Promise<R> {
|
||||
@@ -262,8 +250,23 @@ export class QueryServerClient extends DisposableObject {
|
||||
*/
|
||||
private updateActiveQuery(method: string, parameter: any): void {
|
||||
if (method === messages.compileQuery.method) {
|
||||
const queryPath = parameter?.queryToCheck?.queryPath || 'unknown';
|
||||
this.activeQueryName = `query-${path.basename(queryPath)}-${this.nextProgress}.log`;
|
||||
this.activeQueryLogFile = findQueryLogFile(path.dirname(parameter.resultPath));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function findQueryLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'query.log');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.jsonl');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.summary');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogEndSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log-end.summary');
|
||||
}
|
||||
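Illustrative only: the file each helper resolves for one query's result directory (the directory name is made up):

const resultPath = '/workspace-storage/queries/query-123';

findQueryLogFile(resultPath);               // '/workspace-storage/queries/query-123/query.log'
findQueryEvalLogFile(resultPath);           // '.../query-123/evaluator-log.jsonl'
findQueryEvalLogSummaryFile(resultPath);    // '.../query-123/evaluator-log.summary'
findQueryEvalLogEndSummaryFile(resultPath); // '.../query-123/evaluator-log-end.summary'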
@@ -21,6 +21,7 @@ import {
|
||||
ProgressCallback,
|
||||
UserCancellationException
|
||||
} from './commandRunner';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
const QUICK_QUERIES_DIR_NAME = 'quick-queries';
|
||||
const QUICK_QUERY_QUERY_NAME = 'quick-query.ql';
|
||||
@@ -132,7 +133,7 @@ export async function displayQuickQuery(
|
||||
await Window.showTextDocument(await workspace.openTextDocument(qlFile));
|
||||
} catch (e) {
|
||||
if (e instanceof ResponseError && e.code == ErrorCodes.RequestCancelled) {
|
||||
throw new UserCancellationException(e.message);
|
||||
throw new UserCancellationException(getErrorMessage(e));
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
|
||||
@@ -1,31 +1,41 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import { CancellationToken, ExtensionContext } from 'vscode';
|
||||
|
||||
import { Credentials } from '../authentication';
|
||||
import { Logger } from '../logging';
|
||||
import { downloadArtifactFromLink } from './gh-actions-api-client';
|
||||
import * as path from 'path';
|
||||
import { AnalysisSummary } from './shared/remote-query-result';
|
||||
import { AnalysisResults, QueryResult } from './shared/analysis-result';
|
||||
import { AnalysisResults, AnalysisAlert, AnalysisRawResults } from './shared/analysis-result';
|
||||
import { UserCancellationException } from '../commandRunner';
|
||||
import * as os from 'os';
|
||||
import { sarifParser } from '../sarif-parser';
|
||||
import { extractAnalysisAlerts } from './sarif-processing';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { extractRawResults } from './bqrs-processing';
|
||||
import { asyncFilter, getErrorMessage } from '../pure/helpers-pure';
|
||||
import { createDownloadPath } from './download-link';
|
||||
|
||||
export class AnalysesResultsManager {
|
||||
// Store for the results of various analyses for a single remote query.
|
||||
private readonly analysesResults: AnalysisResults[];
|
||||
// Store for the results of various analyses for each remote query.
|
||||
// The key is the queryId and is also the name of the directory where results are stored.
|
||||
private readonly analysesResults: Map<string, AnalysisResults[]>;
|
||||
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly cliServer: CodeQLCliServer,
|
||||
readonly storagePath: string,
|
||||
private readonly logger: Logger,
|
||||
) {
|
||||
this.analysesResults = [];
|
||||
this.analysesResults = new Map();
|
||||
}
|
||||
|
||||
public async downloadAnalysisResults(
|
||||
analysisSummary: AnalysisSummary,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void>
|
||||
): Promise<void> {
|
||||
if (this.analysesResults.some(x => x.nwo === analysisSummary.nwo)) {
|
||||
// We already have the results for this analysis, don't download again.
|
||||
if (this.isAnalysisInMemory(analysisSummary)) {
|
||||
// We already have the results for this analysis in memory, don't download again.
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -36,11 +46,23 @@ export class AnalysesResultsManager {
|
||||
await this.downloadSingleAnalysisResults(analysisSummary, credentials, publishResults);
|
||||
}
|
||||
|
||||
public async downloadAnalysesResults(
|
||||
analysesToDownload: AnalysisSummary[],
|
||||
token: CancellationToken | undefined,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void>
|
||||
/**
|
||||
* Loads the given analysis results. For each analysis result, if it is not downloaded yet,
|
||||
* it will be downloaded. If it is already downloaded, it will be loaded into memory.
|
||||
* If it is already in memory, this will be a no-op.
|
||||
*
|
||||
* @param allAnalysesToLoad List of analyses to ensure are downloaded and in memory
|
||||
* @param token Optional cancellation token
|
||||
* @param publishResults Optional function to publish the results after loading
|
||||
*/
|
||||
public async loadAnalysesResults(
|
||||
allAnalysesToLoad: AnalysisSummary[],
|
||||
token?: CancellationToken,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void> = () => Promise.resolve()
|
||||
): Promise<void> {
|
||||
// Filter out analyses that we have already in memory.
|
||||
const analysesToDownload = allAnalysesToLoad.filter(x => !this.isAnalysisInMemory(x));
|
||||
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
|
||||
void this.logger.log('Downloading and processing analyses results');
|
||||
@@ -74,8 +96,16 @@ export class AnalysesResultsManager {
|
||||
}
|
||||
}
|
||||
|
||||
public getAnalysesResults(): AnalysisResults[] {
|
||||
return [...this.analysesResults];
|
||||
public getAnalysesResults(queryId: string): AnalysisResults[] {
|
||||
return [...this.internalGetAnalysesResults(queryId)];
|
||||
}
|
||||
|
||||
private internalGetAnalysesResults(queryId: string): AnalysisResults[] {
|
||||
return this.analysesResults.get(queryId) || [];
|
||||
}
|
||||
|
||||
public removeAnalysesResults(queryId: string) {
|
||||
this.analysesResults.delete(queryId);
|
||||
}
|
||||
|
||||
private async downloadSingleAnalysisResults(
|
||||
@@ -86,51 +116,87 @@ export class AnalysesResultsManager {
|
||||
const analysisResults: AnalysisResults = {
|
||||
nwo: analysis.nwo,
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
interpretedResults: []
|
||||
};
|
||||
|
||||
this.analysesResults.push(analysisResults);
|
||||
void publishResults(this.analysesResults);
|
||||
const queryId = analysis.downloadLink.queryId;
|
||||
const resultsForQuery = this.internalGetAnalysesResults(queryId);
|
||||
resultsForQuery.push(analysisResults);
|
||||
this.analysesResults.set(queryId, resultsForQuery);
|
||||
void publishResults([...resultsForQuery]);
|
||||
const pos = resultsForQuery.length - 1;
|
||||
|
||||
let artifactPath;
|
||||
try {
|
||||
artifactPath = await downloadArtifactFromLink(credentials, analysis.downloadLink);
|
||||
artifactPath = await downloadArtifactFromLink(credentials, this.storagePath, analysis.downloadLink);
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(`Could not download the analysis results for ${analysis.nwo}: ${e.message}`);
|
||||
throw new Error(`Could not download the analysis results for ${analysis.nwo}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
|
||||
if (path.extname(artifactPath) === '.sarif') {
|
||||
const queryResults = await this.readResults(artifactPath);
|
||||
analysisResults.results = queryResults;
|
||||
analysisResults.status = 'Completed';
|
||||
const fileLinkPrefix = this.createGitHubDotcomFileLinkPrefix(analysis.nwo, analysis.databaseSha);
|
||||
|
||||
let newAnaysisResults: AnalysisResults;
|
||||
const fileExtension = path.extname(artifactPath);
|
||||
if (fileExtension === '.sarif') {
|
||||
const queryResults = await this.readSarifResults(artifactPath, fileLinkPrefix);
|
||||
newAnaysisResults = {
|
||||
...analysisResults,
|
||||
interpretedResults: queryResults,
|
||||
status: 'Completed'
|
||||
};
|
||||
} else if (fileExtension === '.bqrs') {
|
||||
const queryResults = await this.readBqrsResults(artifactPath, fileLinkPrefix);
|
||||
newAnaysisResults = {
|
||||
...analysisResults,
|
||||
rawResults: queryResults,
|
||||
status: 'Completed'
|
||||
};
|
||||
} else {
|
||||
void this.logger.log('Cannot download results. Only alert and path queries are fully supported.');
|
||||
analysisResults.status = 'Failed';
|
||||
void this.logger.log(`Cannot download results. File type '${fileExtension}' not supported.`);
|
||||
newAnaysisResults = {
|
||||
...analysisResults,
|
||||
status: 'Failed'
|
||||
};
|
||||
}
|
||||
|
||||
void publishResults(this.analysesResults);
|
||||
resultsForQuery[pos] = newAnaysisResults;
|
||||
void publishResults([...resultsForQuery]);
|
||||
}
|
||||
|
||||
private async readResults(filePath: string): Promise<QueryResult[]> {
|
||||
const queryResults: QueryResult[] = [];
|
||||
|
||||
public async loadDownloadedAnalyses(
|
||||
allAnalysesToCheck: AnalysisSummary[]
|
||||
) {
|
||||
|
||||
// Find all analyses that are already downloaded.
|
||||
const allDownloadedAnalyses = await asyncFilter(allAnalysesToCheck, x => this.isAnalysisDownloaded(x));
|
||||
// Now, ensure that all of these analyses are in memory. Some may already be in memory. These are ignored.
|
||||
await this.loadAnalysesResults(allDownloadedAnalyses);
|
||||
}
|
||||
|
||||
private async isAnalysisDownloaded(analysis: AnalysisSummary): Promise<boolean> {
|
||||
return await fs.pathExists(createDownloadPath(this.storagePath, analysis.downloadLink));
|
||||
}
|
||||
|
||||
private async readBqrsResults(filePath: string, fileLinkPrefix: string): Promise<AnalysisRawResults> {
|
||||
return await extractRawResults(this.cliServer, this.logger, filePath, fileLinkPrefix);
|
||||
}
|
||||
|
||||
private async readSarifResults(filePath: string, fileLinkPrefix: string): Promise<AnalysisAlert[]> {
|
||||
const sarifLog = await sarifParser(filePath);
|
||||
|
||||
// Read the sarif file and extract information that we want to display
|
||||
// in the UI. For now we're only getting the message texts but we'll gradually
|
||||
// extract more information based on the UX we want to build.
|
||||
const processedSarif = extractAnalysisAlerts(sarifLog, fileLinkPrefix);
|
||||
if (processedSarif.errors.length) {
|
||||
void this.logger.log(`Error processing SARIF file: ${os.EOL}${processedSarif.errors.join(os.EOL)}`);
|
||||
}
|
||||
|
||||
sarifLog.runs?.forEach(run => {
|
||||
run?.results?.forEach(result => {
|
||||
if (result?.message?.text) {
|
||||
queryResults.push({
|
||||
message: result.message.text
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
return processedSarif.alerts;
|
||||
}
|
||||
|
||||
return queryResults;
|
||||
private isAnalysisInMemory(analysis: AnalysisSummary): boolean {
|
||||
return this.internalGetAnalysesResults(analysis.downloadLink.queryId).some(x => x.nwo === analysis.nwo);
|
||||
}
|
||||
|
||||
private createGitHubDotcomFileLinkPrefix(nwo: string, sha: string): string {
|
||||
return `https://github.com/${nwo}/blob/${sha}`;
|
||||
}
|
||||
}
|
||||
|
||||
35
extensions/ql-vscode/src/remote-queries/bqrs-processing.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { Logger } from '../logging';
|
||||
import { transformBqrsResultSet } from '../pure/bqrs-cli-types';
|
||||
import { AnalysisRawResults } from './shared/analysis-result';
|
||||
import { MAX_RAW_RESULTS } from './shared/result-limits';
|
||||
|
||||
export async function extractRawResults(
|
||||
cliServer: CodeQLCliServer,
|
||||
logger: Logger,
|
||||
filePath: string,
|
||||
fileLinkPrefix: string,
|
||||
): Promise<AnalysisRawResults> {
|
||||
const bqrsInfo = await cliServer.bqrsInfo(filePath);
|
||||
const resultSets = bqrsInfo['result-sets'];
|
||||
|
||||
if (resultSets.length < 1) {
|
||||
throw new Error('No result sets found in results file.');
|
||||
}
|
||||
if (resultSets.length > 1) {
|
||||
void logger.log('Multiple result sets found in results file. Only the first one will be used.');
|
||||
}
|
||||
|
||||
const schema = resultSets[0];
|
||||
|
||||
const chunk = await cliServer.bqrsDecode(
|
||||
filePath,
|
||||
schema.name,
|
||||
{ pageSize: MAX_RAW_RESULTS });
|
||||
|
||||
const resultSet = transformBqrsResultSet(schema, chunk);
|
||||
|
||||
const capped = !!chunk.next;
|
||||
|
||||
return { schema, resultSet, fileLinkPrefix, capped };
|
||||
}
|
||||
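A hedged usage sketch for the new BQRS processing helper; the cliServer and logger instances and the paths are assumptions:

// (inside an async function)
const rawResults = await extractRawResults(
  cliServer,
  logger,
  '/storage/query-1650000000000/12345',                 // decoded BQRS artifact on disk
  'https://github.com/octo-org/octo-repo/blob/abc123'   // prefix used to build file links
);
// rawResults.capped is true when more rows than MAX_RAW_RESULTS were available.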
@@ -1,15 +1,17 @@
|
||||
import * as path from 'path';
|
||||
|
||||
/**
|
||||
* Represents a link to an artifact to be downloaded.
|
||||
* Represents a link to an artifact to be downloaded.
|
||||
*/
|
||||
export interface DownloadLink {
|
||||
/**
|
||||
* A unique id of the artifact being downloaded.
|
||||
* A unique id of the artifact being downloaded.
|
||||
*/
|
||||
id: string;
|
||||
|
||||
/**
|
||||
* The URL path to use against the GitHub API to download the
|
||||
* linked artifact.
|
||||
* linked artifact.
|
||||
*/
|
||||
urlPath: string;
|
||||
|
||||
@@ -17,4 +19,22 @@ export interface DownloadLink {
|
||||
* An optional path to follow inside the downloaded archive containing the artifact.
|
||||
*/
|
||||
innerFilePath?: string;
|
||||
|
||||
/**
|
||||
* A unique id of the remote query run. This is used to determine where to store artifacts and data from the run.
|
||||
*/
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a downloadLink to the path where the artifact should be stored.
|
||||
*
|
||||
* @param storagePath The base directory to store artifacts in.
|
||||
* @param downloadLink The DownloadLink
|
||||
* @param extension An optional file extension to append to the artifact (no `.`).
|
||||
*
|
||||
* @returns A full path to the download location of the artifact
|
||||
*/
|
||||
export function createDownloadPath(storagePath: string, downloadLink: DownloadLink, extension = '') {
|
||||
return path.join(storagePath, downloadLink.queryId, downloadLink.id + (extension ? `.${extension}` : ''));
|
||||
}
|
||||
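Illustrative only; the storage path and DownloadLink values are made up:

import { DownloadLink, createDownloadPath } from './download-link';

const downloadLink: DownloadLink = {
  id: '12345',
  urlPath: '/repos/octo-org/controller/actions/artifacts/12345',
  innerFilePath: 'results.sarif',
  queryId: 'query-1650000000000'
};

createDownloadPath('/storage/remote-queries', downloadLink);
// => '/storage/remote-queries/query-1650000000000/12345'

createDownloadPath('/storage/remote-queries', downloadLink, 'zip');
// => '/storage/remote-queries/query-1650000000000/12345.zip'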
|
||||
129
extensions/ql-vscode/src/remote-queries/export-results.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
import { window, commands, Uri, ExtensionContext, QuickPickItem, workspace, ViewColumn } from 'vscode';
|
||||
import { Credentials } from '../authentication';
|
||||
import { UserCancellationException } from '../commandRunner';
|
||||
import { showInformationMessageWithAction } from '../helpers';
|
||||
import { logger } from '../logging';
|
||||
import { QueryHistoryManager } from '../query-history';
|
||||
import { createGist } from './gh-actions-api-client';
|
||||
import { RemoteQueriesManager } from './remote-queries-manager';
|
||||
import { generateMarkdown } from './remote-queries-markdown-generation';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
|
||||
/**
|
||||
* Exports the results of the currently-selected remote query.
|
||||
* The user is prompted to select the export format.
|
||||
*/
|
||||
export async function exportRemoteQueryResults(
|
||||
queryHistoryManager: QueryHistoryManager,
|
||||
remoteQueriesManager: RemoteQueriesManager,
|
||||
ctx: ExtensionContext,
|
||||
): Promise<void> {
|
||||
const queryHistoryItem = queryHistoryManager.getCurrentQueryHistoryItem();
|
||||
if (!queryHistoryItem || queryHistoryItem.t !== 'remote') {
|
||||
throw new Error('No variant analysis results currently open. To open results, click an item in the query history view.');
|
||||
} else if (!queryHistoryItem.completed) {
|
||||
throw new Error('Variant analysis results are not yet available.');
|
||||
}
|
||||
const queryId = queryHistoryItem.queryId;
|
||||
void logger.log(`Exporting variant analysis results for query: ${queryId}`);
|
||||
const query = queryHistoryItem.remoteQuery;
|
||||
const analysesResults = remoteQueriesManager.getAnalysesResults(queryId);
|
||||
|
||||
const gistOption = {
|
||||
label: '$(ports-open-browser-icon) Create Gist (GitHub)',
|
||||
};
|
||||
const localMarkdownOption = {
|
||||
label: '$(markdown) Save as markdown',
|
||||
};
|
||||
const exportFormat = await determineExportFormat(gistOption, localMarkdownOption);
|
||||
|
||||
if (exportFormat === gistOption) {
|
||||
await exportResultsToGist(ctx, query, analysesResults);
|
||||
} else if (exportFormat === localMarkdownOption) {
|
||||
const queryDirectoryPath = await queryHistoryManager.getQueryHistoryItemDirectory(
|
||||
queryHistoryItem
|
||||
);
|
||||
await exportResultsToLocalMarkdown(queryDirectoryPath, query, analysesResults);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the format in which to export the results, from the given export options.
|
||||
*/
|
||||
async function determineExportFormat(
|
||||
...options: { label: string }[]
|
||||
): Promise<QuickPickItem> {
|
||||
const exportFormat = await window.showQuickPick(
|
||||
options,
|
||||
{
|
||||
placeHolder: 'Select export format',
|
||||
canPickMany: false,
|
||||
ignoreFocusOut: true,
|
||||
}
|
||||
);
|
||||
if (!exportFormat || !exportFormat.label) {
|
||||
throw new UserCancellationException('No export format selected', true);
|
||||
}
|
||||
return exportFormat;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the results of a remote query to markdown and uploads the files as a secret gist.
|
||||
*/
|
||||
async function exportResultsToGist(
|
||||
ctx: ExtensionContext,
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[]
|
||||
): Promise<void> {
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
const description = 'CodeQL Variant Analysis Results';
|
||||
const markdownFiles = generateMarkdown(query, analysesResults, 'gist');
|
||||
// Convert markdownFiles to the appropriate format for uploading to gist
|
||||
const gistFiles = markdownFiles.reduce((acc, cur) => {
|
||||
acc[`${cur.fileName}.md`] = { content: cur.content.join('\n') };
|
||||
return acc;
|
||||
}, {} as { [key: string]: { content: string } });
|
||||
|
||||
const gistUrl = await createGist(credentials, description, gistFiles);
|
||||
if (gistUrl) {
|
||||
const shouldOpenGist = await showInformationMessageWithAction(
|
||||
'Variant analysis results exported to gist.',
|
||||
'Open gist'
|
||||
);
|
||||
if (shouldOpenGist) {
|
||||
await commands.executeCommand('vscode.open', Uri.parse(gistUrl));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the results of a remote query to markdown and saves the files locally
|
||||
* in the query directory (where query results and metadata are also saved).
|
||||
*/
|
||||
async function exportResultsToLocalMarkdown(
|
||||
queryDirectoryPath: string,
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[]
|
||||
) {
|
||||
const markdownFiles = generateMarkdown(query, analysesResults, 'local');
|
||||
const exportedResultsPath = path.join(queryDirectoryPath, 'exported-results');
|
||||
await fs.ensureDir(exportedResultsPath);
|
||||
for (const markdownFile of markdownFiles) {
|
||||
const filePath = path.join(exportedResultsPath, `${markdownFile.fileName}.md`);
|
||||
await fs.writeFile(filePath, markdownFile.content.join('\n'), 'utf8');
|
||||
}
|
||||
const shouldOpenExportedResults = await showInformationMessageWithAction(
|
||||
`Variant analysis results exported to \"${exportedResultsPath}\".`,
|
||||
'Open exported results'
|
||||
);
|
||||
if (shouldOpenExportedResults) {
|
||||
const summaryFilePath = path.join(exportedResultsPath, '_summary.md');
|
||||
const summaryFile = await workspace.openTextDocument(summaryFilePath);
|
||||
await window.showTextDocument(summaryFile, ViewColumn.One);
|
||||
await commands.executeCommand('revealFileInOS', Uri.file(summaryFilePath));
|
||||
}
|
||||
}
|
||||
@@ -1,23 +1,34 @@
import * as unzipper from 'unzipper';
import * as path from 'path';
import * as fs from 'fs-extra';
import { showAndLogWarningMessage } from '../helpers';
import { showAndLogWarningMessage, tmpDir } from '../helpers';
import { Credentials } from '../authentication';
import { logger } from '../logging';
import { tmpDir } from '../run-queries';
import { RemoteQueryWorkflowResult } from './remote-query-workflow-result';
import { DownloadLink } from './download-link';
import { DownloadLink, createDownloadPath } from './download-link';
import { RemoteQuery } from './remote-query';
import { RemoteQueryResultIndex, RemoteQueryResultIndexItem } from './remote-query-result-index';
import { RemoteQueryFailureIndexItem, RemoteQueryResultIndex, RemoteQuerySuccessIndexItem } from './remote-query-result-index';

interface ApiResultIndexItem {
interface ApiSuccessIndexItem {
  nwo: string;
  id: string;
  sha?: string;
  results_count: number;
  bqrs_file_size: number;
  sarif_file_size?: number;
}

interface ApiFailureIndexItem {
  nwo: string;
  id: string;
  error: string;
}

interface ApiResultIndex {
  successes: ApiSuccessIndexItem[];
  failures: ApiFailureIndexItem[];
}

export async function getRemoteQueryIndex(
  credentials: Credentials,
  remoteQuery: RemoteQuery
@@ -31,47 +42,75 @@ export async function getRemoteQueryIndex(
  const artifactsUrlPath = `/repos/${owner}/${repoName}/actions/artifacts`;

  const artifactList = await listWorkflowRunArtifacts(credentials, owner, repoName, workflowRunId);
  const resultIndexArtifactId = getArtifactIDfromName('result-index', workflowUri, artifactList);
  const resultIndexItems = await getResultIndexItems(credentials, owner, repoName, resultIndexArtifactId);
  const resultIndexArtifactId = tryGetArtifactIDfromName('result-index', artifactList);
  if (!resultIndexArtifactId) {
    return undefined;
  }
  const resultIndex = await getResultIndex(credentials, owner, repoName, resultIndexArtifactId);

  const items = resultIndexItems.map(item => {
  const successes = resultIndex?.successes.map(item => {
    const artifactId = getArtifactIDfromName(item.id, workflowUri, artifactList);

    return {
      id: item.id.toString(),
      artifactId: artifactId,
      nwo: item.nwo,
      sha: item.sha,
      resultCount: item.results_count,
      bqrsFileSize: item.bqrs_file_size,
      sarifFileSize: item.sarif_file_size,
    } as RemoteQueryResultIndexItem;
      sarifFileSize: item.sarif_file_size
    } as RemoteQuerySuccessIndexItem;
  });

  const failures = resultIndex?.failures.map(item => {
    return {
      id: item.id.toString(),
      nwo: item.nwo,
      error: item.error
    } as RemoteQueryFailureIndexItem;
  });

  return {
    artifactsUrlPath,
    items
    successes: successes || [],
    failures: failures || []
  };
}

export async function cancelRemoteQuery(
  credentials: Credentials,
  remoteQuery: RemoteQuery
): Promise<void> {
  const octokit = await credentials.getOctokit();
  const { actionsWorkflowRunId, controllerRepository: { owner, name } } = remoteQuery;
  const response = await octokit.request(`POST /repos/${owner}/${name}/actions/runs/${actionsWorkflowRunId}/cancel`);
  if (response.status >= 300) {
    throw new Error(`Error cancelling variant analysis: ${response.status} ${response?.data?.message || ''}`);
  }
}

export async function downloadArtifactFromLink(
  credentials: Credentials,
  storagePath: string,
  downloadLink: DownloadLink
): Promise<string> {

  const octokit = await credentials.getOctokit();

  // Download the zipped artifact.
  const response = await octokit.request(`GET ${downloadLink.urlPath}/zip`, {});
  const extractedPath = createDownloadPath(storagePath, downloadLink);

  const zipFilePath = path.join(tmpDir.name, `${downloadLink.id}.zip`);
  await saveFile(`${zipFilePath}`, response.data as ArrayBuffer);
  // first check if we already have the artifact
  if (!(await fs.pathExists(extractedPath))) {
    // Download the zipped artifact.
    const response = await octokit.request(`GET ${downloadLink.urlPath}/zip`, {});

    // Extract the zipped artifact.
    const extractedPath = path.join(tmpDir.name, downloadLink.id);
    await unzipFile(zipFilePath, extractedPath);
    const zipFilePath = createDownloadPath(storagePath, downloadLink, 'zip');
    await saveFile(`${zipFilePath}`, response.data as ArrayBuffer);

    return downloadLink.innerFilePath
      ? path.join(extractedPath, downloadLink.innerFilePath)
      : extractedPath;
    // Extract the zipped artifact.
    await unzipFile(zipFilePath, extractedPath);
  }
  return path.join(extractedPath, downloadLink.innerFilePath || '');
}

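A minimal sketch of a caller of downloadArtifactFromLink, assuming the artifact holds a SARIF file; the surrounding variables (ctx, storagePath, analysisSummary) come from other files in this change set, so treat this as an illustration rather than the extension's actual call path.

// Sketch (assumed call site): download one analysis artifact and parse the SARIF inside it.
const credentials = await Credentials.initialize(ctx);
const resultsPath = await downloadArtifactFromLink(credentials, storagePath, analysisSummary.downloadLink);
const sarifLog = JSON.parse(await fs.readFile(resultsPath, 'utf8'));
// Re-running the call is cheap: the fs.pathExists check above skips the download when the
// artifact has already been extracted under storagePath.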
/**
@@ -82,17 +121,17 @@ export async function downloadArtifactFromLink(
 * @param workflowRunId The ID of the workflow run to get the result index for.
 * @returns An object containing the result index.
 */
async function getResultIndexItems(
async function getResultIndex(
  credentials: Credentials,
  owner: string,
  repo: string,
  artifactId: number
): Promise<ApiResultIndexItem[]> {
): Promise<ApiResultIndex | undefined> {
  const artifactPath = await downloadArtifact(credentials, owner, repo, artifactId);
  const indexFilePath = path.join(artifactPath, 'index.json');
  if (!(await fs.pathExists(indexFilePath))) {
    void showAndLogWarningMessage('Could not find an `index.json` file in the result artifact.');
    return [];
    return undefined;
  }
  const resultIndex = await fs.readFile(path.join(artifactPath, 'index.json'), 'utf8');

@@ -106,8 +145,8 @@ async function getResultIndexItems(
/**
 * Gets the status of a workflow run.
 * @param credentials Credentials for authenticating to the GitHub API.
 * @param owner
 * @param repo
 * @param owner
 * @param repo
 * @param workflowRunId The ID of the workflow run to get the result index for.
 * @returns The workflow run status.
 */
@@ -187,15 +226,29 @@ function getArtifactIDfromName(
  workflowUri: string,
  artifacts: Array<{ id: number, name: string }>
): number {
  const artifact = artifacts.find(a => a.name === artifactName);
  const artifactId = tryGetArtifactIDfromName(artifactName, artifacts);

  if (!artifact) {
  if (!artifactId) {
    const errorMessage =
      `Could not find artifact with name ${artifactName} in workflow ${workflowUri}.
      Please check whether the workflow run has successfully completed.`;
    throw Error(errorMessage);
  }

  return artifactId;
}

/**
 * @param artifactName The artifact name, as a string.
 * @param artifacts An array of artifact details (from the "list workflow run artifacts" API response).
 * @returns The artifact ID corresponding to the given artifact name, if it exists.
 */
function tryGetArtifactIDfromName(
  artifactName: string,
  artifacts: Array<{ id: number, name: string }>
): number | undefined {
  const artifact = artifacts.find(a => a.name === artifactName);

  return artifact?.id;
}

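A small sketch of how the two lookups differ; the artifact list literal here is invented for illustration.

// Sketch: tryGetArtifactIDfromName is the non-throwing lookup, used when a missing
// artifact (e.g. no result-index yet) is an expected outcome.
const artifacts = [{ id: 101, name: 'result-index' }, { id: 102, name: '1234' }];
tryGetArtifactIDfromName('result-index', artifacts);   // => 101
tryGetArtifactIDfromName('does-not-exist', artifacts); // => undefined
// getArtifactIDfromName('does-not-exist', workflowUri, artifacts) would throw instead.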
@@ -243,18 +296,39 @@ function getWorkflowError(conclusion: string | null): string {
  }

  if (conclusion === 'cancelled') {
    return 'The remote query execution was cancelled.';
    return 'Variant analysis execution was cancelled.';
  }

  if (conclusion === 'timed_out') {
    return 'The remote query execution timed out.';
    return 'Variant analysis execution timed out.';
  }

  if (conclusion === 'failure') {
    // TODO: Get the actual error from the workflow or potentially
    // from an artifact from the action itself.
    return 'The remote query execution has failed.';
    return 'Variant analysis execution has failed.';
  }

  return `Unexpected query execution conclusion: ${conclusion}`;
  return `Unexpected variant analysis execution conclusion: ${conclusion}`;
}

/**
 * Creates a gist with the given description and files.
 * Returns the URL of the created gist.
 */
export async function createGist(
  credentials: Credentials,
  description: string,
  files: { [key: string]: { content: string } }
): Promise<string | undefined> {
  const octokit = await credentials.getOctokit();
  const response = await octokit.request('POST /gists', {
    description,
    files,
    public: false,
  });
  if (response.status >= 300) {
    throw new Error(`Error exporting variant analysis results: ${response.status} ${response?.data || ''}`);
  }
  return response.data.html_url;
}

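A minimal sketch of exporting the generated markdown as a secret gist, assuming the markdownFiles array produced by generateMarkdown elsewhere in this change set; the description string is illustrative.

// Sketch: turn a MarkdownFile[] into the { name: { content } } map the Gist API expects.
const files = Object.fromEntries(
  markdownFiles.map(f => [`${f.fileName}.md`, { content: f.content.join('\n') }])
);
const gistUrl = await createGist(credentials, 'Variant analysis results', files);
// gistUrl (if defined) is what the 'vscode.open' call at the top of this section opens.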
@@ -5,18 +5,15 @@ import {
|
||||
ViewColumn,
|
||||
Uri,
|
||||
workspace,
|
||||
extensions,
|
||||
commands,
|
||||
commands
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
import { tmpDir } from '../run-queries';
|
||||
import {
|
||||
ToRemoteQueriesMessage,
|
||||
FromRemoteQueriesMessage,
|
||||
RemoteQueryDownloadAnalysisResultsMessage,
|
||||
RemoteQueryDownloadAllAnalysesResultsMessage,
|
||||
RemoteQueryViewAnalysisResultsMessage,
|
||||
RemoteQueryDownloadAllAnalysesResultsMessage
|
||||
} from '../pure/interface-types';
|
||||
import { Logger } from '../logging';
|
||||
import { getHtmlForWebview } from '../interface-utils';
|
||||
@@ -34,6 +31,7 @@ import { AnalysisResults } from './shared/analysis-result';
|
||||
export class RemoteQueriesInterfaceManager {
|
||||
private panel: WebviewPanel | undefined;
|
||||
private panelLoaded = false;
|
||||
private currentQueryId: string | undefined;
|
||||
private panelLoadedCallBacks: (() => void)[] = [];
|
||||
|
||||
constructor(
|
||||
@@ -42,7 +40,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
private readonly analysesResultsManager: AnalysesResultsManager
|
||||
) {
|
||||
this.panelLoadedCallBacks.push(() => {
|
||||
void logger.log('Remote queries view loaded');
|
||||
void logger.log('Variant analysis results view loaded');
|
||||
});
|
||||
}
|
||||
|
||||
@@ -50,12 +48,18 @@ export class RemoteQueriesInterfaceManager {
|
||||
this.getPanel().reveal(undefined, true);
|
||||
|
||||
await this.waitForPanelLoaded();
|
||||
const model = this.buildViewModel(query, queryResult);
|
||||
this.currentQueryId = queryResult.queryId;
|
||||
|
||||
await this.postMessage({
|
||||
t: 'setRemoteQueryResult',
|
||||
queryResult: this.buildViewModel(query, queryResult)
|
||||
queryResult: model
|
||||
});
|
||||
|
||||
await this.setAnalysisResults(this.analysesResultsManager.getAnalysesResults());
|
||||
// Ensure all pre-downloaded artifacts are loaded into memory
|
||||
await this.analysesResultsManager.loadDownloadedAnalyses(model.analysisSummaries);
|
||||
|
||||
await this.setAnalysisResults(this.analysesResultsManager.getAnalysesResults(queryResult.queryId), queryResult.queryId);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -71,6 +75,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
|
||||
const executionDuration = this.getDuration(queryResult.executionEndTime, query.executionStartTime);
|
||||
const analysisSummaries = this.buildAnalysisSummaries(queryResult.analysisSummaries);
|
||||
const totalRepositoryCount = queryResult.analysisSummaries.length;
|
||||
const affectedRepositories = queryResult.analysisSummaries.filter(r => r.resultCount > 0);
|
||||
|
||||
return {
|
||||
@@ -78,12 +83,15 @@ export class RemoteQueriesInterfaceManager {
|
||||
queryFileName: queryFileName,
|
||||
queryFilePath: query.queryFilePath,
|
||||
queryText: query.queryText,
|
||||
totalRepositoryCount: query.repositories.length,
|
||||
language: query.language,
|
||||
workflowRunUrl: `https://github.com/${query.controllerRepository.owner}/${query.controllerRepository.name}/actions/runs/${query.actionsWorkflowRunId}`,
|
||||
totalRepositoryCount: totalRepositoryCount,
|
||||
affectedRepositoryCount: affectedRepositories.length,
|
||||
totalResultCount: totalResultCount,
|
||||
executionTimestamp: this.formatDate(query.executionStartTime),
|
||||
executionDuration: executionDuration,
|
||||
analysisSummaries: analysisSummaries
|
||||
analysisSummaries: analysisSummaries,
|
||||
analysisFailures: queryResult.analysisFailures,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -92,14 +100,14 @@ export class RemoteQueriesInterfaceManager {
|
||||
const { ctx } = this;
|
||||
const panel = (this.panel = Window.createWebviewPanel(
|
||||
'remoteQueriesView',
|
||||
'Remote Query Results',
|
||||
'CodeQL Query Results',
|
||||
{ viewColumn: ViewColumn.Active, preserveFocus: true },
|
||||
{
|
||||
enableScripts: true,
|
||||
enableFindWidget: true,
|
||||
retainContextWhenHidden: true,
|
||||
localResourceRoots: [
|
||||
Uri.file(tmpDir.name),
|
||||
Uri.file(this.analysesResultsManager.storagePath),
|
||||
Uri.file(path.join(this.ctx.extensionPath, 'out')),
|
||||
],
|
||||
}
|
||||
@@ -107,6 +115,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
this.panel.onDidDispose(
|
||||
() => {
|
||||
this.panel = undefined;
|
||||
this.currentQueryId = undefined;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
@@ -187,7 +196,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
break;
|
||||
case 'remoteQueryError':
|
||||
void this.logger.log(
|
||||
`Remote query error: ${msg.error}`
|
||||
`Variant analysis error: ${msg.error}`
|
||||
);
|
||||
break;
|
||||
case 'openFile':
|
||||
@@ -202,8 +211,8 @@ export class RemoteQueriesInterfaceManager {
|
||||
case 'remoteQueryDownloadAllAnalysesResults':
|
||||
await this.downloadAllAnalysesResults(msg);
|
||||
break;
|
||||
case 'remoteQueryViewAnalysisResults':
|
||||
await this.viewAnalysisResults(msg);
|
||||
case 'remoteQueryExportResults':
|
||||
await commands.executeCommand('codeQL.exportVariantAnalysisResults');
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
@@ -211,45 +220,25 @@ export class RemoteQueriesInterfaceManager {
|
||||
}
|
||||
|
||||
private async downloadAnalysisResults(msg: RemoteQueryDownloadAnalysisResultsMessage): Promise<void> {
|
||||
const queryId = this.currentQueryId;
|
||||
await this.analysesResultsManager.downloadAnalysisResults(
|
||||
msg.analysisSummary,
|
||||
results => this.setAnalysisResults(results));
|
||||
results => this.setAnalysisResults(results, queryId));
|
||||
}
|
||||
|
||||
private async downloadAllAnalysesResults(msg: RemoteQueryDownloadAllAnalysesResultsMessage): Promise<void> {
|
||||
await this.analysesResultsManager.downloadAnalysesResults(
|
||||
const queryId = this.currentQueryId;
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
msg.analysisSummaries,
|
||||
undefined,
|
||||
results => this.setAnalysisResults(results));
|
||||
results => this.setAnalysisResults(results, queryId));
|
||||
}
|
||||
|
||||
private async viewAnalysisResults(msg: RemoteQueryViewAnalysisResultsMessage): Promise<void> {
|
||||
const downloadLink = msg.analysisSummary.downloadLink;
|
||||
const filePath = path.join(tmpDir.name, downloadLink.id, downloadLink.innerFilePath || '');
|
||||
|
||||
const sarifViewerExtensionId = 'MS-SarifVSCode.sarif-viewer';
|
||||
|
||||
const sarifExt = extensions.getExtension(sarifViewerExtensionId);
|
||||
if (!sarifExt) {
|
||||
// Ask the user if they want to install the extension to view the results.
|
||||
void commands.executeCommand('workbench.extensions.installExtension', sarifViewerExtensionId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!sarifExt.isActive) {
|
||||
await sarifExt.activate();
|
||||
}
|
||||
|
||||
await sarifExt.exports.openLogs([
|
||||
Uri.file(filePath),
|
||||
]);
|
||||
}
|
||||
|
||||
public async setAnalysisResults(analysesResults: AnalysisResults[]): Promise<void> {
|
||||
if (this.panel?.active) {
|
||||
public async setAnalysisResults(analysesResults: AnalysisResults[], queryId: string | undefined): Promise<void> {
|
||||
if (this.panel?.active && this.currentQueryId === queryId) {
|
||||
await this.postMessage({
|
||||
t: 'setAnalysesResults',
|
||||
analysesResults: analysesResults
|
||||
analysesResults
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -258,8 +247,8 @@ export class RemoteQueriesInterfaceManager {
|
||||
return this.getPanel().webview.postMessage(msg);
|
||||
}
|
||||
|
||||
private getDuration(startTime: Date, endTime: Date): string {
|
||||
const diffInMs = startTime.getTime() - endTime.getTime();
|
||||
private getDuration(startTime: number, endTime: number): string {
|
||||
const diffInMs = startTime - endTime;
|
||||
return this.formatDuration(diffInMs);
|
||||
}
|
||||
|
||||
@@ -279,7 +268,8 @@ export class RemoteQueriesInterfaceManager {
|
||||
}
|
||||
}
|
||||
|
||||
private formatDate = (d: Date): string => {
|
||||
private formatDate = (millis: number): string => {
|
||||
const d = new Date(millis);
|
||||
const datePart = d.toLocaleDateString(undefined, { day: 'numeric', month: 'short' });
|
||||
const timePart = d.toLocaleTimeString(undefined, { hour: 'numeric', minute: 'numeric', hour12: true });
|
||||
return `${datePart} at ${timePart}`;
|
||||
@@ -313,6 +303,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
|
||||
return sortedAnalysisSummaries.map((analysisResult) => ({
|
||||
nwo: analysisResult.nwo,
|
||||
databaseSha: analysisResult.databaseSha || 'HEAD',
|
||||
resultCount: analysisResult.resultCount,
|
||||
downloadLink: analysisResult.downloadLink,
|
||||
fileSize: this.formatFileSize(analysisResult.fileSizeInBytes)
|
||||
|
||||
@@ -1,8 +1,12 @@
|
||||
import { CancellationToken, commands, ExtensionContext, Uri, window } from 'vscode';
|
||||
import { nanoid } from 'nanoid';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
import { Credentials } from '../authentication';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { showAndLogErrorMessage, showInformationMessageWithAction } from '../helpers';
|
||||
import { createTimestampFile, showAndLogErrorMessage, showAndLogInformationMessage, showInformationMessageWithAction } from '../helpers';
|
||||
import { Logger } from '../logging';
|
||||
import { runRemoteQuery } from './run-remote-query';
|
||||
import { RemoteQueriesInterfaceManager } from './remote-queries-interface';
|
||||
@@ -13,11 +17,19 @@ import { RemoteQueryResultIndex } from './remote-query-result-index';
|
||||
import { RemoteQueryResult } from './remote-query-result';
|
||||
import { DownloadLink } from './download-link';
|
||||
import { AnalysesResultsManager } from './analyses-results-manager';
|
||||
import { assertNever } from '../pure/helpers-pure';
|
||||
import { RemoteQueryHistoryItem } from './remote-query-history-item';
|
||||
import { QueryHistoryManager } from '../query-history';
|
||||
import { QueryStatus } from '../query-status';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { QueryHistoryInfo } from '../query-results';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
|
||||
const autoDownloadMaxSize = 300 * 1024;
|
||||
const autoDownloadMaxCount = 100;
|
||||
|
||||
export class RemoteQueriesManager {
|
||||
const noop = () => { /* do nothing */ };
|
||||
export class RemoteQueriesManager extends DisposableObject {
|
||||
private readonly remoteQueriesMonitor: RemoteQueriesMonitor;
|
||||
private readonly analysesResultsManager: AnalysesResultsManager;
|
||||
private readonly interfaceManager: RemoteQueriesInterfaceManager;
|
||||
@@ -25,11 +37,55 @@ export class RemoteQueriesManager {
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly cliServer: CodeQLCliServer,
|
||||
private readonly qhm: QueryHistoryManager,
|
||||
private readonly storagePath: string,
|
||||
logger: Logger,
|
||||
) {
|
||||
this.analysesResultsManager = new AnalysesResultsManager(ctx, logger);
|
||||
super();
|
||||
this.analysesResultsManager = new AnalysesResultsManager(ctx, cliServer, storagePath, logger);
|
||||
this.interfaceManager = new RemoteQueriesInterfaceManager(ctx, logger, this.analysesResultsManager);
|
||||
this.remoteQueriesMonitor = new RemoteQueriesMonitor(ctx, logger);
|
||||
|
||||
// Handle events from the query history manager
|
||||
this.push(this.qhm.onDidAddQueryItem(this.handleAddQueryItem.bind(this)));
|
||||
this.push(this.qhm.onDidRemoveQueryItem(this.handleRemoveQueryItem.bind(this)));
|
||||
this.push(this.qhm.onWillOpenQueryItem(this.handleOpenQueryItem.bind(this)));
|
||||
}
|
||||
|
||||
private async handleAddQueryItem(queryItem: QueryHistoryInfo) {
|
||||
if (queryItem?.t === 'remote') {
|
||||
if (!(await this.queryHistoryItemExists(queryItem))) {
|
||||
// In this case, the query was deleted from disk, most likely because it was purged
|
||||
// by another workspace. We should remove it from the history manager.
|
||||
await this.qhm.handleRemoveHistoryItem(queryItem);
|
||||
} else if (queryItem.status === QueryStatus.InProgress) {
|
||||
// In this case, last time we checked, the query was still in progress.
|
||||
// We need to setup the monitor to check for completion.
|
||||
await commands.executeCommand('codeQL.monitorRemoteQuery', queryItem);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async handleRemoveQueryItem(queryItem: QueryHistoryInfo) {
|
||||
if (queryItem?.t === 'remote') {
|
||||
this.analysesResultsManager.removeAnalysesResults(queryItem.queryId);
|
||||
await this.removeStorageDirectory(queryItem);
|
||||
}
|
||||
}
|
||||
|
||||
private async handleOpenQueryItem(queryItem: QueryHistoryInfo) {
|
||||
if (queryItem?.t === 'remote') {
|
||||
try {
|
||||
const remoteQueryResult = await this.retrieveJsonFile(queryItem, 'query-result.json') as RemoteQueryResult;
|
||||
// open results in the background
|
||||
void this.openResults(queryItem.remoteQuery, remoteQueryResult).then(
|
||||
noop,
|
||||
err => void showAndLogErrorMessage(err)
|
||||
);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(`Could not open query results. ${e}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async runRemoteQuery(
|
||||
@@ -46,47 +102,62 @@ export class RemoteQueriesManager {
|
||||
progress,
|
||||
token);
|
||||
|
||||
if (querySubmission && querySubmission.query) {
|
||||
void commands.executeCommand('codeQL.monitorRemoteQuery', querySubmission.query);
|
||||
if (querySubmission?.query) {
|
||||
const query = querySubmission.query;
|
||||
const queryId = this.createQueryId(query.queryName);
|
||||
|
||||
const queryHistoryItem: RemoteQueryHistoryItem = {
|
||||
t: 'remote',
|
||||
status: QueryStatus.InProgress,
|
||||
completed: false,
|
||||
queryId,
|
||||
remoteQuery: query,
|
||||
};
|
||||
await this.prepareStorageDirectory(queryHistoryItem);
|
||||
await this.storeJsonFile(queryHistoryItem, 'query.json', query);
|
||||
|
||||
this.qhm.addQuery(queryHistoryItem);
|
||||
await this.qhm.refreshTreeView();
|
||||
}
|
||||
}
|
||||
|
||||
public async monitorRemoteQuery(
|
||||
query: RemoteQuery,
|
||||
queryItem: RemoteQueryHistoryItem,
|
||||
cancellationToken: CancellationToken
|
||||
): Promise<void> {
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
|
||||
const queryResult = await this.remoteQueriesMonitor.monitorQuery(query, cancellationToken);
|
||||
const queryWorkflowResult = await this.remoteQueriesMonitor.monitorQuery(queryItem.remoteQuery, cancellationToken);
|
||||
|
||||
const executionEndTime = new Date();
|
||||
const executionEndTime = Date.now();
|
||||
|
||||
if (queryResult.status === 'CompletedSuccessfully') {
|
||||
const resultIndex = await getRemoteQueryIndex(credentials, query);
|
||||
if (!resultIndex) {
|
||||
void showAndLogErrorMessage(`There was an issue retrieving the result for the query ${query.queryName}`);
|
||||
return;
|
||||
if (queryWorkflowResult.status === 'CompletedSuccessfully') {
|
||||
await this.downloadAvailableResults(queryItem, credentials, executionEndTime);
|
||||
} else if (queryWorkflowResult.status === 'CompletedUnsuccessfully') {
|
||||
if (queryWorkflowResult.error?.includes('cancelled')) {
|
||||
// workflow was cancelled on the server
|
||||
queryItem.failureReason = 'Cancelled';
|
||||
queryItem.status = QueryStatus.Failed;
|
||||
await this.downloadAvailableResults(queryItem, credentials, executionEndTime);
|
||||
void showAndLogInformationMessage('Variant analysis was cancelled');
|
||||
} else {
|
||||
queryItem.failureReason = queryWorkflowResult.error;
|
||||
queryItem.status = QueryStatus.Failed;
|
||||
void showAndLogErrorMessage(`Variant analysis execution failed. Error: ${queryWorkflowResult.error}`);
|
||||
}
|
||||
|
||||
const queryResult = this.mapQueryResult(executionEndTime, resultIndex);
|
||||
|
||||
// Kick off auto-download of results.
|
||||
void commands.executeCommand('codeQL.autoDownloadRemoteQueryResults', queryResult);
|
||||
|
||||
const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
|
||||
const message = `Query "${query.queryName}" run on ${query.repositories.length} repositories and returned ${totalResultCount} results`;
|
||||
|
||||
const shouldOpenView = await showInformationMessageWithAction(message, 'View');
|
||||
if (shouldOpenView) {
|
||||
await this.interfaceManager.showResults(query, queryResult);
|
||||
|
||||
}
|
||||
} else if (queryResult.status === 'CompletedUnsuccessfully') {
|
||||
await showAndLogErrorMessage(`Remote query execution failed. Error: ${queryResult.error}`);
|
||||
return;
|
||||
} else if (queryResult.status === 'Cancelled') {
|
||||
await showAndLogErrorMessage('Remote query monitoring was cancelled');
|
||||
} else if (queryWorkflowResult.status === 'Cancelled') {
|
||||
queryItem.failureReason = 'Cancelled';
|
||||
queryItem.status = QueryStatus.Failed;
|
||||
await this.downloadAvailableResults(queryItem, credentials, executionEndTime);
|
||||
void showAndLogInformationMessage('Variant analysis was cancelled');
|
||||
} else if (queryWorkflowResult.status === 'InProgress') {
|
||||
// Should not get here. Only including this to ensure `assertNever` uses proper type checking.
|
||||
void showAndLogErrorMessage(`Unexpected status: ${queryWorkflowResult.status}`);
|
||||
} else {
|
||||
// Ensure all cases are covered
|
||||
assertNever(queryWorkflowResult.status);
|
||||
}
|
||||
await this.qhm.refreshTreeView();
|
||||
}
|
||||
|
||||
public async autoDownloadRemoteQueryResults(
|
||||
@@ -98,32 +169,143 @@ export class RemoteQueriesManager {
|
||||
.slice(0, autoDownloadMaxCount)
|
||||
.map(a => ({
|
||||
nwo: a.nwo,
|
||||
databaseSha: a.databaseSha,
|
||||
resultCount: a.resultCount,
|
||||
downloadLink: a.downloadLink,
|
||||
fileSize: String(a.fileSizeInBytes)
|
||||
}));
|
||||
|
||||
await this.analysesResultsManager.downloadAnalysesResults(
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
analysesToDownload,
|
||||
token,
|
||||
results => this.interfaceManager.setAnalysisResults(results));
|
||||
results => this.interfaceManager.setAnalysisResults(results, queryResult.queryId));
|
||||
}
|
||||
|
||||
private mapQueryResult(executionEndTime: Date, resultIndex: RemoteQueryResultIndex): RemoteQueryResult {
|
||||
const analysisSummaries = resultIndex.items.map(item => ({
|
||||
private mapQueryResult(executionEndTime: number, resultIndex: RemoteQueryResultIndex, queryId: string): RemoteQueryResult {
|
||||
|
||||
const analysisSummaries = resultIndex.successes.map(item => ({
|
||||
nwo: item.nwo,
|
||||
databaseSha: item.sha || 'HEAD',
|
||||
resultCount: item.resultCount,
|
||||
fileSizeInBytes: item.sarifFileSize ? item.sarifFileSize : item.bqrsFileSize,
|
||||
downloadLink: {
|
||||
id: item.artifactId.toString(),
|
||||
urlPath: `${resultIndex.artifactsUrlPath}/${item.artifactId}`,
|
||||
innerFilePath: item.sarifFileSize ? 'results.sarif' : 'results.bqrs'
|
||||
innerFilePath: item.sarifFileSize ? 'results.sarif' : 'results.bqrs',
|
||||
queryId,
|
||||
} as DownloadLink
|
||||
}));
|
||||
const analysisFailures = resultIndex.failures.map(item => ({
|
||||
nwo: item.nwo,
|
||||
error: item.error
|
||||
}));
|
||||
|
||||
return {
|
||||
executionEndTime,
|
||||
analysisSummaries
|
||||
analysisSummaries,
|
||||
analysisFailures,
|
||||
queryId
|
||||
};
|
||||
}
|
||||
|
||||
public async openResults(query: RemoteQuery, queryResult: RemoteQueryResult) {
|
||||
await this.interfaceManager.showResults(query, queryResult);
|
||||
}
|
||||
|
||||
private async askToOpenResults(query: RemoteQuery, queryResult: RemoteQueryResult): Promise<void> {
|
||||
const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
|
||||
const totalRepoCount = queryResult.analysisSummaries.length;
|
||||
const message = `Query "${query.queryName}" run on ${totalRepoCount} repositories and returned ${totalResultCount} results`;
|
||||
|
||||
const shouldOpenView = await showInformationMessageWithAction(message, 'View');
|
||||
if (shouldOpenView) {
|
||||
await this.openResults(query, queryResult);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a unique id for this query, suitable for determining the storage location for the downloaded query artifacts.
|
||||
* @param queryName
|
||||
* @returns
|
||||
*/
|
||||
private createQueryId(queryName: string): string {
|
||||
return `${queryName}-${nanoid()}`;
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares a directory for storing analysis results for a single query run.
|
||||
* This directory contains a timestamp file, which will be
|
||||
* used by the query history manager to determine when the directory
|
||||
* should be deleted.
|
||||
*
|
||||
* @param queryName The name of the query that was run.
|
||||
*/
|
||||
private async prepareStorageDirectory(queryHistoryItem: RemoteQueryHistoryItem): Promise<void> {
|
||||
await createTimestampFile(path.join(this.storagePath, queryHistoryItem.queryId));
|
||||
}
|
||||
|
||||
private async storeJsonFile<T>(queryHistoryItem: RemoteQueryHistoryItem, fileName: string, obj: T): Promise<void> {
|
||||
const filePath = path.join(this.storagePath, queryHistoryItem.queryId, fileName);
|
||||
await fs.writeFile(filePath, JSON.stringify(obj, null, 2), 'utf8');
|
||||
}
|
||||
|
||||
private async retrieveJsonFile<T>(queryHistoryItem: RemoteQueryHistoryItem, fileName: string): Promise<T> {
|
||||
const filePath = path.join(this.storagePath, queryHistoryItem.queryId, fileName);
|
||||
return JSON.parse(await fs.readFile(filePath, 'utf8'));
|
||||
}
|
||||
|
||||
private async removeStorageDirectory(queryItem: RemoteQueryHistoryItem): Promise<void> {
|
||||
const filePath = path.join(this.storagePath, queryItem.queryId);
|
||||
await fs.remove(filePath);
|
||||
}
|
||||
|
||||
private async queryHistoryItemExists(queryItem: RemoteQueryHistoryItem): Promise<boolean> {
|
||||
const filePath = path.join(this.storagePath, queryItem.queryId);
|
||||
return await fs.pathExists(filePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether there's a result index artifact available for the given query.
|
||||
* If so, set the query status to `Completed` and auto-download the results.
|
||||
*/
|
||||
private async downloadAvailableResults(
|
||||
queryItem: RemoteQueryHistoryItem,
|
||||
credentials: Credentials,
|
||||
executionEndTime: number
|
||||
): Promise<void> {
|
||||
const resultIndex = await getRemoteQueryIndex(credentials, queryItem.remoteQuery);
|
||||
if (resultIndex) {
|
||||
queryItem.completed = true;
|
||||
queryItem.status = QueryStatus.Completed;
|
||||
queryItem.failureReason = undefined;
|
||||
const queryResult = this.mapQueryResult(executionEndTime, resultIndex, queryItem.queryId);
|
||||
|
||||
await this.storeJsonFile(queryItem, 'query-result.json', queryResult);
|
||||
|
||||
// Kick off auto-download of results in the background.
|
||||
void commands.executeCommand('codeQL.autoDownloadRemoteQueryResults', queryResult);
|
||||
|
||||
// Ask if the user wants to open the results in the background.
|
||||
void this.askToOpenResults(queryItem.remoteQuery, queryResult).then(
|
||||
noop,
|
||||
err => {
|
||||
void showAndLogErrorMessage(err);
|
||||
}
|
||||
);
|
||||
} else {
|
||||
const controllerRepo = `${queryItem.remoteQuery.controllerRepository.owner}/${queryItem.remoteQuery.controllerRepository.name}`;
|
||||
const workflowRunUrl = `https://github.com/${controllerRepo}/actions/runs/${queryItem.remoteQuery.actionsWorkflowRunId}`;
|
||||
void showAndLogErrorMessage(
|
||||
`There was an issue retrieving the result for the query [${queryItem.remoteQuery.queryName}](${workflowRunUrl}).`
|
||||
);
|
||||
queryItem.status = QueryStatus.Failed;
|
||||
}
|
||||
}
|
||||
|
||||
// Pulled from the analysis results manager, so that we can get access to
|
||||
// analyses results from the "export results" command.
|
||||
public getAnalysesResults(queryId: string): AnalysisResults[] {
|
||||
return [...this.analysesResultsManager.getAnalysesResults(queryId)];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,327 @@
|
||||
import { CellValue } from '../pure/bqrs-cli-types';
|
||||
import { tryGetRemoteLocation } from '../pure/bqrs-utils';
|
||||
import { createRemoteFileRef } from '../pure/location-link-utils';
|
||||
import { parseHighlightedLine, shouldHighlightLine } from '../pure/sarif-utils';
|
||||
import { convertNonPrintableChars } from '../text-utils';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { AnalysisAlert, AnalysisRawResults, AnalysisResults, CodeSnippet, FileLink, getAnalysisResultCount, HighlightedRegion } from './shared/analysis-result';
|
||||
|
||||
export type MarkdownLinkType = 'local' | 'gist';
|
||||
|
||||
export interface MarkdownFile {
|
||||
fileName: string;
|
||||
content: string[]; // Each array item is a line of the markdown file.
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates markdown files with variant analysis results.
|
||||
*/
|
||||
export function generateMarkdown(
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[],
|
||||
linkType: MarkdownLinkType
|
||||
): MarkdownFile[] {
|
||||
const resultsFiles: MarkdownFile[] = [];
|
||||
// Generate summary file with links to individual files
|
||||
const summaryFile: MarkdownFile = generateMarkdownSummary(query);
|
||||
for (const analysisResult of analysesResults) {
|
||||
const resultsCount = getAnalysisResultCount(analysisResult);
|
||||
if (resultsCount === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Append nwo and results count to the summary table
|
||||
const nwo = analysisResult.nwo;
|
||||
const fileName = createFileName(nwo);
|
||||
const link = createRelativeLink(fileName, linkType);
|
||||
summaryFile.content.push(`| ${nwo} | [${resultsCount} result(s)](${link}) |`);
|
||||
|
||||
// Generate individual markdown file for each repository
|
||||
const resultsFileContent = [
|
||||
`### ${analysisResult.nwo}`,
|
||||
''
|
||||
];
|
||||
for (const interpretedResult of analysisResult.interpretedResults) {
|
||||
const individualResult = generateMarkdownForInterpretedResult(interpretedResult, query.language);
|
||||
resultsFileContent.push(...individualResult);
|
||||
}
|
||||
if (analysisResult.rawResults) {
|
||||
const rawResultTable = generateMarkdownForRawResults(analysisResult.rawResults);
|
||||
resultsFileContent.push(...rawResultTable);
|
||||
}
|
||||
resultsFiles.push({
|
||||
fileName: fileName,
|
||||
content: resultsFileContent,
|
||||
});
|
||||
}
|
||||
return [summaryFile, ...resultsFiles];
|
||||
}
|
||||
|
||||
export function generateMarkdownSummary(query: RemoteQuery): MarkdownFile {
|
||||
const lines: string[] = [];
|
||||
// Title
|
||||
lines.push(
|
||||
`### Results for "${query.queryName}"`,
|
||||
''
|
||||
);
|
||||
|
||||
// Expandable section containing query text
|
||||
const queryCodeBlock = [
|
||||
'```ql',
|
||||
...query.queryText.split('\n'),
|
||||
'```',
|
||||
];
|
||||
lines.push(
|
||||
...buildExpandableMarkdownSection('Query', queryCodeBlock)
|
||||
);
|
||||
|
||||
// Padding between sections
|
||||
lines.push(
|
||||
'<br />',
|
||||
'',
|
||||
);
|
||||
|
||||
// Summary table
|
||||
lines.push(
|
||||
'### Summary',
|
||||
'',
|
||||
'| Repository | Results |',
|
||||
'| --- | --- |',
|
||||
);
|
||||
// nwo and result count will be appended to this table
|
||||
return {
|
||||
fileName: '_summary',
|
||||
content: lines
|
||||
};
|
||||
}
|
||||
|
||||
function generateMarkdownForInterpretedResult(interpretedResult: AnalysisAlert, language: string): string[] {
|
||||
const lines: string[] = [];
|
||||
lines.push(createMarkdownRemoteFileRef(
|
||||
interpretedResult.fileLink,
|
||||
interpretedResult.highlightedRegion?.startLine,
|
||||
interpretedResult.highlightedRegion?.endLine
|
||||
));
|
||||
lines.push('');
|
||||
const codeSnippet = interpretedResult.codeSnippet;
|
||||
const highlightedRegion = interpretedResult.highlightedRegion;
|
||||
if (codeSnippet) {
|
||||
lines.push(
|
||||
...generateMarkdownForCodeSnippet(codeSnippet, language, highlightedRegion),
|
||||
);
|
||||
}
|
||||
const alertMessage = generateMarkdownForAlertMessage(interpretedResult);
|
||||
lines.push(alertMessage, '');
|
||||
|
||||
// If available, show paths
|
||||
const hasPathResults = interpretedResult.codeFlows.length > 0;
|
||||
if (hasPathResults) {
|
||||
const pathLines = generateMarkdownForPathResults(interpretedResult, language);
|
||||
lines.push(...pathLines);
|
||||
}
|
||||
|
||||
// Padding between results
|
||||
lines.push(
|
||||
'----------------------------------------',
|
||||
'',
|
||||
);
|
||||
return lines;
|
||||
}
|
||||
|
||||
function generateMarkdownForCodeSnippet(
|
||||
codeSnippet: CodeSnippet,
|
||||
language: string,
|
||||
highlightedRegion?: HighlightedRegion
|
||||
): string[] {
|
||||
const lines: string[] = [];
|
||||
const snippetStartLine = codeSnippet.startLine || 0;
|
||||
const codeLines = codeSnippet.text
|
||||
.split('\n')
|
||||
.map((line, index) =>
|
||||
highlightCodeLines(line, index + snippetStartLine, highlightedRegion)
|
||||
);
|
||||
|
||||
// Make sure there are no extra newlines before or after the <code> block:
|
||||
const codeLinesWrapped = [...codeLines];
|
||||
codeLinesWrapped[0] = `<pre><code class="${language}">${codeLinesWrapped[0]}`;
|
||||
codeLinesWrapped[codeLinesWrapped.length - 1] = `${codeLinesWrapped[codeLinesWrapped.length - 1]}</code></pre>`;
|
||||
|
||||
lines.push(
|
||||
...codeLinesWrapped,
|
||||
'',
|
||||
);
|
||||
return lines;
|
||||
}
|
||||
|
||||
function highlightCodeLines(
|
||||
line: string,
|
||||
lineNumber: number,
|
||||
highlightedRegion?: HighlightedRegion
|
||||
): string {
|
||||
if (!highlightedRegion || !shouldHighlightLine(lineNumber, highlightedRegion)) {
|
||||
return line;
|
||||
}
|
||||
const partiallyHighlightedLine = parseHighlightedLine(
|
||||
line,
|
||||
lineNumber,
|
||||
highlightedRegion
|
||||
);
|
||||
return `${partiallyHighlightedLine.plainSection1}<strong>${partiallyHighlightedLine.highlightedSection}</strong>${partiallyHighlightedLine.plainSection2}`;
|
||||
}
|
||||
|
||||
function generateMarkdownForAlertMessage(
|
||||
interpretedResult: AnalysisAlert
|
||||
): string {
|
||||
let alertMessage = '';
|
||||
for (const token of interpretedResult.message.tokens) {
|
||||
if (token.t === 'text') {
|
||||
alertMessage += token.text;
|
||||
} else if (token.t === 'location') {
|
||||
alertMessage += createMarkdownRemoteFileRef(
|
||||
token.location.fileLink,
|
||||
token.location.highlightedRegion?.startLine,
|
||||
token.location.highlightedRegion?.endLine,
|
||||
token.text
|
||||
);
|
||||
}
|
||||
}
|
||||
// Italicize the alert message
|
||||
return `*${alertMessage}*`;
|
||||
}
|
||||
|
||||
function generateMarkdownForPathResults(
|
||||
interpretedResult: AnalysisAlert,
|
||||
language: string
|
||||
): string[] {
|
||||
const lines: string[] = [];
|
||||
lines.push('#### Paths', '');
|
||||
for (const codeFlow of interpretedResult.codeFlows) {
|
||||
const pathLines: string[] = [];
|
||||
const stepCount = codeFlow.threadFlows.length;
|
||||
const title = `Path with ${stepCount} steps`;
|
||||
for (let i = 0; i < stepCount; i++) {
|
||||
const threadFlow = codeFlow.threadFlows[i];
|
||||
const link = createMarkdownRemoteFileRef(
|
||||
threadFlow.fileLink,
|
||||
threadFlow.highlightedRegion?.startLine,
|
||||
threadFlow.highlightedRegion?.endLine
|
||||
);
|
||||
const codeSnippet = generateMarkdownForCodeSnippet(
|
||||
threadFlow.codeSnippet,
|
||||
language,
|
||||
threadFlow.highlightedRegion
|
||||
);
|
||||
// Indent the snippet to fit with the numbered list.
|
||||
const codeSnippetIndented = codeSnippet.map((line) => ` ${line}`);
|
||||
pathLines.push(`${i + 1}. ${link}`, ...codeSnippetIndented);
|
||||
}
|
||||
lines.push(
|
||||
...buildExpandableMarkdownSection(title, pathLines)
|
||||
);
|
||||
}
|
||||
return lines;
|
||||
}
|
||||
|
||||
function generateMarkdownForRawResults(
|
||||
analysisRawResults: AnalysisRawResults
|
||||
): string[] {
|
||||
const tableRows: string[] = [];
|
||||
const columnCount = analysisRawResults.schema.columns.length;
|
||||
// Table headers are the column names if they exist, and empty otherwise
|
||||
const headers = analysisRawResults.schema.columns.map(
|
||||
(column) => column.name || ''
|
||||
);
|
||||
const tableHeader = `| ${headers.join(' | ')} |`;
|
||||
|
||||
tableRows.push(tableHeader);
|
||||
tableRows.push('|' + ' --- |'.repeat(columnCount));
|
||||
|
||||
for (const row of analysisRawResults.resultSet.rows) {
|
||||
const cells = row.map((cell) =>
|
||||
generateMarkdownForRawTableCell(cell, analysisRawResults.fileLinkPrefix)
|
||||
);
|
||||
tableRows.push(`| ${cells.join(' | ')} |`);
|
||||
}
|
||||
return tableRows;
|
||||
}
|
||||
|
||||
function generateMarkdownForRawTableCell(
|
||||
value: CellValue,
|
||||
fileLinkPrefix: string
|
||||
) {
|
||||
let cellValue: string;
|
||||
switch (typeof value) {
|
||||
case 'string':
|
||||
case 'number':
|
||||
case 'boolean':
|
||||
cellValue = `\`${convertNonPrintableChars(value.toString())}\``;
|
||||
break;
|
||||
case 'object':
|
||||
{
|
||||
const url = tryGetRemoteLocation(value.url, fileLinkPrefix);
|
||||
cellValue = `[\`${convertNonPrintableChars(value.label)}\`](${url})`;
|
||||
}
|
||||
break;
|
||||
}
|
||||
// `|` characters break the table, so we need to escape them
|
||||
return cellValue.replaceAll('|', '\\|');
|
||||
}
|
||||
|
||||
|
||||
/**
 * Creates a markdown link to a remote file.
 * If the "link text" is not provided, we use the file path.
 */
export function createMarkdownRemoteFileRef(
  fileLink: FileLink,
  startLine?: number,
  endLine?: number,
  linkText?: string,
): string {
  const markdownLink = `[${linkText || fileLink.filePath}](${createRemoteFileRef(fileLink, startLine, endLine)})`;
  return markdownLink;
}

/**
 * Builds an expandable markdown section of the form:
 * <details>
 * <summary>title</summary>
 *
 * contents
 *
 * </details>
 */
function buildExpandableMarkdownSection(title: string, contents: string[]): string[] {
  const expandableLines: string[] = [];
  expandableLines.push(
    '<details>',
    `<summary>${title}</summary>`,
    '',
    ...contents,
    '',
    '</details>',
    ''
  );
  return expandableLines;
}

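A quick sketch of the output shape, using an invented one-line query body as the contents.

// Sketch: wrapping a (made-up) query text block in a collapsible section for the summary file.
buildExpandableMarkdownSection('Query', ['```ql', 'import javascript', '```']);
// => ['<details>', '<summary>Query</summary>', '', '```ql', 'import javascript', '```', '', '</details>', '']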
function createRelativeLink(fileName: string, linkType: MarkdownLinkType): string {
  switch (linkType) {
    case 'local':
      return `./${fileName}.md`;

    case 'gist':
      // Creates an anchor link to a file in the gist. This is of the form:
      // '#file-<name>-<file-extension>'
      return `#file-${fileName}-md`;
  }
}

/**
 * Creates the name of the markdown file for a given repository nwo.
 * This name doesn't include the file extension.
 */
function createFileName(nwo: string) {
  const [owner, repo] = nwo.split('/');
  return `${owner}-${repo}`;
}

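For concreteness, the naming and linking conventions used by the two export targets, with an example repository name.

// Sketch: file names and cross-file links produced for a repository's results page.
createFileName('github/codeql');               // => 'github-codeql'
createRelativeLink('github-codeql', 'local');  // => './github-codeql.md'
createRelativeLink('github-codeql', 'gist');   // => '#file-github-codeql-md'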
@@ -49,7 +49,7 @@ export class RemoteQueriesMonitor {
      attemptCount++;
    }

    void this.logger.log('Remote query monitoring timed out after 2 days');
    void this.logger.log('Variant analysis monitoring timed out after 2 days');
    return { status: 'Cancelled' };
  }

@@ -0,0 +1,15 @@
import { QueryStatus } from '../query-status';
import { RemoteQuery } from './remote-query';

/**
 * Information about a remote query.
 */
export interface RemoteQueryHistoryItem {
  readonly t: 'remote';
  failureReason?: string;
  status: QueryStatus;
  completed: boolean;
  readonly queryId: string,
  remoteQuery: RemoteQuery;
  userSpecifiedLabel?: string;
}
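As a sketch, this is roughly the item that runRemoteQuery (earlier in this change set) constructs when a query is submitted; `query` stands for the RemoteQuery returned from submission, and the queryId value is an invented example of the `${queryName}-${nanoid()}` format.

// Sketch: a freshly submitted history item, before the workflow completes.
const item: RemoteQueryHistoryItem = {
  t: 'remote',
  status: QueryStatus.InProgress,
  completed: false,
  queryId: 'FindSqlInjection.ql-V1StGXR8_Z5jdHi6B-myT', // `${queryName}-${nanoid()}`
  remoteQuery: query,
};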
@@ -1,13 +1,22 @@
export interface RemoteQueryResultIndex {
  artifactsUrlPath: string;
  items: RemoteQueryResultIndexItem[];
  successes: RemoteQuerySuccessIndexItem[];
  failures: RemoteQueryFailureIndexItem[];
}

export interface RemoteQueryResultIndexItem {
export interface RemoteQuerySuccessIndexItem {
  id: string;
  artifactId: number;
  nwo: string;
  sha?: string;
  resultCount: number;
  bqrsFileSize: number;
  sarifFileSize?: number;
}

export interface RemoteQueryFailureIndexItem {
  id: string;
  artifactId: number;
  nwo: string;
  error: string;
}

@@ -1,12 +1,16 @@
import { DownloadLink } from './download-link';
import { AnalysisFailure } from './shared/analysis-failure';

export interface RemoteQueryResult {
  executionEndTime: Date;
  executionEndTime: number; // Can't use a Date here since it needs to be serialized and deserialized.
  analysisSummaries: AnalysisSummary[];
  analysisFailures: AnalysisFailure[];
  queryId: string;
}

export interface AnalysisSummary {
  nwo: string,
  databaseSha: string,
  resultCount: number,
  downloadLink: DownloadLink,
  fileSizeInBytes: number

@@ -4,8 +4,8 @@ export interface RemoteQuery {
  queryName: string;
  queryFilePath: string;
  queryText: string;
  language: string;
  controllerRepository: Repository;
  repositories: Repository[];
  executionStartTime: Date;
  executionStartTime: number; // Use a number here since it needs to be serialized and deserialized.
  actionsWorkflowRunId: number;
}

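Both timestamp fields switch from Date to epoch milliseconds because these objects are written to and read back from JSON on disk (query.json and query-result.json above); a Date does not survive that round trip, as this small sketch shows.

// Sketch: Date values become strings after a JSON round trip, numbers do not.
JSON.parse(JSON.stringify({ executionStartTime: new Date() })).executionStartTime;  // a string
JSON.parse(JSON.stringify({ executionStartTime: Date.now() })).executionStartTime;  // still a number
// The view layer converts back with `new Date(millis)` only at display time (see formatDate above).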
116  extensions/ql-vscode/src/remote-queries/repository-selection.ts  (new file)
@@ -0,0 +1,116 @@
import { QuickPickItem, window } from 'vscode';
import { logger } from '../logging';
import { getRemoteRepositoryLists } from '../config';
import { REPO_REGEX } from '../pure/helpers-pure';
import { UserCancellationException } from '../commandRunner';

export interface RepositorySelection {
  repositories?: string[];
  repositoryLists?: string[]
}

interface RepoListQuickPickItem extends QuickPickItem {
  repositories?: string[];
  repositoryList?: string;
  useCustomRepository?: boolean;
}

/**
 * Gets the repositories or repository lists to run the query against.
 * @returns The user selection.
 */
export async function getRepositorySelection(): Promise<RepositorySelection> {
  const quickPickItems = [
    createCustomRepoQuickPickItem(),
    ...createSystemDefinedRepoListsQuickPickItems(),
    ...createUserDefinedRepoListsQuickPickItems(),
  ];

  const options = {
    placeHolder: 'Select a repository list. You can define repository lists in the `codeQL.variantAnalysis.repositoryLists` setting.',
    ignoreFocusOut: true,
  };

  const quickpick = await window.showQuickPick<RepoListQuickPickItem>(
    quickPickItems,
    options);

  if (quickpick?.repositories?.length) {
    void logger.log(`Selected repositories: ${quickpick.repositories.join(', ')}`);
    return { repositories: quickpick.repositories };
  } else if (quickpick?.repositoryList) {
    void logger.log(`Selected repository list: ${quickpick.repositoryList}`);
    return { repositoryLists: [quickpick.repositoryList] };
  } else if (quickpick?.useCustomRepository) {
    const customRepo = await getCustomRepo();
    if (!customRepo || !REPO_REGEX.test(customRepo)) {
      throw new UserCancellationException('Invalid repository format. Please enter a valid repository in the format <owner>/<repo> (e.g. github/codeql)');
    }
    void logger.log(`Entered repository: ${customRepo}`);
    return { repositories: [customRepo] };
  } else {
    // We don't need to display a warning pop-up in this case, since the user just escaped out of the operation.
    // We set 'true' to make this a silent exception.
    throw new UserCancellationException('No repositories selected', true);
  }
}

/**
 * Checks if the selection is valid or not.
 * @param repoSelection The selection to check.
 * @returns A boolean flag indicating if the selection is valid or not.
 */
export function isValidSelection(repoSelection: RepositorySelection): boolean {
  if (repoSelection.repositories === undefined && repoSelection.repositoryLists === undefined) {
    return false;
  }
  if (repoSelection.repositories !== undefined && repoSelection.repositories.length === 0) {
    return false;
  }
  if (repoSelection.repositoryLists?.length === 0) {
    return false;
  }

  return true;
}

function createSystemDefinedRepoListsQuickPickItems(): RepoListQuickPickItem[] {
  const topNs = [10, 100, 1000];

  return topNs.map(n => ({
    label: '$(star) Top ' + n,
    repositoryList: `top_${n}`,
    alwaysShow: true
  } as RepoListQuickPickItem));
}

function createUserDefinedRepoListsQuickPickItems(): RepoListQuickPickItem[] {
  const repoLists = getRemoteRepositoryLists();
  if (!repoLists) {
    return [];
  }

  return Object.entries(repoLists).map<RepoListQuickPickItem>(([label, repositories]) => (
    {
      label, // the name of the repository list
      repositories // the actual array of repositories
    }
  ));
}

function createCustomRepoQuickPickItem(): RepoListQuickPickItem {
  return {
    label: '$(edit) Enter a GitHub repository',
    useCustomRepository: true,
    alwaysShow: true,
  };
}

async function getCustomRepo(): Promise<string | undefined> {
  return await window.showInputBox({
    title: 'Enter a GitHub repository in the format <owner>/<repo> (e.g. github/codeql)',
    placeHolder: '<owner>/<repo>',
    prompt: 'Tip: you can save frequently used repositories in the `codeQL.variantAnalysis.repositoryLists` setting',
    ignoreFocusOut: true,
  });
}

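A small sketch of the data this selection flow consumes and produces; the list name and repositories are invented examples, and the settings shape is inferred from how getRemoteRepositoryLists() is used above.

// Sketch: a user-defined entry in `codeQL.variantAnalysis.repositoryLists` as seen by this module.
const repoLists: Record<string, string[]> = {
  'my-projects': ['github/codeql', 'github/vscode-codeql'],
};
// Picking that list yields { repositoryLists: ['my-projects'] }, picking "Top 100" yields
// { repositoryLists: ['top_100'] }, and a manually entered repo yields { repositories: ['github/codeql'] };
// isValidSelection() then rejects selections where both fields are missing or empty.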
@@ -1,7 +1,8 @@
|
||||
import { CancellationToken, QuickPickItem, Uri, window } from 'vscode';
|
||||
import { CancellationToken, Uri, window } from 'vscode';
|
||||
import * as path from 'path';
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import {
|
||||
askForLanguage,
|
||||
@@ -9,25 +10,20 @@ import {
|
||||
getOnDiskWorkspaceFolders,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogInformationMessage,
|
||||
showInformationMessageWithAction,
|
||||
tryGetQueryMetadata
|
||||
tryGetQueryMetadata,
|
||||
tmpDir
|
||||
} from '../helpers';
|
||||
import { Credentials } from '../authentication';
|
||||
import * as cli from '../cli';
|
||||
import { logger } from '../logging';
|
||||
import { getRemoteControllerRepo, getRemoteRepositoryLists, setRemoteControllerRepo } from '../config';
|
||||
import { tmpDir } from '../run-queries';
|
||||
import { getActionBranch, getRemoteControllerRepo, setRemoteControllerRepo } from '../config';
|
||||
import { ProgressCallback, UserCancellationException } from '../commandRunner';
|
||||
import { OctokitResponse } from '@octokit/types/dist-types';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQuerySubmissionResult } from './remote-query-submission-result';
|
||||
import { QueryMetadata } from '../pure/interface-types';
|
||||
|
||||
interface Config {
|
||||
repositories: string[];
|
||||
ref?: string;
|
||||
language?: string;
|
||||
}
|
||||
import { getErrorMessage, REPO_REGEX } from '../pure/helpers-pure';
|
||||
import { getRepositorySelection, isValidSelection, RepositorySelection } from './repository-selection';
|
||||
|
||||
export interface QlPack {
|
||||
name: string;
|
||||
@@ -36,71 +32,21 @@ export interface QlPack {
|
||||
defaultSuite?: Record<string, unknown>[];
|
||||
defaultSuiteFile?: string;
|
||||
}
|
||||
interface RepoListQuickPickItem extends QuickPickItem {
|
||||
repoList: string[];
|
||||
}
|
||||
|
||||
interface QueriesResponse {
|
||||
workflow_run_id: number
|
||||
workflow_run_id: number,
|
||||
errors?: {
|
||||
invalid_repositories?: string[],
|
||||
repositories_without_database?: string[],
|
||||
},
|
||||
repositories_queried?: string[],
|
||||
}
|
||||
|
||||
/**
|
||||
* This regex matches strings of the form `owner/repo` where:
|
||||
* - `owner` is made up of alphanumeric characters or single hyphens, starting and ending in an alphanumeric character
|
||||
* - `repo` is made up of alphanumeric characters, hyphens, or underscores
|
||||
*/
|
||||
const REPO_REGEX = /^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+\/[a-zA-Z0-9-_]+$/;
|
||||
|
||||
/**
|
||||
* Well-known names for the query pack used by the server.
|
||||
*/
|
||||
const QUERY_PACK_NAME = 'codeql-remote/query';
|
||||
|
||||
/**
|
||||
* Gets the repositories to run the query against.
|
||||
*/
|
||||
export async function getRepositories(): Promise<string[] | undefined> {
|
||||
const repoLists = getRemoteRepositoryLists();
|
||||
if (repoLists && Object.keys(repoLists).length) {
|
||||
const quickPickItems = Object.entries(repoLists).map<RepoListQuickPickItem>(([key, value]) => (
|
||||
{
|
||||
label: key, // the name of the repository list
|
||||
repoList: value, // the actual array of repositories
|
||||
}
|
||||
));
|
||||
const quickpick = await window.showQuickPick<RepoListQuickPickItem>(
|
||||
quickPickItems,
|
||||
{
|
||||
placeHolder: 'Select a repository list. You can define repository lists in the `codeQL.remoteQueries.repositoryLists` setting.',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (quickpick?.repoList.length) {
|
||||
void logger.log(`Selected repositories: ${quickpick.repoList.join(', ')}`);
|
||||
return quickpick.repoList;
|
||||
} else {
|
||||
void showAndLogErrorMessage('No repositories selected.');
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
void logger.log('No repository lists defined. Displaying text input box.');
|
||||
const remoteRepo = await window.showInputBox({
|
||||
title: 'Enter a GitHub repository in the format <owner>/<repo> (e.g. github/codeql)',
|
||||
placeHolder: '<owner>/<repo>',
|
||||
prompt: 'Tip: you can save frequently used repositories in the `codeQL.remoteQueries.repositoryLists` setting',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (!remoteRepo) {
|
||||
void showAndLogErrorMessage('No repositories entered.');
|
||||
return;
|
||||
} else if (!REPO_REGEX.test(remoteRepo)) { // Check if user entered invalid input
|
||||
void showAndLogErrorMessage('Invalid repository format. Must be in the format <owner>/<repo> (e.g. github/codeql)');
|
||||
return;
|
||||
}
|
||||
void logger.log(`Entered repository: ${remoteRepo}`);
|
||||
return [remoteRepo];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Two possibilities:
|
||||
* 1. There is no qlpack.yml in this directory. Assume this is a lone query and generate a synthetic qlpack for it.
|
||||
@@ -108,7 +54,7 @@ export async function getRepositories(): Promise<string[] | undefined> {
|
||||
*
|
||||
* @returns the entire qlpack as a base64 string.
|
||||
*/
|
||||
async function generateQueryPack(cliServer: cli.CodeQLCliServer, queryFile: string, queryPackDir: string, fallbackLanguage?: string): Promise<{
|
||||
async function generateQueryPack(cliServer: cli.CodeQLCliServer, queryFile: string, queryPackDir: string): Promise<{
|
||||
base64Pack: string,
|
||||
language: string
|
||||
}> {
|
||||
@@ -150,7 +96,7 @@ async function generateQueryPack(cliServer: cli.CodeQLCliServer, queryFile: stri
|
||||
|
||||
} else {
|
||||
// open popup to ask for language if not already hardcoded
|
||||
language = fallbackLanguage || await askForLanguage(cliServer);
|
||||
language = await askForLanguage(cliServer);
|
||||
|
||||
// copy only the query file to the query pack directory
|
||||
// and generate a synthetic query pack
|
||||
@@ -172,6 +118,9 @@ async function generateQueryPack(cliServer: cli.CodeQLCliServer, queryFile: stri
|
||||
|
||||
await ensureNameAndSuite(queryPackDir, packRelativePath);
|
||||
|
||||
// Clear the cliServer cache so that the previous qlpack text is purged from the CLI.
|
||||
await cliServer.clearCache();
|
||||
|
||||
const bundlePath = await getPackedBundlePath(queryPackDir);
|
||||
void logger.log(`Compiling and bundling query pack from ${queryPackDir} to ${bundlePath}. (This may take a while.)`);
|
||||
await cliServer.packInstall(queryPackDir);
|
||||
@@ -228,7 +177,7 @@ export async function runRemoteQuery(
|
||||
token: CancellationToken
|
||||
): Promise<void | RemoteQuerySubmissionResult> {
|
||||
if (!(await cliServer.cliConstraints.supportsRemoteQueries())) {
|
||||
throw new Error(`Remote queries are not supported by this version of CodeQL. Please upgrade to v${cli.CliVersionConstraint.CLI_VERSION_REMOTE_QUERIES
|
||||
throw new Error(`Variant analysis is not supported by this version of CodeQL. Please upgrade to v${cli.CliVersionConstraint.CLI_VERSION_REMOTE_QUERIES
|
||||
} or later.`);
|
||||
}
|
||||
|
||||
@@ -238,47 +187,22 @@ export async function runRemoteQuery(
|
||||
throw new UserCancellationException('Not a CodeQL query file.');
|
||||
}
|
||||
|
||||
progress({
|
||||
maxStep: 5,
|
||||
step: 1,
|
||||
message: 'Determining project list'
|
||||
});
|
||||
|
||||
const queryFile = uri.fsPath;
|
||||
const repositoriesFile = queryFile.substring(0, queryFile.length - '.ql'.length) + '.repositories';
|
||||
let ref: string | undefined;
|
||||
// For the case of single file remote queries, use the language from the config in order to avoid the user having to select it.
|
||||
let fallbackLanguage: string | undefined;
|
||||
let repositories: string[] | undefined;
|
||||
|
||||
progress({
|
||||
maxStep: 5,
|
||||
step: 2,
|
||||
maxStep: 4,
|
||||
step: 1,
|
||||
message: 'Determining query target language'
|
||||
});
|
||||
|
||||
// If the user has an explicit `.repositories` file, use that.
|
||||
// Otherwise, prompt user to select repositories from the `codeQL.remoteQueries.repositoryLists` setting.
|
||||
if (await fs.pathExists(repositoriesFile)) {
|
||||
void logger.log(`Found '${repositoriesFile}'. Using information from that file to run ${queryFile}.`);
|
||||
|
||||
const config = yaml.safeLoad(await fs.readFile(repositoriesFile, 'utf8')) as Config;
|
||||
|
||||
ref = config.ref || 'main';
|
||||
fallbackLanguage = config.language;
|
||||
repositories = config.repositories;
|
||||
} else {
|
||||
ref = 'main';
|
||||
repositories = await getRepositories();
|
||||
}
|
||||
|
||||
if (!repositories || repositories.length === 0) {
|
||||
const repoSelection = await getRepositorySelection();
|
||||
if (!isValidSelection(repoSelection)) {
|
||||
throw new UserCancellationException('No repositories to query.');
|
||||
}
|
||||
|
||||
progress({
|
||||
maxStep: 5,
|
||||
step: 3,
|
||||
maxStep: 4,
|
||||
step: 2,
|
||||
message: 'Determining controller repo'
|
||||
});
|
||||
|
||||
@@ -289,7 +213,7 @@ export async function runRemoteQuery(
|
||||
if (!controllerRepo || !REPO_REGEX.test(controllerRepo)) {
|
||||
void logger.log(controllerRepo ? 'Invalid controller repository name.' : 'No controller repository defined.');
|
||||
controllerRepo = await window.showInputBox({
|
||||
title: 'Controller repository in which to display progress and results of remote queries',
|
||||
title: 'Controller repository in which to display progress and results of variant analysis',
|
||||
placeHolder: '<owner>/<repo>',
|
||||
prompt: 'Enter the name of a GitHub repository in the format <owner>/<repo>',
|
||||
ignoreFocusOut: true,
|
||||
@@ -309,8 +233,8 @@ export async function runRemoteQuery(
|
||||
const [owner, repo] = controllerRepo.split('/');
|
||||
|
||||
progress({
|
||||
maxStep: 5,
|
||||
step: 4,
|
||||
maxStep: 4,
|
||||
step: 3,
|
||||
message: 'Bundling the query pack'
|
||||
});
|
||||
|
||||
@@ -318,20 +242,21 @@ export async function runRemoteQuery(
|
||||
throw new UserCancellationException('Cancelled');
|
||||
}
|
||||
|
||||
const { base64Pack, language } = await generateQueryPack(cliServer, queryFile, queryPackDir, fallbackLanguage);
|
||||
const { base64Pack, language } = await generateQueryPack(cliServer, queryFile, queryPackDir);
|
||||
|
||||
if (token.isCancellationRequested) {
|
||||
throw new UserCancellationException('Cancelled');
|
||||
}
|
||||
|
||||
progress({
|
||||
maxStep: 5,
|
||||
step: 5,
|
||||
maxStep: 4,
|
||||
step: 4,
|
||||
message: 'Sending request'
|
||||
});
|
||||
|
||||
const workflowRunId = await runRemoteQueriesApiRequest(credentials, ref, language, repositories, owner, repo, base64Pack, dryRun);
|
||||
const queryStartTime = new Date();
|
||||
const actionBranch = getActionBranch();
|
||||
const workflowRunId = await runRemoteQueriesApiRequest(credentials, actionBranch, language, repoSelection, owner, repo, base64Pack, dryRun);
|
||||
const queryStartTime = Date.now();
|
||||
const queryMetadata = await tryGetQueryMetadata(cliServer, queryFile);
|
||||
|
||||
if (dryRun) {
|
||||
@@ -341,7 +266,14 @@ export async function runRemoteQuery(
|
||||
return;
|
||||
}
|
||||
|
||||
const remoteQuery = await buildRemoteQueryEntity(repositories, queryFile, queryMetadata, owner, repo, queryStartTime, workflowRunId);
|
||||
const remoteQuery = await buildRemoteQueryEntity(
|
||||
queryFile,
|
||||
queryMetadata,
|
||||
owner,
|
||||
repo,
|
||||
queryStartTime,
|
||||
workflowRunId,
|
||||
language);
|
||||
|
||||
// don't return the path because it has been deleted
|
||||
return { query: remoteQuery };
|
||||
@@ -361,15 +293,30 @@ async function runRemoteQueriesApiRequest(
|
||||
credentials: Credentials,
|
||||
ref: string,
|
||||
language: string,
|
||||
repositories: string[],
|
||||
repoSelection: RepositorySelection,
|
||||
owner: string,
|
||||
repo: string,
|
||||
queryPackBase64: string,
|
||||
dryRun = false
|
||||
): Promise<void | number> {
|
||||
const data = {
|
||||
ref,
|
||||
language,
|
||||
repositories: repoSelection.repositories ?? undefined,
|
||||
repository_lists: repoSelection.repositoryLists ?? undefined,
|
||||
query_pack: queryPackBase64,
|
||||
};
|
||||
|
||||
if (dryRun) {
|
||||
void showAndLogInformationMessage('[DRY RUN] Would have sent request. See extension log for the payload.');
|
||||
void logger.log(JSON.stringify({ ref, language, repositories, owner, repo, queryPackBase64: queryPackBase64.substring(0, 100) + '... ' + queryPackBase64.length + ' bytes' }));
|
||||
void logger.log(JSON.stringify({
|
||||
owner,
|
||||
repo,
|
||||
data: {
|
||||
...data,
|
||||
queryPackBase64: queryPackBase64.substring(0, 100) + '... ' + queryPackBase64.length + ' bytes'
|
||||
}
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -380,61 +327,44 @@ async function runRemoteQueriesApiRequest(
|
||||
{
|
||||
owner,
|
||||
repo,
|
||||
data: {
|
||||
ref,
|
||||
language,
|
||||
repositories,
|
||||
query_pack: queryPackBase64,
|
||||
}
|
||||
data
|
||||
}
|
||||
);
|
||||
const workflowRunId = response.data.workflow_run_id;
|
||||
void showAndLogInformationMessage(`Successfully scheduled runs. [Click here to see the progress](https://github.com/${owner}/${repo}/actions/runs/${workflowRunId}).`);
|
||||
return workflowRunId;
|
||||
const { popupMessage, logMessage } = parseResponse(owner, repo, response.data);
|
||||
void showAndLogInformationMessage(popupMessage, { fullMessage: logMessage });
|
||||
return response.data.workflow_run_id;
|
||||
} catch (error) {
|
||||
return await attemptRerun(error, credentials, ref, language, repositories, owner, repo, queryPackBase64, dryRun);
|
||||
void showAndLogErrorMessage(getErrorMessage(error));
|
||||
}
|
||||
}
|
||||
|
||||
/** Attempts to rerun the query on only the valid repositories */
|
||||
export async function attemptRerun(
|
||||
error: any,
|
||||
credentials: Credentials,
|
||||
ref: string,
|
||||
language: string,
|
||||
repositories: string[],
|
||||
owner: string,
|
||||
repo: string,
|
||||
queryPackBase64: string,
|
||||
dryRun = false
|
||||
) {
|
||||
if (typeof error.message === 'string' && error.message.includes('Some repositories were invalid')) {
|
||||
const invalidRepos = error?.response?.data?.invalid_repos || [];
|
||||
const reposWithoutDbUploads = error?.response?.data?.repos_without_db_uploads || [];
|
||||
void logger.log('Unable to run query on some of the specified repositories');
|
||||
if (invalidRepos.length > 0) {
|
||||
void logger.log(`Invalid repos: ${invalidRepos.join(', ')}`);
|
||||
}
|
||||
if (reposWithoutDbUploads.length > 0) {
|
||||
void logger.log(`Repos without DB uploads: ${reposWithoutDbUploads.join(', ')}`);
|
||||
}
|
||||
const eol = os.EOL;
|
||||
const eol2 = os.EOL + os.EOL;
|
||||
|
||||
if (invalidRepos.length + reposWithoutDbUploads.length === repositories.length) {
|
||||
// Every repo is invalid in some way
|
||||
void showAndLogErrorMessage('Unable to run query on any of the specified repositories.');
|
||||
return;
|
||||
}
|
||||
// exported for testing only
|
||||
export function parseResponse(owner: string, repo: string, response: QueriesResponse) {
|
||||
const popupMessage = `Successfully scheduled runs. [Click here to see the progress](https://github.com/${owner}/${repo}/actions/runs/${response.workflow_run_id}).`
|
||||
+ (response.errors ? `${eol2}Some repositories could not be scheduled. See extension log for details.` : '');
|
||||
|
||||
const popupMessage = 'Unable to run query on some of the specified repositories. [See logs for more details](command:codeQL.showLogs).';
|
||||
const rerunQuery = await showInformationMessageWithAction(popupMessage, 'Rerun on the valid repositories only');
|
||||
if (rerunQuery) {
|
||||
const validRepositories = repositories.filter(r => !invalidRepos.includes(r) && !reposWithoutDbUploads.includes(r));
|
||||
void logger.log(`Rerunning query on set of valid repositories: ${JSON.stringify(validRepositories)}`);
|
||||
return await runRemoteQueriesApiRequest(credentials, ref, language, validRepositories, owner, repo, queryPackBase64, dryRun);
|
||||
}
|
||||
} else {
|
||||
void showAndLogErrorMessage(error);
|
||||
let logMessage = `Successfully scheduled runs. See https://github.com/${owner}/${repo}/actions/runs/${response.workflow_run_id}.`;
|
||||
if (response.repositories_queried) {
|
||||
logMessage += `${eol2}Repositories queried:${eol}${response.repositories_queried.join(', ')}`;
|
||||
}
|
||||
if (response.errors) {
|
||||
logMessage += `${eol2}Some repositories could not be scheduled.`;
|
||||
if (response.errors.invalid_repositories?.length) {
|
||||
logMessage += `${eol2}Invalid repositories:${eol}${response.errors.invalid_repositories.join(', ')}`;
|
||||
}
|
||||
if (response.errors.repositories_without_database?.length) {
|
||||
logMessage += `${eol2}Repositories without databases:${eol}${response.errors.repositories_without_database.join(', ')}`;
|
||||
logMessage += `${eol}For each public repository that has not yet been added to the database service, we will try to create a database next time the store is updated.`;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
popupMessage,
|
||||
logMessage
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -454,7 +384,7 @@ async function ensureNameAndSuite(queryPackDir: string, packRelativePath: string
|
||||
qlpack.name = QUERY_PACK_NAME;
|
||||
|
||||
qlpack.defaultSuite = [{
|
||||
description: 'Query suite for remote query'
|
||||
description: 'Query suite for variant analysis'
|
||||
}, {
|
||||
query: packRelativePath.replace(/\\/g, '/')
|
||||
}];
|
||||
@@ -462,33 +392,28 @@ async function ensureNameAndSuite(queryPackDir: string, packRelativePath: string
|
||||
}
|
||||
|
||||
async function buildRemoteQueryEntity(
|
||||
repositories: string[],
|
||||
queryFilePath: string,
|
||||
queryMetadata: QueryMetadata | undefined,
|
||||
controllerRepoOwner: string,
|
||||
controllerRepoName: string,
|
||||
queryStartTime: Date,
|
||||
workflowRunId: number
|
||||
queryStartTime: number,
|
||||
workflowRunId: number,
|
||||
language: string
|
||||
): Promise<RemoteQuery> {
|
||||
// The query name is either the name as specified in the query metadata, or the file name.
|
||||
// The query name is either the name as specified in the query metadata, or the file name.
|
||||
const queryName = queryMetadata?.name ?? path.basename(queryFilePath);
|
||||
|
||||
const queryRepos = repositories.map(r => {
|
||||
const [owner, repo] = r.split('/');
|
||||
return { owner: owner, name: repo };
|
||||
});
|
||||
|
||||
const queryText = await fs.readFile(queryFilePath, 'utf8');
|
||||
|
||||
return {
|
||||
queryName,
|
||||
queryFilePath,
|
||||
queryText,
|
||||
language,
|
||||
controllerRepository: {
|
||||
owner: controllerRepoOwner,
|
||||
name: controllerRepoName,
|
||||
},
|
||||
repositories: queryRepos,
|
||||
executionStartTime: queryStartTime,
|
||||
actionsWorkflowRunId: workflowRunId
|
||||
};
|
||||
|
||||
@@ -1,175 +0,0 @@
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueryResult } from './remote-query-result';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
|
||||
export const sampleRemoteQuery: RemoteQuery = {
|
||||
queryName: 'Inefficient regular expression',
|
||||
queryFilePath: '/Users/foo/dev/vscode-codeql-starter/ql/javascript/ql/src/Performance/ReDoS.ql',
|
||||
queryText: '/**\n * @name Inefficient regular expression\n * @description A regular expression that requires exponential time to match certain inputs\n * can be a performance bottleneck, and may be vulnerable to denial-of-service\n * attacks.\n * @kind problem\n * @problem.severity error\n * @security-severity 7.5\n * @precision high\n * @id js/redos\n * @tags security\n * external/cwe/cwe-1333\n * external/cwe/cwe-730\n * external/cwe/cwe-400\n */\n\nimport javascript\nimport semmle.javascript.security.performance.ReDoSUtil\nimport semmle.javascript.security.performance.ExponentialBackTracking\n\nfrom RegExpTerm t, string pump, State s, string prefixMsg\nwhere hasReDoSResult(t, pump, s, prefixMsg)\nselect t,\n "This part of the regular expression may cause exponential backtracking on strings " + prefixMsg +\n "containing many repetitions of \'" + pump + "\'."\n',
|
||||
controllerRepository: {
|
||||
owner: 'big-corp',
|
||||
name: 'controller-repo'
|
||||
},
|
||||
repositories: [
|
||||
{
|
||||
owner: 'big-corp',
|
||||
name: 'repo1'
|
||||
},
|
||||
{
|
||||
owner: 'big-corp',
|
||||
name: 'repo2'
|
||||
},
|
||||
{
|
||||
owner: 'big-corp',
|
||||
name: 'repo3'
|
||||
},
|
||||
{
|
||||
owner: 'big-corp',
|
||||
name: 'repo4'
|
||||
},
|
||||
{
|
||||
owner: 'big-corp',
|
||||
name: 'repo5'
|
||||
}
|
||||
],
|
||||
executionStartTime: new Date('2022-01-06T17:02:15.026Z'),
|
||||
actionsWorkflowRunId: 1662757118
|
||||
};
|
||||
|
||||
export const sampleRemoteQueryResult: RemoteQueryResult = {
|
||||
executionEndTime: new Date('2022-01-06T17:04:37.026Z'),
|
||||
analysisSummaries: [
|
||||
{
|
||||
nwo: 'big-corp/repo1',
|
||||
resultCount: 85,
|
||||
fileSizeInBytes: 14123,
|
||||
downloadLink: {
|
||||
id: '137697017',
|
||||
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697017',
|
||||
innerFilePath: 'results.sarif'
|
||||
}
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo2',
|
||||
resultCount: 20,
|
||||
fileSizeInBytes: 8698,
|
||||
downloadLink: {
|
||||
id: '137697018',
|
||||
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697018',
|
||||
innerFilePath: 'results.sarif'
|
||||
}
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo3',
|
||||
resultCount: 8,
|
||||
fileSizeInBytes: 4123,
|
||||
downloadLink: {
|
||||
id: '137697019',
|
||||
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697019',
|
||||
innerFilePath: 'results.sarif'
|
||||
}
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo4',
|
||||
resultCount: 3,
|
||||
fileSizeInBytes: 3313,
|
||||
downloadLink: {
|
||||
id: '137697020',
|
||||
urlPath: '/repos/big-corp/controller-repo/actions/artifacts/137697020',
|
||||
innerFilePath: 'results.sarif'
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
|
||||
const createAnalysisResults = (n: number) => Array(n).fill({ 'message': 'Sample text' });
|
||||
|
||||
export const sampleAnalysesResultsStage1: AnalysisResults[] = [
|
||||
{
|
||||
nwo: 'big-corp/repo1',
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo2',
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo3',
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
},
|
||||
// No entries for repo4
|
||||
];
|
||||
|
||||
export const sampleAnalysesResultsStage2: AnalysisResults[] = [
|
||||
{
|
||||
nwo: 'big-corp/repo1',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(85)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo2',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(20)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo3',
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo4',
|
||||
status: 'InProgress',
|
||||
results: []
|
||||
},
|
||||
];
|
||||
|
||||
export const sampleAnalysesResultsStage3: AnalysisResults[] = [
|
||||
{
|
||||
nwo: 'big-corp/repo1',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(85)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo2',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(20)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo3',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(8)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo4',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(3)
|
||||
},
|
||||
];
|
||||
|
||||
export const sampleAnalysesResultsWithFailure: AnalysisResults[] = [
|
||||
{
|
||||
nwo: 'big-corp/repo1',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(85)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo2',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(20)
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo3',
|
||||
status: 'Failed',
|
||||
results: []
|
||||
},
|
||||
{
|
||||
nwo: 'big-corp/repo4',
|
||||
status: 'Completed',
|
||||
results: createAnalysisResults(3)
|
||||
},
|
||||
];
|
||||
253
extensions/ql-vscode/src/remote-queries/sarif-processing.ts
Normal file
@@ -0,0 +1,253 @@
|
||||
import * as sarif from 'sarif';
|
||||
import { parseSarifPlainTextMessage, parseSarifRegion } from '../pure/sarif-utils';
|
||||
|
||||
import {
|
||||
AnalysisAlert,
|
||||
CodeFlow,
|
||||
AnalysisMessage,
|
||||
AnalysisMessageToken,
|
||||
ResultSeverity,
|
||||
ThreadFlow,
|
||||
CodeSnippet,
|
||||
HighlightedRegion
|
||||
} from './shared/analysis-result';
|
||||
|
||||
const defaultSeverity = 'Warning';
|
||||
|
||||
export function extractAnalysisAlerts(
|
||||
sarifLog: sarif.Log,
|
||||
fileLinkPrefix: string
|
||||
): {
|
||||
alerts: AnalysisAlert[],
|
||||
errors: string[]
|
||||
} {
|
||||
const alerts: AnalysisAlert[] = [];
|
||||
const errors: string[] = [];
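// Convert each SARIF result into interpreted alerts; record a per-result error and keep going so one bad result does not abort the whole extraction.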
|
||||
|
||||
for (const run of sarifLog.runs ?? []) {
|
||||
for (const result of run.results ?? []) {
|
||||
try {
|
||||
alerts.push(...extractResultAlerts(run, result, fileLinkPrefix));
|
||||
} catch (e) {
|
||||
errors.push(`Error when processing SARIF result: ${e}`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { alerts, errors };
|
||||
}
|
||||
|
||||
function extractResultAlerts(
|
||||
run: sarif.Run,
|
||||
result: sarif.Result,
|
||||
fileLinkPrefix: string
|
||||
): AnalysisAlert[] {
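// A single SARIF result can report several locations; emit one alert per location.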
|
||||
const alerts: AnalysisAlert[] = [];
|
||||
|
||||
const message = getMessage(result, fileLinkPrefix);
|
||||
const rule = tryGetRule(run, result);
|
||||
const severity = tryGetSeverity(run, result, rule) || defaultSeverity;
|
||||
const codeFlows = getCodeFlows(result, fileLinkPrefix);
|
||||
const shortDescription = getShortDescription(rule, message!);
|
||||
|
||||
for (const location of result.locations ?? []) {
|
||||
const physicalLocation = location.physicalLocation!;
|
||||
const filePath = physicalLocation.artifactLocation!.uri!;
|
||||
const codeSnippet = getCodeSnippet(physicalLocation.contextRegion, physicalLocation.region);
|
||||
const highlightedRegion = physicalLocation.region
|
||||
? getHighlightedRegion(physicalLocation.region)
|
||||
: undefined;
|
||||
|
||||
const analysisAlert: AnalysisAlert = {
|
||||
message,
|
||||
shortDescription,
|
||||
fileLink: {
|
||||
fileLinkPrefix,
|
||||
filePath,
|
||||
},
|
||||
severity,
|
||||
codeSnippet,
|
||||
highlightedRegion,
|
||||
codeFlows: codeFlows
|
||||
};
|
||||
|
||||
alerts.push(analysisAlert);
|
||||
}
|
||||
|
||||
return alerts;
|
||||
}
|
||||
|
||||
function getShortDescription(
|
||||
rule: sarif.ReportingDescriptor | undefined,
|
||||
message: AnalysisMessage,
|
||||
): string {
|
||||
if (rule?.shortDescription?.text) {
|
||||
return rule.shortDescription.text;
|
||||
}
|
||||
|
||||
return message.tokens.map(token => token.text).join('');
|
||||
}
|
||||
|
||||
export function tryGetSeverity(
|
||||
sarifRun: sarif.Run,
|
||||
result: sarif.Result,
|
||||
rule: sarif.ReportingDescriptor | undefined
|
||||
): ResultSeverity | undefined {
|
||||
if (!sarifRun || !result || !rule) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const severity = rule.properties?.['problem.severity'];
|
||||
if (!severity) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
switch (severity.toLowerCase()) {
|
||||
case 'recommendation':
|
||||
return 'Recommendation';
|
||||
case 'warning':
|
||||
return 'Warning';
|
||||
case 'error':
|
||||
return 'Error';
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function tryGetRule(
|
||||
sarifRun: sarif.Run,
|
||||
result: sarif.Result
|
||||
): sarif.ReportingDescriptor | undefined {
|
||||
if (!sarifRun || !result) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const resultRule = result.rule;
|
||||
if (!resultRule) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// The rule can be found in two places:
|
||||
// - Either in the run's tool driver tool component
|
||||
// - Or in the run's tool extensions tool component
|
||||
|
||||
const ruleId = resultRule.id;
|
||||
if (ruleId) {
|
||||
const rule = sarifRun.tool.driver.rules?.find(r => r.id === ruleId);
|
||||
if (rule) {
|
||||
return rule;
|
||||
}
|
||||
}
|
||||
|
||||
const ruleIndex = resultRule.index;
|
||||
if (ruleIndex != undefined) {
|
||||
const toolComponentIndex = result.rule?.toolComponent?.index;
|
||||
const toolExtensions = sarifRun.tool.extensions;
|
||||
if (toolComponentIndex !== undefined && toolExtensions !== undefined) {
|
||||
const toolComponent = toolExtensions[toolComponentIndex];
|
||||
if (toolComponent?.rules !== undefined) {
|
||||
return toolComponent.rules[ruleIndex];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Couldn't find the rule.
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function getCodeSnippet(region?: sarif.Region, alternateRegion?: sarif.Region): CodeSnippet | undefined {
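// Prefer the broader context region when it is present and fall back to the precise result region otherwise.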
|
||||
region = region ?? alternateRegion;
|
||||
|
||||
if (!region) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const text = region.snippet?.text || '';
|
||||
const { startLine, endLine } = parseSarifRegion(region);
|
||||
|
||||
return {
|
||||
startLine,
|
||||
endLine,
|
||||
text
|
||||
};
|
||||
}
|
||||
|
||||
function getHighlightedRegion(region: sarif.Region): HighlightedRegion {
|
||||
const { startLine, startColumn, endLine, endColumn } = parseSarifRegion(region);
|
||||
|
||||
return {
|
||||
startLine,
|
||||
startColumn,
|
||||
endLine,
|
||||
|
||||
// parseSarifRegion currently shifts the end column by 1 to account
|
||||
// for the way vscode counts columns so we need to shift it back.
|
||||
endColumn: endColumn + 1
|
||||
};
|
||||
}
|
||||
|
||||
function getCodeFlows(
|
||||
result: sarif.Result,
|
||||
fileLinkPrefix: string
|
||||
): CodeFlow[] {
|
||||
const codeFlows = [];
|
||||
|
||||
if (result.codeFlows) {
|
||||
for (const codeFlow of result.codeFlows) {
|
||||
const threadFlows = [];
|
||||
|
||||
for (const threadFlow of codeFlow.threadFlows) {
|
||||
for (const threadFlowLocation of threadFlow.locations) {
|
||||
const physicalLocation = threadFlowLocation!.location!.physicalLocation!;
|
||||
const filePath = physicalLocation!.artifactLocation!.uri!;
|
||||
const codeSnippet = getCodeSnippet(physicalLocation.contextRegion, physicalLocation.region);
|
||||
const highlightedRegion = physicalLocation.region
|
||||
? getHighlightedRegion(physicalLocation.region)
|
||||
: undefined;
|
||||
|
||||
threadFlows.push({
|
||||
fileLink: {
|
||||
fileLinkPrefix,
|
||||
filePath,
|
||||
},
|
||||
codeSnippet,
|
||||
highlightedRegion
|
||||
} as ThreadFlow);
|
||||
}
|
||||
}
|
||||
|
||||
codeFlows.push({ threadFlows } as CodeFlow);
|
||||
}
|
||||
}
|
||||
|
||||
return codeFlows;
|
||||
}
|
||||
|
||||
function getMessage(result: sarif.Result, fileLinkPrefix: string): AnalysisMessage {
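// Split the SARIF message into plain-text and location tokens, resolving each location placeholder against the result's related locations.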
|
||||
const tokens: AnalysisMessageToken[] = [];
|
||||
|
||||
const messageText = result.message!.text!;
|
||||
const messageParts = parseSarifPlainTextMessage(messageText);
|
||||
|
||||
for (const messagePart of messageParts) {
|
||||
if (typeof messagePart === 'string') {
|
||||
tokens.push({ t: 'text', text: messagePart });
|
||||
} else {
|
||||
const relatedLocation = result.relatedLocations!.find(rl => rl.id === messagePart.dest);
|
||||
tokens.push({
|
||||
t: 'location',
|
||||
text: messagePart.text,
|
||||
location: {
|
||||
fileLink: {
|
||||
fileLinkPrefix: fileLinkPrefix,
|
||||
filePath: relatedLocation!.physicalLocation!.artifactLocation!.uri!,
|
||||
},
|
||||
highlightedRegion: getHighlightedRegion(relatedLocation!.physicalLocation!.region!),
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return { tokens };
|
||||
}
|
||||
@@ -0,0 +1,4 @@
|
||||
export interface AnalysisFailure {
|
||||
nwo: string,
|
||||
error: string
|
||||
}
|
||||
@@ -1,11 +1,88 @@
|
||||
import { RawResultSet, ResultSetSchema } from '../../pure/bqrs-cli-types';
|
||||
|
||||
export type AnalysisResultStatus = 'InProgress' | 'Completed' | 'Failed';
|
||||
|
||||
export interface AnalysisResults {
|
||||
nwo: string;
|
||||
status: AnalysisResultStatus;
|
||||
results: QueryResult[];
|
||||
interpretedResults: AnalysisAlert[];
|
||||
rawResults?: AnalysisRawResults;
|
||||
}
|
||||
|
||||
export interface QueryResult {
|
||||
message?: string;
|
||||
export interface AnalysisRawResults {
|
||||
schema: ResultSetSchema;
|
||||
resultSet: RawResultSet;
|
||||
fileLinkPrefix: string;
|
||||
capped: boolean;
|
||||
}
|
||||
|
||||
export interface AnalysisAlert {
|
||||
message: AnalysisMessage;
|
||||
shortDescription: string;
|
||||
severity: ResultSeverity;
|
||||
fileLink: FileLink;
|
||||
codeSnippet?: CodeSnippet;
|
||||
highlightedRegion?: HighlightedRegion;
|
||||
codeFlows: CodeFlow[];
|
||||
}
|
||||
|
||||
export interface FileLink {
|
||||
fileLinkPrefix: string;
|
||||
filePath: string;
|
||||
}
|
||||
|
||||
export interface CodeSnippet {
|
||||
startLine: number;
|
||||
endLine: number;
|
||||
text: string;
|
||||
}
|
||||
|
||||
export interface HighlightedRegion {
|
||||
startLine: number;
|
||||
startColumn: number;
|
||||
endLine: number;
|
||||
endColumn: number;
|
||||
}
|
||||
|
||||
export interface CodeFlow {
|
||||
threadFlows: ThreadFlow[];
|
||||
}
|
||||
|
||||
export interface ThreadFlow {
|
||||
fileLink: FileLink;
|
||||
codeSnippet: CodeSnippet;
|
||||
highlightedRegion?: HighlightedRegion;
|
||||
message?: AnalysisMessage;
|
||||
}
|
||||
|
||||
export interface AnalysisMessage {
|
||||
tokens: AnalysisMessageToken[]
|
||||
}
|
||||
|
||||
export type AnalysisMessageToken =
|
||||
| AnalysisMessageTextToken
|
||||
| AnalysisMessageLocationToken;
|
||||
|
||||
export interface AnalysisMessageTextToken {
|
||||
t: 'text';
|
||||
text: string;
|
||||
}
|
||||
|
||||
export interface AnalysisMessageLocationToken {
|
||||
t: 'location';
|
||||
text: string;
|
||||
location: {
|
||||
fileLink: FileLink;
|
||||
highlightedRegion?: HighlightedRegion;
|
||||
};
|
||||
}
|
||||
|
||||
export type ResultSeverity = 'Recommendation' | 'Warning' | 'Error';
|
||||
|
||||
/**
|
||||
* Returns the number of (raw + interpreted) results for an analysis.
|
||||
*/
|
||||
export const getAnalysisResultCount = (analysisResults: AnalysisResults): number => {
|
||||
const rawResultCount = analysisResults.rawResults?.resultSet.rows.length || 0;
|
||||
return analysisResults.interpretedResults.length + rawResultCount;
|
||||
};
|
||||
|
||||
@@ -1,20 +1,25 @@
|
||||
import { DownloadLink } from '../download-link';
|
||||
import { AnalysisFailure } from './analysis-failure';
|
||||
|
||||
export interface RemoteQueryResult {
|
||||
queryTitle: string;
|
||||
queryFileName: string;
|
||||
queryFilePath: string;
|
||||
queryText: string;
|
||||
language: string;
|
||||
workflowRunUrl: string;
|
||||
totalRepositoryCount: number;
|
||||
affectedRepositoryCount: number;
|
||||
totalResultCount: number;
|
||||
executionTimestamp: string;
|
||||
executionDuration: string;
|
||||
analysisSummaries: AnalysisSummary[]
|
||||
analysisSummaries: AnalysisSummary[];
|
||||
analysisFailures: AnalysisFailure[];
|
||||
}
|
||||
|
||||
export interface AnalysisSummary {
|
||||
nwo: string,
|
||||
databaseSha: string,
|
||||
resultCount: number,
|
||||
downloadLink: DownloadLink,
|
||||
fileSize: string,
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
// The maximum number of raw results to read from a BQRS file.
|
||||
// This is used to avoid reading the entire result set into memory
|
||||
// and trying to render it on screen. Users will be warned if the
|
||||
// results are capped.
|
||||
export const MAX_RAW_RESULTS = 500;
|
||||
@@ -0,0 +1,22 @@
|
||||
import * as React from 'react';
|
||||
import styled from 'styled-components';
|
||||
|
||||
const Button = styled.button`
|
||||
color: var(--vscode-button-foreground);
|
||||
background-color: var(--vscode-button-background);
|
||||
&:hover {
|
||||
text-decoration: none;
|
||||
background-color: var(--vscode-button-hoverBackground);
|
||||
}
|
||||
cursor: pointer;
|
||||
padding: 8px;
|
||||
border: 0;
|
||||
`;
|
||||
|
||||
const ActionButton = ({ text, onClick }: { text: string, onClick: () => void }) => (
|
||||
<Button onClick={onClick}>
|
||||
{text}
|
||||
</Button>
|
||||
);
|
||||
|
||||
export default ActionButton;
|
||||
@@ -0,0 +1,26 @@
|
||||
import * as React from 'react';
|
||||
import { AnalysisAlert } from '../shared/analysis-result';
|
||||
import CodePaths from './CodePaths';
|
||||
import FileCodeSnippet from './FileCodeSnippet';
|
||||
|
||||
const AnalysisAlertResult = ({ alert }: { alert: AnalysisAlert }) => {
|
||||
const showPathsLink = alert.codeFlows.length > 0;
|
||||
|
||||
return <FileCodeSnippet
|
||||
fileLink={alert.fileLink}
|
||||
codeSnippet={alert.codeSnippet}
|
||||
highlightedRegion={alert.highlightedRegion}
|
||||
severity={alert.severity}
|
||||
message={alert.message}
|
||||
messageChildren={
|
||||
showPathsLink && <CodePaths
|
||||
codeFlows={alert.codeFlows}
|
||||
ruleDescription={alert.shortDescription}
|
||||
severity={alert.severity}
|
||||
message={alert.message}
|
||||
/>
|
||||
}
|
||||
/>;
|
||||
};
|
||||
|
||||
export default AnalysisAlertResult;
|
||||
180
extensions/ql-vscode/src/remote-queries/view/CodePaths.tsx
Normal file
@@ -0,0 +1,180 @@
|
||||
import { TriangleDownIcon, XCircleIcon } from '@primer/octicons-react';
|
||||
import { ActionList, ActionMenu, Box, Button, Label, Link, Overlay } from '@primer/react';
|
||||
import * as React from 'react';
|
||||
import { useRef, useState } from 'react';
|
||||
import styled from 'styled-components';
|
||||
import { CodeFlow, AnalysisMessage, ResultSeverity } from '../shared/analysis-result';
|
||||
import FileCodeSnippet from './FileCodeSnippet';
|
||||
import SectionTitle from './SectionTitle';
|
||||
import VerticalSpace from './VerticalSpace';
|
||||
|
||||
const StyledCloseButton = styled.button`
|
||||
position: absolute;
|
||||
top: 1em;
|
||||
right: 4em;
|
||||
background-color: var(--vscode-editor-background);
|
||||
color: var(--vscode-editor-foreground);
|
||||
border: none;
|
||||
&:focus-visible {
|
||||
outline: none
|
||||
}
|
||||
`;
|
||||
|
||||
const OverlayContainer = styled.div`
|
||||
padding: 1em;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
padding: 2em;
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
background-color: var(--vscode-editor-background);
|
||||
color: var(--vscode-editor-foreground);
|
||||
overflow-y: scroll;
|
||||
`;
|
||||
|
||||
const CloseButton = ({ onClick }: { onClick: () => void }) => (
|
||||
<StyledCloseButton onClick={onClick} tabIndex={-1} >
|
||||
<XCircleIcon size={24} />
|
||||
</StyledCloseButton>
|
||||
);
|
||||
|
||||
const CodePath = ({
|
||||
codeFlow,
|
||||
message,
|
||||
severity
|
||||
}: {
|
||||
codeFlow: CodeFlow;
|
||||
message: AnalysisMessage;
|
||||
severity: ResultSeverity;
|
||||
}) => {
|
||||
return <>
|
||||
{codeFlow.threadFlows.map((threadFlow, index) =>
|
||||
<div key={`thread-flow-${index}`} style={{ maxWidth: '55em' }}>
|
||||
{index !== 0 && <VerticalSpace size={3} />}
|
||||
|
||||
<Box display="flex" justifyContent="center" alignItems="center">
|
||||
<Box flexGrow={1} p={0} border="none">
|
||||
<SectionTitle>Step {index + 1}</SectionTitle>
|
||||
</Box>
|
||||
{index === 0 &&
|
||||
<Box p={0} border="none">
|
||||
<Label>Source</Label>
|
||||
</Box>
|
||||
}
|
||||
{index === codeFlow.threadFlows.length - 1 &&
|
||||
<Box p={0} border="none">
|
||||
<Label>Sink</Label>
|
||||
</Box>
|
||||
}
|
||||
</Box>
|
||||
|
||||
<VerticalSpace size={2} />
|
||||
<FileCodeSnippet
|
||||
fileLink={threadFlow.fileLink}
|
||||
codeSnippet={threadFlow.codeSnippet}
|
||||
highlightedRegion={threadFlow.highlightedRegion}
|
||||
severity={severity}
|
||||
message={index === codeFlow.threadFlows.length - 1 ? message : threadFlow.message} />
|
||||
</div>
|
||||
)}
|
||||
</>;
|
||||
};
|
||||
|
||||
const getCodeFlowName = (codeFlow: CodeFlow) => {
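// A code flow is labelled with the file name of its final step (the sink).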
|
||||
const filePath = codeFlow.threadFlows[codeFlow.threadFlows.length - 1].fileLink.filePath;
|
||||
return filePath.substring(filePath.lastIndexOf('/') + 1);
|
||||
};
|
||||
|
||||
const Menu = ({
|
||||
codeFlows,
|
||||
setSelectedCodeFlow
|
||||
}: {
|
||||
codeFlows: CodeFlow[],
|
||||
setSelectedCodeFlow: (value: React.SetStateAction<CodeFlow>) => void
|
||||
}) => {
|
||||
return <ActionMenu>
|
||||
<ActionMenu.Anchor>
|
||||
<Button variant="invisible" sx={{ fontWeight: 'normal', color: 'var(--vscode-editor-foreground);', padding: 0 }} >
|
||||
{getCodeFlowName(codeFlows[0])}
|
||||
<TriangleDownIcon size={16} />
|
||||
</Button>
|
||||
</ActionMenu.Anchor>
|
||||
<ActionMenu.Overlay sx={{ backgroundColor: 'var(--vscode-editor-background)' }}>
|
||||
<ActionList>
|
||||
{codeFlows.map((codeFlow, index) =>
|
||||
<ActionList.Item
|
||||
key={`codeflow-${index}`}
|
||||
onSelect={(e: React.MouseEvent) => { setSelectedCodeFlow(codeFlow); }}>
|
||||
{getCodeFlowName(codeFlow)}
|
||||
</ActionList.Item>
|
||||
)}
|
||||
</ActionList>
|
||||
</ActionMenu.Overlay>
|
||||
</ActionMenu>;
|
||||
};
|
||||
|
||||
const CodePaths = ({
|
||||
codeFlows,
|
||||
ruleDescription,
|
||||
message,
|
||||
severity
|
||||
}: {
|
||||
codeFlows: CodeFlow[],
|
||||
ruleDescription: string,
|
||||
message: AnalysisMessage,
|
||||
severity: ResultSeverity
|
||||
}) => {
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const [selectedCodeFlow, setSelectedCodeFlow] = useState(codeFlows[0]);
|
||||
|
||||
const anchorRef = useRef<HTMLDivElement>(null);
|
||||
const linkRef = useRef<HTMLAnchorElement>(null);
|
||||
|
||||
const closeOverlay = () => setIsOpen(false);
|
||||
|
||||
return (
|
||||
<Box ref={anchorRef}>
|
||||
<Link
|
||||
onClick={() => setIsOpen(true)}
|
||||
ref={linkRef}
|
||||
sx={{ cursor: 'pointer' }}>
|
||||
Show paths
|
||||
</Link>
|
||||
{isOpen && (
|
||||
<Overlay
|
||||
returnFocusRef={linkRef}
|
||||
onEscape={closeOverlay}
|
||||
onClickOutside={closeOverlay}
|
||||
anchorSide="outside-top">
|
||||
<OverlayContainer>
|
||||
<CloseButton onClick={closeOverlay} />
|
||||
|
||||
<SectionTitle>{ruleDescription}</SectionTitle>
|
||||
<VerticalSpace size={2} />
|
||||
|
||||
<Box display="flex" justifyContent="center" alignItems="center">
|
||||
<Box p={0} border="none">
|
||||
{codeFlows.length} paths available: {selectedCodeFlow.threadFlows.length} steps in
|
||||
</Box>
|
||||
<Box flexGrow={1} p={0} paddingLeft="0.2em" border="none">
|
||||
<Menu codeFlows={codeFlows} setSelectedCodeFlow={setSelectedCodeFlow} />
|
||||
</Box>
|
||||
</Box>
|
||||
|
||||
<VerticalSpace size={2} />
|
||||
<CodePath
|
||||
codeFlow={selectedCodeFlow}
|
||||
severity={severity}
|
||||
message={message} />
|
||||
|
||||
<VerticalSpace size={3} />
|
||||
|
||||
</OverlayContainer>
|
||||
</Overlay>
|
||||
)}
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
export default CodePaths;
|
||||
@@ -6,6 +6,7 @@ import { useState } from 'react';
|
||||
const Container = styled.div`
|
||||
display: block;
|
||||
vertical-align: middle;
|
||||
cursor: pointer;
|
||||
`;
|
||||
|
||||
const TitleContainer = styled.span`
|
||||
@@ -15,6 +16,7 @@ const TitleContainer = styled.span`
|
||||
const Button = styled.button`
|
||||
display: inline-block;
|
||||
background-color: transparent;
|
||||
color: var(--vscode-editor-foreground);
|
||||
border: none;
|
||||
padding-left: 0;
|
||||
padding-right: 0.1em;
|
||||
@@ -30,8 +32,8 @@ const CollapsibleItem = ({
|
||||
const [isExpanded, setExpanded] = useState(false);
|
||||
return (
|
||||
<>
|
||||
<Container>
|
||||
<Button onClick={() => setExpanded(!isExpanded)}>
|
||||
<Container onClick={() => setExpanded(!isExpanded)}>
|
||||
<Button>
|
||||
{isExpanded
|
||||
? <ChevronDownIcon size={16} />
|
||||
: <ChevronRightIcon size={16} />
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import * as React from 'react';
|
||||
import * as octicons from '../../view/octicons';
|
||||
import styled from 'styled-components';
|
||||
import { DownloadIcon } from '@primer/octicons-react';
|
||||
|
||||
const ButtonLink = styled.a`
|
||||
display: inline-block;
|
||||
@@ -16,7 +16,7 @@ const ButtonLink = styled.a`
|
||||
|
||||
const DownloadButton = ({ text, onClick }: { text: string, onClick: () => void }) => (
|
||||
<ButtonLink onClick={onClick}>
|
||||
{octicons.download}{text}
|
||||
<DownloadIcon size={16} />{text}
|
||||
</ButtonLink>
|
||||
);
|
||||
|
||||
|
||||
228
extensions/ql-vscode/src/remote-queries/view/FileCodeSnippet.tsx
Normal file
@@ -0,0 +1,228 @@
|
||||
import * as React from 'react';
|
||||
import styled from 'styled-components';
|
||||
import { CodeSnippet, FileLink, HighlightedRegion, AnalysisMessage, ResultSeverity } from '../shared/analysis-result';
|
||||
import { Box, Link } from '@primer/react';
|
||||
import VerticalSpace from './VerticalSpace';
|
||||
import { createRemoteFileRef } from '../../pure/location-link-utils';
|
||||
import { parseHighlightedLine, shouldHighlightLine } from '../../pure/sarif-utils';
|
||||
|
||||
const borderColor = 'var(--vscode-editor-snippetFinalTabstopHighlightBorder)';
|
||||
const warningColor = '#966C23';
|
||||
const highlightColor = 'var(--vscode-editor-findMatchHighlightBackground)';
|
||||
|
||||
const getSeverityColor = (severity: ResultSeverity) => {
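// Map a result severity to the accent colour used for the message border.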
|
||||
switch (severity) {
|
||||
case 'Recommendation':
|
||||
return 'blue';
|
||||
case 'Warning':
|
||||
return warningColor;
|
||||
case 'Error':
|
||||
return 'red';
|
||||
}
|
||||
};
|
||||
|
||||
const replaceSpaceAndTabChar = (text: string) => text.replaceAll(' ', '\u00a0').replaceAll('\t', '\u00a0\u00a0\u00a0\u00a0');
|
||||
|
||||
const Container = styled.div`
|
||||
font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas, Liberation Mono, monospace;
|
||||
font-size: x-small;
|
||||
`;
|
||||
|
||||
const TitleContainer = styled.div`
|
||||
border: 0.1em solid ${borderColor};
|
||||
border-top-left-radius: 0.2em;
|
||||
border-top-right-radius: 0.2em;
|
||||
padding: 0.5em;
|
||||
`;
|
||||
|
||||
const CodeContainer = styled.div`
|
||||
border-left: 0.1em solid ${borderColor};
|
||||
border-right: 0.1em solid ${borderColor};
|
||||
border-bottom: 0.1em solid ${borderColor};
|
||||
border-bottom-left-radius: 0.2em;
|
||||
border-bottom-right-radius: 0.2em;
|
||||
padding-top: 1em;
|
||||
padding-bottom: 1em;
|
||||
`;
|
||||
|
||||
const MessageText = styled.div`
|
||||
font-size: x-small;
|
||||
padding-left: 0.5em;
|
||||
`;
|
||||
|
||||
const MessageContainer = styled.div`
|
||||
padding-top: 0.5em;
|
||||
padding-bottom: 0.5em;
|
||||
`;
|
||||
|
||||
const PlainLine = ({ text }: { text: string }) => {
|
||||
return <span>{replaceSpaceAndTabChar(text)}</span>;
|
||||
};
|
||||
|
||||
const HighlightedLine = ({ text }: { text: string }) => {
|
||||
return <span style={{ backgroundColor: highlightColor }}>{replaceSpaceAndTabChar(text)}</span>;
|
||||
};
|
||||
|
||||
const Message = ({
|
||||
message,
|
||||
currentLineNumber,
|
||||
highlightedRegion,
|
||||
borderColor,
|
||||
children
|
||||
}: {
|
||||
message: AnalysisMessage,
|
||||
currentLineNumber: number,
|
||||
highlightedRegion?: HighlightedRegion,
|
||||
borderColor: string,
|
||||
children: React.ReactNode
|
||||
}) => {
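// Only render the message once, directly under the last line of the highlighted region.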
|
||||
if (!highlightedRegion || highlightedRegion.endLine !== currentLineNumber) {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
return <MessageContainer>
|
||||
<Box
|
||||
borderColor="border.default"
|
||||
borderWidth={1}
|
||||
borderStyle="solid"
|
||||
borderLeftColor={borderColor}
|
||||
borderLeftWidth={3}
|
||||
paddingTop="1em"
|
||||
paddingBottom="1em">
|
||||
<MessageText>
|
||||
{message.tokens.map((token, index) => {
|
||||
switch (token.t) {
|
||||
case 'text':
|
||||
return <span key={`token-${index}`}>{token.text}</span>;
|
||||
case 'location':
|
||||
return <Link
|
||||
key={`token-${index}`}
|
||||
href={createRemoteFileRef(
|
||||
token.location.fileLink,
|
||||
token.location.highlightedRegion?.startLine,
|
||||
token.location.highlightedRegion?.endLine)}>
|
||||
{token.text}
|
||||
</Link>;
|
||||
default:
|
||||
return <></>;
|
||||
}
|
||||
})}
|
||||
{children && <>
|
||||
<VerticalSpace size={2} />
|
||||
{children}
|
||||
</>
|
||||
}
|
||||
</MessageText>
|
||||
</Box>
|
||||
|
||||
</MessageContainer>;
|
||||
};
|
||||
|
||||
const CodeLine = ({
|
||||
line,
|
||||
lineNumber,
|
||||
highlightedRegion
|
||||
}: {
|
||||
line: string,
|
||||
lineNumber: number,
|
||||
highlightedRegion?: HighlightedRegion
|
||||
}) => {
|
||||
if (!highlightedRegion || !shouldHighlightLine(lineNumber, highlightedRegion)) {
|
||||
return <PlainLine text={line} />;
|
||||
}
|
||||
|
||||
const partiallyHighlightedLine = parseHighlightedLine(line, lineNumber, highlightedRegion);
|
||||
|
||||
return (
|
||||
<>
|
||||
<PlainLine text={partiallyHighlightedLine.plainSection1} />
|
||||
<HighlightedLine text={partiallyHighlightedLine.highlightedSection} />
|
||||
<PlainLine text={partiallyHighlightedLine.plainSection2} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
const FileCodeSnippet = ({
|
||||
fileLink,
|
||||
codeSnippet,
|
||||
highlightedRegion,
|
||||
severity,
|
||||
message,
|
||||
messageChildren,
|
||||
}: {
|
||||
fileLink: FileLink,
|
||||
codeSnippet?: CodeSnippet,
|
||||
highlightedRegion?: HighlightedRegion,
|
||||
severity?: ResultSeverity,
|
||||
message?: AnalysisMessage,
|
||||
messageChildren?: React.ReactNode,
|
||||
}) => {
|
||||
|
||||
const startingLine = codeSnippet?.startLine || 0;
|
||||
const endingLine = codeSnippet?.endLine || 0;
|
||||
|
||||
const titleFileUri = createRemoteFileRef(
|
||||
fileLink,
|
||||
highlightedRegion?.startLine || startingLine,
|
||||
highlightedRegion?.endLine || endingLine);
|
||||
|
||||
if (!codeSnippet) {
|
||||
return (
|
||||
<Container>
|
||||
<TitleContainer>
|
||||
<Link href={titleFileUri}>{fileLink.filePath}</Link>
|
||||
</TitleContainer>
|
||||
</Container>
|
||||
);
|
||||
}
|
||||
|
||||
const code = codeSnippet.text.split('\n');
|
||||
|
||||
return (
|
||||
<Container>
|
||||
<TitleContainer>
|
||||
<Link href={titleFileUri}>{fileLink.filePath}</Link>
|
||||
</TitleContainer>
|
||||
<CodeContainer>
|
||||
{code.map((line, index) => (
|
||||
<div key={index}>
|
||||
<Box display="flex">
|
||||
<Box
|
||||
p={2}
|
||||
borderStyle="none"
|
||||
paddingTop="0.01em"
|
||||
paddingLeft="0.5em"
|
||||
paddingRight="0.5em"
|
||||
paddingBottom="0.2em">
|
||||
{startingLine + index}
|
||||
</Box>
|
||||
<Box
|
||||
flexGrow={1}
|
||||
p={2}
|
||||
borderStyle="none"
|
||||
paddingTop="0.01em"
|
||||
paddingLeft="1.5em"
|
||||
paddingRight="0.5em"
|
||||
paddingBottom="0.2em"
|
||||
sx={{ wordBreak: 'break-word' }}>
|
||||
<CodeLine
|
||||
line={line}
|
||||
lineNumber={startingLine + index}
|
||||
highlightedRegion={highlightedRegion} />
|
||||
</Box>
|
||||
</Box>
|
||||
{message && severity && <Message
|
||||
message={message}
|
||||
currentLineNumber={startingLine + index}
|
||||
highlightedRegion={highlightedRegion}
|
||||
borderColor={getSeverityColor(severity)}>
|
||||
{messageChildren}
|
||||
</Message>}
|
||||
</div>
|
||||
))}
|
||||
</CodeContainer>
|
||||
</Container>
|
||||
);
|
||||
};
|
||||
|
||||
export default FileCodeSnippet;
|
||||
@@ -0,0 +1,91 @@
|
||||
import * as React from 'react';
|
||||
import { Box, Link } from '@primer/react';
|
||||
import { CellValue, RawResultSet, ResultSetSchema } from '../../pure/bqrs-cli-types';
|
||||
import { tryGetRemoteLocation } from '../../pure/bqrs-utils';
|
||||
import { useState } from 'react';
|
||||
import TextButton from './TextButton';
|
||||
import { convertNonPrintableChars } from '../../text-utils';
|
||||
|
||||
const numOfResultsInContractedMode = 5;
|
||||
|
||||
const Row = ({
|
||||
row,
|
||||
fileLinkPrefix
|
||||
}: {
|
||||
row: CellValue[],
|
||||
fileLinkPrefix: string
|
||||
}) => (
|
||||
<>
|
||||
{row.map((cell, cellIndex) => (
|
||||
<Box key={cellIndex}
|
||||
borderColor="border.default"
|
||||
borderStyle="solid"
|
||||
justifyContent="center"
|
||||
alignItems="center"
|
||||
p={2}
|
||||
sx={{ wordBreak: 'break-word' }}>
|
||||
<Cell value={cell} fileLinkPrefix={fileLinkPrefix} />
|
||||
</Box>
|
||||
))}
|
||||
</>
|
||||
);
|
||||
|
||||
const Cell = ({
|
||||
value,
|
||||
fileLinkPrefix
|
||||
}: {
|
||||
value: CellValue,
|
||||
fileLinkPrefix: string
|
||||
}) => {
|
||||
switch (typeof value) {
|
||||
case 'string':
|
||||
case 'number':
|
||||
case 'boolean':
|
||||
return <span>{convertNonPrintableChars(value.toString())}</span>;
|
||||
case 'object': {
|
||||
const url = tryGetRemoteLocation(value.url, fileLinkPrefix);
|
||||
return <Link href={url}>{convertNonPrintableChars(value.label)}</Link>;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const RawResultsTable = ({
|
||||
schema,
|
||||
results,
|
||||
fileLinkPrefix
|
||||
}: {
|
||||
schema: ResultSetSchema,
|
||||
results: RawResultSet,
|
||||
fileLinkPrefix: string
|
||||
}) => {
|
||||
const [tableExpanded, setTableExpanded] = useState(false);
|
||||
const numOfResultsToShow = tableExpanded ? results.rows.length : numOfResultsInContractedMode;
|
||||
const showButton = results.rows.length > numOfResultsInContractedMode;
|
||||
|
||||
// Create n equal size columns. We use minmax(0, 1fr) because the
|
||||
// minimum width of 1fr is auto, not 0.
|
||||
// https://css-tricks.com/equal-width-columns-in-css-grid-are-kinda-weird/
|
||||
const gridTemplateColumns = `repeat(${schema.columns.length}, minmax(0, 1fr))`;
|
||||
|
||||
return (
|
||||
<>
|
||||
<Box
|
||||
display="grid"
|
||||
gridTemplateColumns={gridTemplateColumns}
|
||||
maxWidth="45rem"
|
||||
p={2}>
|
||||
{results.rows.slice(0, numOfResultsToShow).map((row, rowIndex) => (
|
||||
<Row key={rowIndex} row={row} fileLinkPrefix={fileLinkPrefix} />
|
||||
))}
|
||||
</Box>
|
||||
{
|
||||
showButton &&
|
||||
<TextButton size='x-small' onClick={() => setTableExpanded(!tableExpanded)}>
|
||||
{tableExpanded ? (<span>View less</span>) : (<span>View all</span>)}
|
||||
</TextButton>
|
||||
}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default RawResultsTable;
|
||||
@@ -1,11 +1,10 @@
|
||||
import * as React from 'react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import * as Rdom from 'react-dom';
|
||||
import { ThemeProvider } from '@primer/react';
|
||||
import { Box, Flash, ThemeProvider } from '@primer/react';
|
||||
import { ToRemoteQueriesMessage } from '../../pure/interface-types';
|
||||
import { AnalysisSummary, RemoteQueryResult } from '../shared/remote-query-result';
|
||||
import * as octicons from '../../view/octicons';
|
||||
|
||||
import { MAX_RAW_RESULTS } from '../shared/result-limits';
|
||||
import { vscode } from '../../view/vscode-api';
|
||||
|
||||
import SectionTitle from './SectionTitle';
|
||||
@@ -14,10 +13,14 @@ import HorizontalSpace from './HorizontalSpace';
|
||||
import Badge from './Badge';
|
||||
import ViewTitle from './ViewTitle';
|
||||
import DownloadButton from './DownloadButton';
|
||||
import { AnalysisResults } from '../shared/analysis-result';
|
||||
import { AnalysisResults, getAnalysisResultCount } from '../shared/analysis-result';
|
||||
import DownloadSpinner from './DownloadSpinner';
|
||||
import CollapsibleItem from './CollapsibleItem';
|
||||
import { FileSymlinkFileIcon } from '@primer/octicons-react';
|
||||
import { AlertIcon, CodeSquareIcon, FileCodeIcon, RepoIcon, TerminalIcon } from '@primer/octicons-react';
|
||||
import AnalysisAlertResult from './AnalysisAlertResult';
|
||||
import RawResultsTable from './RawResultsTable';
|
||||
import RepositoriesSearch from './RepositoriesSearch';
|
||||
import ActionButton from './ActionButton';
|
||||
|
||||
const numOfReposInContractedMode = 10;
|
||||
|
||||
@@ -26,12 +29,15 @@ const emptyQueryResult: RemoteQueryResult = {
|
||||
queryFileName: '',
|
||||
queryFilePath: '',
|
||||
queryText: '',
|
||||
language: '',
|
||||
workflowRunUrl: '',
|
||||
totalRepositoryCount: 0,
|
||||
affectedRepositoryCount: 0,
|
||||
totalResultCount: 0,
|
||||
executionTimestamp: '',
|
||||
executionDuration: '',
|
||||
analysisSummaries: []
|
||||
analysisSummaries: [],
|
||||
analysisFailures: [],
|
||||
};
|
||||
|
||||
const downloadAnalysisResults = (analysisSummary: AnalysisSummary) => {
|
||||
@@ -48,13 +54,6 @@ const downloadAllAnalysesResults = (query: RemoteQueryResult) => {
|
||||
});
|
||||
};
|
||||
|
||||
const viewAnalysisResults = (analysisSummary: AnalysisSummary) => {
|
||||
vscode.postMessage({
|
||||
t: 'remoteQueryViewAnalysisResults',
|
||||
analysisSummary
|
||||
});
|
||||
};
|
||||
|
||||
const openQueryFile = (queryResult: RemoteQueryResult) => {
|
||||
vscode.postMessage({
|
||||
t: 'openFile',
|
||||
@@ -70,27 +69,60 @@ const openQueryTextVirtualFile = (queryResult: RemoteQueryResult) => {
|
||||
};
|
||||
|
||||
const sumAnalysesResults = (analysesResults: AnalysisResults[]) =>
|
||||
analysesResults.reduce((acc, curr) => acc + curr.results.length, 0);
|
||||
analysesResults.reduce((acc, curr) => acc + getAnalysisResultCount(curr), 0);
|
||||
|
||||
const QueryInfo = (queryResult: RemoteQueryResult) => (
|
||||
<>
|
||||
<VerticalSpace size={1} />
|
||||
{queryResult.totalResultCount} results in {queryResult.totalRepositoryCount} repositories
|
||||
{queryResult.totalResultCount} results from running against {queryResult.totalRepositoryCount} repositories
|
||||
({queryResult.executionDuration}), {queryResult.executionTimestamp}
|
||||
<VerticalSpace size={1} />
|
||||
<span className="vscode-codeql__query-file">{octicons.file}
|
||||
<a className="vscode-codeql__query-file-link" href="#" onClick={() => openQueryFile(queryResult)}>
|
||||
<span>
|
||||
<a className="vscode-codeql__query-info-link" href="#" onClick={() => openQueryFile(queryResult)}>
|
||||
<span> <FileCodeIcon size={16} /> </span>
|
||||
{queryResult.queryFileName}
|
||||
</a>
|
||||
</span>
|
||||
<span>{octicons.codeSquare}
|
||||
<a className="vscode-codeql__query-file-link" href="#" onClick={() => openQueryTextVirtualFile(queryResult)}>
|
||||
query
|
||||
<span>
|
||||
<a className="vscode-codeql__query-info-link" href="#" onClick={() => openQueryTextVirtualFile(queryResult)}>
|
||||
<span> <CodeSquareIcon size={16} /> </span>
|
||||
Query
|
||||
</a>
|
||||
</span>
|
||||
<span>
|
||||
<a className="vscode-codeql__query-info-link" href={queryResult.workflowRunUrl}>
|
||||
<span> <TerminalIcon size={16} /> </span>
|
||||
Logs
|
||||
</a>
|
||||
</span>
|
||||
</>
|
||||
);
|
||||
|
||||
const Failures = (queryResult: RemoteQueryResult) => {
|
||||
if (queryResult.analysisFailures.length === 0) {
|
||||
return <></>;
|
||||
}
|
||||
return (
|
||||
<>
|
||||
<VerticalSpace size={3} />
|
||||
<Flash variant="danger">
|
||||
{queryResult.analysisFailures.map((f, i) => (
|
||||
<div key={i}>
|
||||
<p className="vscode-codeql__analysis-failure">
|
||||
<AlertIcon size={16} />
|
||||
<b>{f.nwo}: </b>
|
||||
{f.error}
|
||||
</p>
|
||||
{
|
||||
i === queryResult.analysisFailures.length - 1 ? <></> : <VerticalSpace size={1} />
|
||||
}
|
||||
</div>
|
||||
))}
|
||||
</Flash>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
const SummaryTitleWithResults = ({
|
||||
queryResult,
|
||||
analysesResults
|
||||
@@ -118,7 +150,7 @@ const SummaryTitleNoResults = () => (
|
||||
</div>
|
||||
);
|
||||
|
||||
const SummaryItemDownloadAndView = ({
|
||||
const SummaryItemDownload = ({
|
||||
analysisSummary,
|
||||
analysisResults
|
||||
}: {
|
||||
@@ -138,13 +170,7 @@ const SummaryItemDownloadAndView = ({
|
||||
</>;
|
||||
}
|
||||
|
||||
return <>
|
||||
<HorizontalSpace size={2} />
|
||||
<a className="vscode-codeql__analysis-result-file-link"
|
||||
onClick={() => viewAnalysisResults(analysisSummary)} >
|
||||
<FileSymlinkFileIcon size={16} />
|
||||
</a>
|
||||
</>;
|
||||
return <></>;
|
||||
};
|
||||
|
||||
const SummaryItem = ({
|
||||
@@ -155,11 +181,11 @@ const SummaryItem = ({
|
||||
analysisResults: AnalysisResults | undefined
|
||||
}) => (
|
||||
<span>
|
||||
<span className="vscode-codeql__analysis-item">{octicons.repo}</span>
|
||||
<span className="vscode-codeql__analysis-item"><RepoIcon size={16} /></span>
|
||||
<span className="vscode-codeql__analysis-item">{analysisSummary.nwo}</span>
|
||||
<span className="vscode-codeql__analysis-item"><Badge text={analysisSummary.resultCount.toString()} /></span>
|
||||
<span className="vscode-codeql__analysis-item">
|
||||
<SummaryItemDownloadAndView
|
||||
<SummaryItemDownload
|
||||
analysisSummary={analysisSummary}
|
||||
analysisResults={analysisResults} />
|
||||
</span>
|
||||
@@ -186,7 +212,7 @@ const Summary = ({
|
||||
analysesResults={analysesResults} />
|
||||
}
|
||||
|
||||
<ul className="vscode-codeql__analysis-summaries-list">
|
||||
<ul className="vscode-codeql__flat-list">
|
||||
{queryResult.analysisSummaries.slice(0, numOfReposToShow).map((summary, i) =>
|
||||
<li key={summary.nwo} className="vscode-codeql__analysis-summaries-list-item">
|
||||
<SummaryItem
|
||||
@@ -213,33 +239,79 @@ const AnalysesResultsTitle = ({ totalAnalysesResults, totalResults }: { totalAna
|
||||
return <SectionTitle>{totalAnalysesResults}/{totalResults} results</SectionTitle>;
|
||||
};
|
||||
|
||||
const AnalysesResultsDescription = ({ totalAnalysesResults, totalResults }: { totalAnalysesResults: number, totalResults: number }) => {
|
||||
if (totalAnalysesResults < totalResults) {
|
||||
return <>
|
||||
<VerticalSpace size={1} />
|
||||
Some results haven't been downloaded automatically because of their size or because enough were downloaded already.
|
||||
Download them manually from the list above if you want to see them here.
|
||||
</>;
|
||||
}
|
||||
const exportResults = () => {
|
||||
vscode.postMessage({
|
||||
t: 'remoteQueryExportResults',
|
||||
});
|
||||
};
|
||||
|
||||
return <></>;
|
||||
const AnalysesResultsDescription = ({
|
||||
queryResult,
|
||||
analysesResults,
|
||||
}: {
|
||||
queryResult: RemoteQueryResult
|
||||
analysesResults: AnalysisResults[],
|
||||
}) => {
|
||||
const showDownloadsMessage = queryResult.analysisSummaries.some(
|
||||
s => !analysesResults.some(a => a.nwo === s.nwo && a.status === 'Completed'));
|
||||
const downloadsMessage = <>
|
||||
<VerticalSpace size={1} />
|
||||
Some results haven't been downloaded automatically because of their size or because enough were downloaded already.
|
||||
Download them manually from the list above if you want to see them here.
|
||||
</>;
|
||||
|
||||
const showMaxResultsMessage = analysesResults.some(a => a.rawResults?.capped);
|
||||
const maxRawResultsMessage = <>
|
||||
<VerticalSpace size={1} />
|
||||
Some repositories have more than {MAX_RAW_RESULTS} results. We will only show you up to
|
||||
{MAX_RAW_RESULTS} results for each repository.
|
||||
</>;
|
||||
|
||||
return (
|
||||
<>
|
||||
{showDownloadsMessage && downloadsMessage}
|
||||
{showMaxResultsMessage && maxRawResultsMessage}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
const RepoAnalysisResults = (analysisResults: AnalysisResults) => {
|
||||
const numOfResults = getAnalysisResultCount(analysisResults);
|
||||
const title = <>
|
||||
{analysisResults.nwo}
|
||||
<Badge text={analysisResults.results.length.toString()} />
|
||||
<Badge text={numOfResults.toString()} />
|
||||
</>;
|
||||
|
||||
return (
|
||||
<CollapsibleItem title={title}>
|
||||
{analysisResults.results.map((r, i) => (<p key={i} >{r.message}</p>))}
|
||||
<ul className="vscode-codeql__flat-list" >
|
||||
{analysisResults.interpretedResults.map((r, i) =>
|
||||
<li key={i}>
|
||||
<AnalysisAlertResult alert={r} />
|
||||
<VerticalSpace size={2} />
|
||||
</li>)}
|
||||
</ul>
|
||||
{analysisResults.rawResults &&
|
||||
<RawResultsTable
|
||||
schema={analysisResults.rawResults.schema}
|
||||
results={analysisResults.rawResults.resultSet}
|
||||
fileLinkPrefix={analysisResults.rawResults.fileLinkPrefix} />
|
||||
}
|
||||
</CollapsibleItem>
|
||||
);
|
||||
};
|
||||
|
||||
const AnalysesResults = ({ analysesResults, totalResults }: { analysesResults: AnalysisResults[], totalResults: number }) => {
|
||||
const AnalysesResults = ({
|
||||
queryResult,
|
||||
analysesResults,
|
||||
totalResults
|
||||
}: {
|
||||
queryResult: RemoteQueryResult,
|
||||
analysesResults: AnalysisResults[],
|
||||
totalResults: number
|
||||
}) => {
|
||||
const totalAnalysesResults = sumAnalysesResults(analysesResults);
|
||||
const [filterValue, setFilterValue] = React.useState('');
|
||||
|
||||
if (totalResults === 0) {
|
||||
return <></>;
|
||||
@@ -248,17 +320,33 @@ const AnalysesResults = ({ analysesResults, totalResults }: { analysesResults: A
|
||||
return (
|
||||
<>
|
||||
<VerticalSpace size={2} />
|
||||
<AnalysesResultsTitle
|
||||
totalAnalysesResults={totalAnalysesResults}
|
||||
totalResults={totalResults} />
|
||||
<Box display="flex">
|
||||
<Box flexGrow={1}>
|
||||
<AnalysesResultsTitle
|
||||
totalAnalysesResults={totalAnalysesResults}
|
||||
totalResults={totalResults} />
|
||||
</Box>
|
||||
<Box>
|
||||
<ActionButton text="Export all" onClick={exportResults}></ActionButton>
|
||||
</Box>
|
||||
</Box>
|
||||
<AnalysesResultsDescription
|
||||
totalAnalysesResults={totalAnalysesResults}
|
||||
totalResults={totalResults} />
|
||||
<ul className="vscode-codeql__analyses-results-list">
|
||||
{analysesResults.filter(a => a.results.length > 0).map(r =>
|
||||
<li key={r.nwo} className="vscode-codeql__analyses-results-list-item">
|
||||
<RepoAnalysisResults {...r} />
|
||||
</li>)}
|
||||
queryResult={queryResult}
|
||||
analysesResults={analysesResults} />
|
||||
|
||||
<VerticalSpace size={2} />
|
||||
<RepositoriesSearch
|
||||
filterValue={filterValue}
|
||||
setFilterValue={setFilterValue} />
|
||||
|
||||
<ul className="vscode-codeql__flat-list">
|
||||
{analysesResults
|
||||
.filter(a => a.interpretedResults.length > 0 || a.rawResults)
|
||||
.filter(a => a.nwo.toLowerCase().includes(filterValue.toLowerCase()))
|
||||
.map(r =>
|
||||
<li key={r.nwo} className="vscode-codeql__analyses-results-list-item">
|
||||
<RepoAnalysisResults {...r} />
|
||||
</li>)}
|
||||
</ul>
|
||||
</>
|
||||
);
|
||||
@@ -289,17 +377,21 @@ export function RemoteQueries(): JSX.Element {
|
||||
return <div>Waiting for results to load.</div>;
|
||||
}
|
||||
|
||||
const showAnalysesResults = false;
|
||||
|
||||
try {
|
||||
return <div>
|
||||
<ThemeProvider>
|
||||
<ViewTitle>{queryResult.queryTitle}</ViewTitle>
|
||||
<QueryInfo {...queryResult} />
|
||||
<Summary queryResult={queryResult} analysesResults={analysesResults} />
|
||||
{showAnalysesResults && <AnalysesResults analysesResults={analysesResults} totalResults={queryResult.totalResultCount} />}
|
||||
</ThemeProvider>
|
||||
</div>;
|
||||
return (
|
||||
<div className="vscode-codeql__remote-queries">
|
||||
<ThemeProvider colorMode="auto">
|
||||
<ViewTitle>{queryResult.queryTitle}</ViewTitle>
|
||||
<QueryInfo {...queryResult} />
|
||||
<Failures {...queryResult} />
|
||||
<Summary queryResult={queryResult} analysesResults={analysesResults} />
|
||||
<AnalysesResults
|
||||
queryResult={queryResult}
|
||||
analysesResults={analysesResults}
|
||||
totalResults={queryResult.totalResultCount} />
|
||||
</ThemeProvider>
|
||||
</div>
|
||||
);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
return <div>There was an error displaying the view.</div>;
|
||||
|
||||
@@ -0,0 +1,30 @@
import * as React from 'react';
import { ChangeEvent } from 'react';
import { TextInput } from '@primer/react';
import { SearchIcon } from '@primer/octicons-react';

interface RepositoriesSearchProps {
  filterValue: string;
  setFilterValue: (value: string) => void;
}

const RepositoriesSearch = ({ filterValue, setFilterValue }: RepositoriesSearchProps) => {
  return <>
    <TextInput
      block
      sx={{
        backgroundColor: 'var(--vscode-editor-background);',
        color: 'var(--vscode-editor-foreground);',
        width: 'calc(100% - 14px)',
      }}
      leadingVisual={SearchIcon}
      aria-label="Repository search"
      name="repository-search"
      placeholder="Filter by repository owner/name"
      value={filterValue}
      onChange={(e: ChangeEvent) => setFilterValue((e.target as HTMLInputElement).value)}
    />
  </>;
};

export default RepositoriesSearch;
30
extensions/ql-vscode/src/remote-queries/view/TextButton.tsx
Normal file
@@ -0,0 +1,30 @@
import * as React from 'react';
import styled from 'styled-components';

type Size = 'x-small' | 'small' | 'medium' | 'large' | 'x-large';

const StyledButton = styled.button<{ size: Size }>`
  background: none;
  color: var(--vscode-textLink-foreground);
  border: none;
  cursor: pointer;
  font-size: ${props => props.size};
`;

const TextButton = ({
  size,
  onClick,
  children
}: {
  size: Size,
  onClick: () => void,
  children: React.ReactNode
}) => (
  <StyledButton
    size={size}
    onClick={onClick}>
    {children}
  </StyledButton>
);

export default TextButton;
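For orientation (not part of the changeset), a minimal usage sketch of the new TextButton component; the surrounding component name and click handler below are illustrative assumptions rather than code from this diff:

import * as React from 'react';
import TextButton from './TextButton';

// Hypothetical example: a link-styled button that fires a callback on click.
const ExampleActions = () => (
  <TextButton size="x-small" onClick={() => console.log('View all clicked')}>
    View all
  </TextButton>
);

export default ExampleActions;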
@@ -1,26 +1,14 @@
|
||||
.octicon {
|
||||
fill: var(--vscode-editor-foreground);
|
||||
height: 1.2em;
|
||||
width: 1.2em;
|
||||
vertical-align: middle;
|
||||
display: inline-block;
|
||||
.vscode-codeql__remote-queries {
|
||||
max-width: 55em;
|
||||
}
|
||||
|
||||
.octicon-light {
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.vscode-codeql__query-file {
|
||||
padding-right: 1em;
|
||||
}
|
||||
|
||||
.vscode-codeql__query-file-link {
|
||||
.vscode-codeql__query-info-link {
|
||||
text-decoration: none;
|
||||
padding-left: 0.3em;
|
||||
padding-right: 1em;
|
||||
color: var(--vscode-editor-foreground);
|
||||
}
|
||||
|
||||
.vscode-codeql__query-file-link:hover {
|
||||
.vscode-codeql__query-info-link:hover {
|
||||
color: var(--vscode-editor-foreground);
|
||||
}
|
||||
|
||||
@@ -28,22 +16,10 @@
|
||||
padding-top: 1.5em;
|
||||
}
|
||||
|
||||
.vscode-codeql__analysis-summaries-list {
|
||||
list-style-type: none;
|
||||
margin: 0;
|
||||
padding: 0.5em 0 0 0;
|
||||
}
|
||||
|
||||
.vscode-codeql__analysis-summaries-list-item {
|
||||
margin-top: 0.5em;
|
||||
}
|
||||
|
||||
.vscode-codeql__analyses-results-list {
|
||||
list-style-type: none;
|
||||
margin: 0;
|
||||
padding: 0.5em 0 0 0;
|
||||
}
|
||||
|
||||
.vscode-codeql__analyses-results-list-item {
|
||||
padding-top: 0.5em;
|
||||
}
|
||||
@@ -61,6 +37,15 @@
|
||||
font-size: x-small;
|
||||
}
|
||||
|
||||
.vscode-codeql__analysis-result-file-link {
|
||||
vertical-align: middle;
|
||||
.vscode-codeql__analysis-failure {
|
||||
margin: 0;
|
||||
font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas,
|
||||
Liberation Mono, monospace;
|
||||
color: var(--vscode-editor-foreground);
|
||||
}
|
||||
|
||||
.vscode-codeql__flat-list {
|
||||
list-style-type: none;
|
||||
margin: 0;
|
||||
padding: 0.5em 0 0 0;
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
"moduleResolution": "node",
|
||||
"target": "es6",
|
||||
"outDir": "out",
|
||||
"lib": ["es6", "dom"],
|
||||
"lib": ["ES2021", "dom"],
|
||||
"jsx": "react",
|
||||
"sourceMap": true,
|
||||
"rootDir": "..",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import * as crypto from 'crypto';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import * as path from 'path';
|
||||
import { nanoid } from 'nanoid';
|
||||
import {
|
||||
CancellationToken,
|
||||
@@ -10,45 +10,50 @@ import {
|
||||
TextDocument,
|
||||
TextEditor,
|
||||
Uri,
|
||||
window,
|
||||
workspace
|
||||
window
|
||||
} from 'vscode';
|
||||
import { ErrorCodes, ResponseError } from 'vscode-languageclient';
|
||||
|
||||
import * as cli from './cli';
|
||||
import * as config from './config';
|
||||
import { DatabaseItem, DatabaseManager } from './databases';
|
||||
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage, tryGetQueryMetadata } from './helpers';
|
||||
import {
|
||||
createTimestampFile,
|
||||
getOnDiskWorkspaceFolders,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogWarningMessage,
|
||||
tryGetQueryMetadata,
|
||||
upgradesTmpDir
|
||||
} from './helpers';
|
||||
import { ProgressCallback, UserCancellationException } from './commandRunner';
|
||||
import { DatabaseInfo, QueryMetadata } from './pure/interface-types';
|
||||
import { logger } from './logging';
|
||||
import * as messages from './pure/messages';
|
||||
import { InitialQueryInfo } from './query-results';
|
||||
import { InitialQueryInfo, LocalQueryInfo } from './query-results';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import { isQuickQueryPath } from './quick-query';
|
||||
import { compileDatabaseUpgradeSequence, hasNondestructiveUpgradeCapabilities, upgradeDatabaseExplicit } from './upgrades';
|
||||
import { ensureMetadataIsComplete } from './query-results';
|
||||
import { SELECT_QUERY_NAME } from './contextual/locationFinder';
|
||||
import { DecodedBqrsChunk } from './pure/bqrs-cli-types';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
/**
|
||||
* run-queries.ts
|
||||
* -------------
|
||||
* --------------
|
||||
*
|
||||
* Compiling and running QL queries.
|
||||
*/
|
||||
|
||||
export const tmpDir = tmp.dirSync({ prefix: 'queries_', keep: false, unsafeCleanup: true });
|
||||
export const upgradesTmpDir = tmp.dirSync({ dir: tmpDir.name, prefix: 'upgrades_', keep: false, unsafeCleanup: true });
|
||||
export const tmpDirDisposal = {
|
||||
dispose: () => {
|
||||
upgradesTmpDir.removeCallback();
|
||||
tmpDir.removeCallback();
|
||||
}
|
||||
};
|
||||
|
||||
// exported for testing
|
||||
export const queriesDir = path.join(tmpDir.name, 'queries');
|
||||
/**
|
||||
* Information about which query will be run. `quickEvalPosition` and `quickEvalText`
|
||||
* are only filled in if the query is a quick query.
|
||||
*/
|
||||
interface SelectedQuery {
|
||||
queryPath: string;
|
||||
quickEvalPosition?: messages.Position;
|
||||
quickEvalText?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* A collection of evaluation-time information about a query,
|
||||
@@ -57,14 +62,13 @@ export const queriesDir = path.join(tmpDir.name, 'queries');
|
||||
* output and results.
|
||||
*/
|
||||
export class QueryEvaluationInfo {
|
||||
readonly querySaveDir: string;
|
||||
|
||||
/**
|
||||
* Note that in the {@link FullQueryInfo.slurp} method, we create a QueryEvaluationInfo instance
|
||||
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||
*/
|
||||
constructor(
|
||||
public readonly id: string,
|
||||
public readonly querySaveDir: string,
|
||||
public readonly dbItemPath: string,
|
||||
private readonly databaseHasMetadataFile: boolean,
|
||||
public readonly queryDbscheme: string, // the dbscheme file the query expects, based on library path resolution
|
||||
@@ -72,7 +76,7 @@ export class QueryEvaluationInfo {
|
||||
public readonly metadata?: QueryMetadata,
|
||||
public readonly templates?: messages.TemplateDefinitions
|
||||
) {
|
||||
this.querySaveDir = path.join(queriesDir, this.id);
|
||||
/**/
|
||||
}
|
||||
|
||||
get dilPath() {
|
||||
@@ -87,10 +91,30 @@ export class QueryEvaluationInfo {
|
||||
return path.join(this.querySaveDir, 'compiledQuery.qlo');
|
||||
}
|
||||
|
||||
get logPath() {
|
||||
return qsClient.findQueryLogFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get evalLogPath() {
|
||||
return qsClient.findQueryEvalLogFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get evalLogSummaryPath() {
|
||||
return qsClient.findQueryEvalLogSummaryFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get evalLogEndSummaryPath() {
|
||||
return qsClient.findQueryEvalLogEndSummaryFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get resultsPaths() {
|
||||
return {
|
||||
resultsPath: path.join(this.querySaveDir, 'results.bqrs'),
|
||||
interpretedResultsPath: path.join(this.querySaveDir, 'interpretedResults.sarif'),
|
||||
interpretedResultsPath: path.join(this.querySaveDir,
|
||||
this.metadata?.kind === 'graph'
|
||||
? 'graphResults'
|
||||
: 'interpretedResults.sarif'
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -98,6 +122,14 @@ export class QueryEvaluationInfo {
|
||||
return path.join(this.querySaveDir, `sortedResults-${resultSetName}.bqrs`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a file in the query directory that indicates when this query was created.
|
||||
* This is important for keeping track of when queries should be removed.
|
||||
*/
|
||||
async createTimestampFile() {
|
||||
await createTimestampFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
async run(
|
||||
qs: qsClient.QueryServerClient,
|
||||
upgradeQlo: string | undefined,
|
||||
@@ -105,6 +137,7 @@ export class QueryEvaluationInfo {
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
queryInfo?: LocalQueryInfo,
|
||||
): Promise<messages.EvaluationResult> {
|
||||
if (!dbItem.contents || dbItem.error) {
|
||||
throw new Error('Can\'t run query on invalid database.');
|
||||
@@ -112,7 +145,12 @@ export class QueryEvaluationInfo {
|
||||
|
||||
let result: messages.EvaluationResult | null = null;
|
||||
|
||||
const callbackId = qs.registerCallback(res => { result = res; });
|
||||
const callbackId = qs.registerCallback(res => {
|
||||
result = {
|
||||
...res,
|
||||
logFileLocation: this.logPath
|
||||
};
|
||||
});
|
||||
|
||||
const availableMlModelUris: messages.MlModel[] = availableMlModels.map(model => ({ uri: Uri.file(model.path).toString(true) }));
|
||||
|
||||
@@ -126,10 +164,18 @@ export class QueryEvaluationInfo {
|
||||
id: callbackId,
|
||||
timeoutSecs: qs.config.timeoutSecs,
|
||||
};
|
||||
|
||||
const dataset: messages.Dataset = {
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default'
|
||||
};
|
||||
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
|
||||
await qs.sendRequest(messages.startLog, {
|
||||
db: dataset,
|
||||
logPath: this.evalLogPath,
|
||||
});
|
||||
|
||||
}
|
||||
const params: messages.EvaluateQueriesParams = {
|
||||
db: dataset,
|
||||
evaluateId: callbackId,
|
||||
@@ -139,8 +185,39 @@ export class QueryEvaluationInfo {
|
||||
};
|
||||
try {
|
||||
await qs.sendRequest(messages.runQueries, params, token, progress);
|
||||
if (qs.config.customLogDirectory) {
|
||||
void showAndLogWarningMessage(
|
||||
`Custom log directories are no longer supported. The "codeQL.runningQueries.customLogDirectory" setting is deprecated. Unset the setting to stop seeing this message. Query logs saved to ${this.logPath}.`
|
||||
);
|
||||
}
|
||||
} finally {
|
||||
qs.unRegisterCallback(callbackId);
|
||||
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
|
||||
await qs.sendRequest(messages.endLog, {
|
||||
db: dataset,
|
||||
logPath: this.evalLogPath,
|
||||
});
|
||||
if (await this.hasEvalLog()) {
|
||||
queryInfo.evalLogLocation = this.evalLogPath;
|
||||
void qs.cliServer.generateLogSummary(this.evalLogPath, this.evalLogSummaryPath, this.evalLogEndSummaryPath)
|
||||
.then(() => {
|
||||
queryInfo.evalLogSummaryLocation = this.evalLogSummaryPath;
|
||||
fs.readFile(this.evalLogEndSummaryPath, (err, buffer) => {
|
||||
if (err) {
|
||||
throw new Error(`Could not read structured evaluator log end of summary file at ${this.evalLogEndSummaryPath}.`);
|
||||
}
|
||||
void qs.logger.log(' --- Evaluator Log Summary --- ');
|
||||
void qs.logger.log(buffer.toString());
|
||||
});
|
||||
})
|
||||
|
||||
.catch(err => {
|
||||
void showAndLogWarningMessage(`Failed to generate structured evaluator log summary. Reason: ${err.message}`);
|
||||
});
|
||||
} else {
|
||||
void showAndLogWarningMessage(`Failed to write structured evaluator log to ${this.evalLogPath}.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result || {
|
||||
evaluationTime: 0,
|
||||
@@ -183,7 +260,7 @@ export class QueryEvaluationInfo {
|
||||
|
||||
compiled = await qs.sendRequest(messages.compileQuery, params, token, progress);
|
||||
} finally {
|
||||
void qs.logger.log(' - - - COMPILATION DONE - - - ');
|
||||
void qs.logger.log(' - - - COMPILATION DONE - - - ', { additionalLogLocation: this.logPath });
|
||||
}
|
||||
return (compiled?.messages || []).filter(msg => msg.severity === messages.Severity.ERROR);
|
||||
}
|
||||
@@ -197,16 +274,21 @@ export class QueryEvaluationInfo {
|
||||
return false;
|
||||
}
|
||||
|
||||
const hasKind = !!this.metadata?.kind;
|
||||
const kind = this.metadata?.kind;
|
||||
const hasKind = !!kind;
|
||||
if (!hasKind) {
|
||||
void logger.log('Cannot produce interpreted results since the query does not have @kind metadata.');
|
||||
return false;
|
||||
}
|
||||
|
||||
// Graph queries only return interpreted results if we are in canary mode.
|
||||
if (kind === 'graph') {
|
||||
return config.isCanary();
|
||||
}
|
||||
|
||||
// table is the default query kind. It does not produce interpreted results.
|
||||
// any query kind that is not table can, in principle, produce interpreted results.
|
||||
const isTable = hasKind && this.metadata?.kind === 'table';
|
||||
return !isTable;
|
||||
return kind !== 'table';
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -230,6 +312,10 @@ export class QueryEvaluationInfo {
|
||||
return fs.pathExists(this.csvPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the path to the DIL file produced by this query. If the query has not yet produced DIL,
|
||||
* this will first create the DIL file and then return the path to the DIL file.
|
||||
*/
|
||||
async ensureDilPath(qs: qsClient.QueryServerClient): Promise<string> {
|
||||
if (await this.hasDil()) {
|
||||
return this.dilPath;
|
||||
@@ -245,6 +331,17 @@ export class QueryEvaluationInfo {
|
||||
return this.dilPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if this query already has a completed structured evaluator log
|
||||
*/
|
||||
async hasEvalLog(): Promise<boolean> {
|
||||
return fs.pathExists(this.evalLogPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the CSV file containing the results of this query. This will only be called if the query
|
||||
* does not have interpreted results and the CSV file does not already exist.
|
||||
*/
|
||||
async exportCsvResults(qs: qsClient.QueryServerClient, csvPath: string, onFinish: () => void): Promise<void> {
|
||||
let stopDecoding = false;
|
||||
const out = fs.createWriteStream(csvPath);
|
||||
@@ -261,14 +358,21 @@ export class QueryEvaluationInfo {
|
||||
pageSize: 100,
|
||||
offset: nextOffset,
|
||||
});
|
||||
for (const tuple of chunk.tuples)
|
||||
for (const tuple of chunk.tuples) {
|
||||
out.write(tuple.join(',') + '\n');
|
||||
}
|
||||
nextOffset = chunk.next;
|
||||
}
|
||||
out.end();
|
||||
}
|
||||
|
||||
async ensureCsvProduced(qs: qsClient.QueryServerClient, dbm: DatabaseManager): Promise<string> {
|
||||
/**
|
||||
* Returns the path to the CSV alerts interpretation of this query's results. If CSV results have
|
||||
* not yet been produced, this will first create the CSV results and then return the path.
|
||||
*
|
||||
* This method only works for queries with interpreted results.
|
||||
*/
|
||||
async ensureCsvAlerts(qs: qsClient.QueryServerClient, dbm: DatabaseManager): Promise<string> {
|
||||
if (await this.hasCsv()) {
|
||||
return this.csvPath;
|
||||
}
|
||||
@@ -291,6 +395,13 @@ export class QueryEvaluationInfo {
|
||||
await qs.cliServer.generateResultsCsv(ensureMetadataIsComplete(this.metadata), this.resultsPaths.resultsPath, this.csvPath, sourceInfo);
|
||||
return this.csvPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans this query's results directory.
|
||||
*/
|
||||
async deleteQuery(): Promise<void> {
|
||||
await fs.remove(this.querySaveDir);
|
||||
}
|
||||
}
|
||||
|
||||
export interface QueryWithResults {
|
||||
@@ -324,9 +435,7 @@ export async function clearCacheInDatabase(
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param filePath This needs to be equivalent to java Path.toRealPath(NO_FOLLOW_LINKS)
|
||||
*
|
||||
* @param filePath This needs to be equivalent to Java's `Path.toRealPath(NO_FOLLOW_LINKS)`
|
||||
*/
|
||||
async function convertToQlPath(filePath: string): Promise<string> {
|
||||
if (process.platform === 'win32') {
|
||||
@@ -372,9 +481,9 @@ async function getSelectedPosition(editor: TextEditor, range?: Range): Promise<m
|
||||
|
||||
/**
|
||||
* Compare the dbscheme implied by the query `query` and that of the current database.
|
||||
* If they are compatible, do nothing.
|
||||
* If they are incompatible but the database can be upgraded, suggest that upgrade.
|
||||
* If they are incompatible and the database cannot be upgraded, throw an error.
|
||||
* - If they are compatible, do nothing.
|
||||
* - If they are incompatible but the database can be upgraded, suggest that upgrade.
|
||||
* - If they are incompatible and the database cannot be upgraded, throw an error.
|
||||
*/
|
||||
async function checkDbschemeCompatibility(
|
||||
cliServer: cli.CodeQLCliServer,
|
||||
@@ -422,7 +531,9 @@ async function checkDbschemeCompatibility(
|
||||
}
|
||||
|
||||
function reportNoUpgradePath(qlProgram: messages.QlProgram, query: QueryEvaluationInfo): void {
|
||||
throw new Error(`Query ${qlProgram.queryPath} expects database scheme ${query.queryDbscheme}, but the current database has a different scheme, and no database upgrades are available. The current database scheme may be newer than the CodeQL query libraries in your workspace.\n\nPlease try using a newer version of the query libraries.`);
|
||||
throw new Error(
|
||||
`Query ${qlProgram.queryPath} expects database scheme ${query.queryDbscheme}, but the current database has a different scheme, and no database upgrades are available. The current database scheme may be newer than the CodeQL query libraries in your workspace.\n\nPlease try using a newer version of the query libraries.`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -437,12 +548,23 @@ async function compileNonDestructiveUpgrade(
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
): Promise<string> {
|
||||
const searchPath = getOnDiskWorkspaceFolders();
|
||||
|
||||
if (!dbItem?.contents?.dbSchemeUri) {
|
||||
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||
}
|
||||
const { scripts, matchesTarget } = await qs.cliServer.resolveUpgrades(dbItem.contents.dbSchemeUri.fsPath, searchPath, true, query.queryDbscheme);
|
||||
|
||||
// When packaging is used, dependencies may exist outside of the workspace and they are always on the resolved search path.
|
||||
// When packaging is not used, all dependencies are in the workspace.
|
||||
const upgradesPath = (await qs.cliServer.cliConstraints.supportsPackaging())
|
||||
? qlProgram.libraryPath
|
||||
: getOnDiskWorkspaceFolders();
|
||||
|
||||
const { scripts, matchesTarget } = await qs.cliServer.resolveUpgrades(
|
||||
dbItem.contents.dbSchemeUri.fsPath,
|
||||
upgradesPath,
|
||||
true,
|
||||
query.queryDbscheme
|
||||
);
|
||||
|
||||
if (!matchesTarget) {
|
||||
reportNoUpgradePath(qlProgram, query);
|
||||
@@ -456,7 +578,6 @@ async function compileNonDestructiveUpgrade(
|
||||
qlProgram.dbschemePath = query.queryDbscheme;
|
||||
// We are new enough that we will always support single file upgrades.
|
||||
return result.compiledUpgrade;
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -501,12 +622,6 @@ async function promptUserToSaveChanges(document: TextDocument): Promise<boolean>
|
||||
return false;
|
||||
}
|
||||
|
||||
type SelectedQuery = {
|
||||
queryPath: string;
|
||||
quickEvalPosition?: messages.Position;
|
||||
quickEvalText?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Determines which QL file to run during an invocation of `Run Query` or `Quick Evaluation`, as follows:
|
||||
* - If the command was called by clicking on a file, then use that file.
|
||||
@@ -517,12 +632,19 @@ type SelectedQuery = {
|
||||
* @param selectedResourceUri The selected resource when the command was run.
|
||||
* @param quickEval Whether the command being run is `Quick Evaluation`.
|
||||
*/
|
||||
export async function determineSelectedQuery(selectedResourceUri: Uri | undefined, quickEval: boolean, range?: Range): Promise<SelectedQuery> {
|
||||
export async function determineSelectedQuery(
|
||||
selectedResourceUri: Uri | undefined,
|
||||
quickEval: boolean,
|
||||
range?: Range
|
||||
): Promise<SelectedQuery> {
|
||||
const editor = window.activeTextEditor;
|
||||
|
||||
// Choose which QL file to use.
|
||||
let queryUri: Uri;
|
||||
if (selectedResourceUri === undefined) {
|
||||
if (selectedResourceUri) {
|
||||
// A resource was passed to the command handler, so use it.
|
||||
queryUri = selectedResourceUri;
|
||||
} else {
|
||||
// No resource was passed to the command handler, so obtain it from the active editor.
|
||||
// This usually happens when the command is called from the Command Palette.
|
||||
if (editor === undefined) {
|
||||
@@ -530,9 +652,6 @@ export async function determineSelectedQuery(selectedResourceUri: Uri | undefine
|
||||
} else {
|
||||
queryUri = editor.document.uri;
|
||||
}
|
||||
} else {
|
||||
// A resource was passed to the command handler, so use it.
|
||||
queryUri = selectedResourceUri;
|
||||
}
|
||||
|
||||
if (queryUri.scheme !== 'file') {
|
||||
@@ -588,9 +707,11 @@ export async function compileAndRunQueryAgainstDatabase(
|
||||
qs: qsClient.QueryServerClient,
|
||||
dbItem: DatabaseItem,
|
||||
initialInfo: InitialQueryInfo,
|
||||
queryStorageDir: string,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
templates?: messages.TemplateDefinitions,
|
||||
queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
|
||||
): Promise<QueryWithResults> {
|
||||
if (!dbItem.contents || !dbItem.contents.dbSchemeUri) {
|
||||
throw new Error(`Database ${dbItem.databaseUri} does not have a CodeQL database scheme.`);
|
||||
@@ -632,13 +753,7 @@ export async function compileAndRunQueryAgainstDatabase(
|
||||
const metadata = await tryGetQueryMetadata(cliServer, qlProgram.queryPath);
|
||||
|
||||
let availableMlModels: cli.MlModelInfo[] = [];
|
||||
// The `capabilities.untrustedWorkspaces.restrictedConfigurations` entry in package.json doesn't
|
||||
// work with hidden settings, so we manually check that the workspace is trusted before looking at
|
||||
// whether the `shouldInsecurelyLoadMlModelsFromPacks` setting is enabled.
|
||||
if (workspace.isTrusted &&
|
||||
config.isCanary() &&
|
||||
config.shouldInsecurelyLoadMlModelsFromPacks() &&
|
||||
await cliServer.cliConstraints.supportsResolveMlModels()) {
|
||||
if (await cliServer.cliConstraints.supportsResolveMlModels()) {
|
||||
try {
|
||||
availableMlModels = (await cliServer.resolveMlModels(diskWorkspaceFolders)).models;
|
||||
void logger.log(`Found available ML models at the following paths: ${availableMlModels.map(x => `'${x.path}'`).join(', ')}.`);
|
||||
@@ -651,7 +766,7 @@ export async function compileAndRunQueryAgainstDatabase(
|
||||
|
||||
const hasMetadataFile = (await dbItem.hasMetadataFile());
|
||||
const query = new QueryEvaluationInfo(
|
||||
initialInfo.id,
|
||||
path.join(queryStorageDir, initialInfo.id),
|
||||
dbItem.databaseUri.fsPath,
|
||||
hasMetadataFile,
|
||||
packConfig.dbscheme,
|
||||
@@ -659,11 +774,13 @@ export async function compileAndRunQueryAgainstDatabase(
|
||||
metadata,
|
||||
templates
|
||||
);
|
||||
await query.createTimestampFile();
|
||||
|
||||
const upgradeDir = await tmp.dir({ dir: upgradesTmpDir.name, unsafeCleanup: true });
|
||||
let upgradeDir: tmp.DirectoryResult | undefined;
|
||||
try {
|
||||
let upgradeQlo;
|
||||
if (await hasNondestructiveUpgradeCapabilities(qs)) {
|
||||
upgradeDir = await tmp.dir({ dir: upgradesTmpDir, unsafeCleanup: true });
|
||||
upgradeQlo = await compileNonDestructiveUpgrade(qs, upgradeDir, query, qlProgram, dbItem, progress, token);
|
||||
} else {
|
||||
await checkDbschemeCompatibility(cliServer, qs, query, qlProgram, dbItem, progress, token);
|
||||
@@ -680,7 +797,7 @@ export async function compileAndRunQueryAgainstDatabase(
|
||||
}
|
||||
|
||||
if (errors.length === 0) {
|
||||
const result = await query.run(qs, upgradeQlo, availableMlModels, dbItem, progress, token);
|
||||
const result = await query.run(qs, upgradeQlo, availableMlModels, dbItem, progress, token, queryInfo);
|
||||
if (result.resultType !== messages.QueryResultType.SUCCESS) {
|
||||
const message = result.message || 'Failed to run query';
|
||||
void logger.log(message);
|
||||
@@ -699,7 +816,10 @@ export async function compileAndRunQueryAgainstDatabase(
|
||||
// so we include a general description of the problem,
|
||||
// and direct the user to the output window for the detailed compilation messages.
|
||||
// However we don't show quick eval errors there so we need to display them anyway.
|
||||
void qs.logger.log(`Failed to compile query ${initialInfo.queryPath} against database scheme ${qlProgram.dbschemePath}:`);
|
||||
void qs.logger.log(
|
||||
`Failed to compile query ${initialInfo.queryPath} against database scheme ${qlProgram.dbschemePath}:`,
|
||||
{ additionalLogLocation: query.logPath }
|
||||
);
|
||||
|
||||
const formattedMessages: string[] = [];
|
||||
|
||||
@@ -707,7 +827,7 @@ export async function compileAndRunQueryAgainstDatabase(
|
||||
const message = error.message || '[no error message available]';
|
||||
const formatted = `ERROR: ${message} (${error.position.fileName}:${error.position.line}:${error.position.column}:${error.position.endLine}:${error.position.endColumn})`;
|
||||
formattedMessages.push(formatted);
|
||||
void qs.logger.log(formatted);
|
||||
void qs.logger.log(formatted, { additionalLogLocation: query.logPath });
|
||||
}
|
||||
if (initialInfo.isQuickEval && formattedMessages.length <= 2) {
|
||||
// If there are more than 2 error messages, they will not be displayed well in a popup
|
||||
@@ -722,13 +842,26 @@ export async function compileAndRunQueryAgainstDatabase(
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
await upgradeDir.cleanup();
|
||||
await upgradeDir?.cleanup();
|
||||
} catch (e) {
|
||||
void qs.logger.log(`Could not clean up the upgrades dir. Reason: ${e.message || e}`);
|
||||
void qs.logger.log(
|
||||
`Could not clean up the upgrades dir. Reason: ${getErrorMessage(e)}`,
|
||||
{ additionalLogLocation: query.logPath }
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the initial information for a query. This is everything of interest
|
||||
* we know about this query that is available before it is run.
|
||||
*
|
||||
* @param selectedQueryUri The Uri of the document containing the query to be run.
|
||||
* @param databaseInfo The database to run the query against.
|
||||
* @param isQuickEval true if this is a quick evaluation.
|
||||
* @param range the selection range of the query to be run. Only used if isQuickEval is true.
|
||||
* @returns The initial information for the query to be run.
|
||||
*/
|
||||
export async function createInitialQueryInfo(
|
||||
selectedQueryUri: Uri | undefined,
|
||||
databaseInfo: DatabaseInfo,
|
||||
@@ -759,12 +892,14 @@ const compilationFailedErrorTail = ' compilation failed. Please make sure there
|
||||
' and the query and database use the same target language. For more details on the error, go to View > Output,' +
|
||||
' and choose CodeQL Query Server from the dropdown.';
|
||||
|
||||
/**
|
||||
* Create a synthetic result for a query that failed to compile.
|
||||
*/
|
||||
function createSyntheticResult(
|
||||
query: QueryEvaluationInfo,
|
||||
message: string,
|
||||
resultType: number
|
||||
): QueryWithResults {
|
||||
|
||||
return {
|
||||
query,
|
||||
result: {
|
||||
|
||||
@@ -4,17 +4,18 @@ import { parser } from 'stream-json';
|
||||
import { pick } from 'stream-json/filters/Pick';
|
||||
import Assembler = require('stream-json/Assembler');
|
||||
import { chain } from 'stream-chain';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
const DUMMY_TOOL : Sarif.Tool = {driver: {name: ''}};
|
||||
const DUMMY_TOOL: Sarif.Tool = { driver: { name: '' } };
|
||||
|
||||
export async function sarifParser(interpretedResultsPath: string) : Promise<Sarif.Log> {
|
||||
export async function sarifParser(interpretedResultsPath: string): Promise<Sarif.Log> {
|
||||
try {
|
||||
// Parse the SARIF file into token streams, filtering out only the results array.
|
||||
const p = parser();
|
||||
const pipeline = chain([
|
||||
fs.createReadStream(interpretedResultsPath),
|
||||
p,
|
||||
pick({filter: 'runs.0.results'})
|
||||
pick({ filter: 'runs.0.results' })
|
||||
]);
|
||||
|
||||
// Creates JavaScript objects from the token stream
|
||||
@@ -26,23 +27,23 @@ export async function sarifParser(interpretedResultsPath: string) : Promise<Sari
|
||||
pipeline.on('error', (error) => {
|
||||
reject(error);
|
||||
});
|
||||
|
||||
|
||||
asm.on('done', (asm) => {
|
||||
|
||||
const log : Sarif.Log = {
|
||||
version: '2.1.0',
|
||||
const log: Sarif.Log = {
|
||||
version: '2.1.0',
|
||||
runs: [
|
||||
{
|
||||
tool: DUMMY_TOOL,
|
||||
{
|
||||
tool: DUMMY_TOOL,
|
||||
results: asm.current ?? []
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
|
||||
resolve(log);
|
||||
});
|
||||
});
|
||||
} catch (err) {
|
||||
throw new Error(`Parsing output of interpretation failed: ${err.stderr || err}`);
|
||||
} catch (e) {
|
||||
throw new Error(`Parsing output of interpretation failed: ${(e as any).stderr || getErrorMessage(e)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { ConfigurationTarget, Extension, ExtensionContext, ConfigurationChangeEvent } from 'vscode';
|
||||
import TelemetryReporter from 'vscode-extension-telemetry';
|
||||
import { ConfigListener, CANARY_FEATURES, ENABLE_TELEMETRY, GLOBAL_ENABLE_TELEMETRY, LOG_TELEMETRY } from './config';
|
||||
import { ConfigListener, CANARY_FEATURES, ENABLE_TELEMETRY, GLOBAL_ENABLE_TELEMETRY, LOG_TELEMETRY, isIntegrationTestMode } from './config';
|
||||
import * as appInsights from 'applicationinsights';
|
||||
import { logger } from './logging';
|
||||
import { UserCancellationException } from './commandRunner';
|
||||
@@ -162,7 +162,11 @@ export class TelemetryListener extends ConfigListener {
|
||||
if (!this.wasTelemetryRequested()) {
|
||||
// if global telemetry is disabled, avoid showing the dialog or making any changes
|
||||
let result = undefined;
|
||||
if (GLOBAL_ENABLE_TELEMETRY.getValue()) {
|
||||
if (
|
||||
GLOBAL_ENABLE_TELEMETRY.getValue() &&
|
||||
// Avoid showing the dialog if we are in integration test mode.
|
||||
!isIntegrationTestMode()
|
||||
) {
|
||||
// Extension won't start until this completes.
|
||||
result = await showBinaryChoiceWithUrlDialog(
|
||||
'Does the CodeQL Extension by GitHub have your permission to collect usage data and metrics to help us improve CodeQL for VSCode?',
|
||||
|
||||
@@ -76,7 +76,7 @@ export class QLTestAdapterFactory extends DisposableObject {
|
||||
* @param ext The new extension, including the `.`.
|
||||
*/
|
||||
function changeExtension(p: string, ext: string): string {
|
||||
return p.substr(0, p.length - path.extname(p).length) + ext;
|
||||
return p.slice(0, -path.extname(p).length) + ext;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
31
extensions/ql-vscode/src/text-utils.ts
Normal file
@@ -0,0 +1,31 @@
const CONTROL_CODE = '\u001F'.codePointAt(0)!;
const CONTROL_LABEL = '\u2400'.codePointAt(0)!;

/**
 * Converts the given text so that any non-printable characters are replaced.
 * @param label The text to convert.
 * @returns The converted text.
 */
export function convertNonPrintableChars(label: string | undefined) {
  // If the label was empty, use a placeholder instead, so the link is still clickable.
  if (!label) {
    return '[empty string]';
  } else if (label.match(/^\s+$/)) {
    return `[whitespace: "${label}"]`;
  } else {
    /**
     * If the label contains certain non-printable characters, loop through each
     * character and replace it with the corresponding unicode control label.
     */
    const convertedLabelArray: any[] = [];
    for (let i = 0; i < label.length; i++) {
      const labelCheck = label.codePointAt(i)!;
      if (labelCheck <= CONTROL_CODE) {
        convertedLabelArray[i] = String.fromCodePoint(labelCheck + CONTROL_LABEL);
      } else {
        convertedLabelArray[i] = label.charAt(i);
      }
    }
    return convertedLabelArray.join('');
  }
}
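To make the behaviour of convertNonPrintableChars concrete, a small usage sketch (the import path is illustrative; the expected outputs follow from the code above):

import { convertNonPrintableChars } from './text-utils';

// Empty and whitespace-only labels become readable placeholders.
convertNonPrintableChars('');      // => '[empty string]'
convertNonPrintableChars('   ');   // => '[whitespace: "   "]'

// Control characters (code points <= U+001F) are shifted into the
// Unicode Control Pictures block, e.g. a tab (U+0009) renders as '␉' (U+2409).
convertNonPrintableChars('a\tb');  // => 'a␉b'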
@@ -1,10 +1,9 @@
|
||||
import * as vscode from 'vscode';
|
||||
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage } from './helpers';
|
||||
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage, tmpDir } from './helpers';
|
||||
import { ProgressCallback, UserCancellationException } from './commandRunner';
|
||||
import { logger } from './logging';
|
||||
import * as messages from './pure/messages';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import { upgradesTmpDir } from './run-queries';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import * as path from 'path';
|
||||
import * as semver from 'semver';
|
||||
@@ -180,7 +179,7 @@ export async function upgradeDatabaseExplicit(
|
||||
if (finalDbscheme === undefined) {
|
||||
throw new Error('Could not determine target dbscheme to upgrade to.');
|
||||
}
|
||||
const currentUpgradeTmp = await tmp.dir({ dir: upgradesTmpDir.name, prefix: 'upgrade_', keep: false, unsafeCleanup: true });
|
||||
const currentUpgradeTmp = await tmp.dir({ dir: tmpDir.name, prefix: 'upgrade_', keep: false, unsafeCleanup: true });
|
||||
try {
|
||||
let compileUpgradeResult: messages.CompileUpgradeResult;
|
||||
try {
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { renderLocation } from './result-table-utils';
|
||||
import { ColumnValue } from '../pure/bqrs-cli-types';
|
||||
import { CellValue } from '../pure/bqrs-cli-types';
|
||||
|
||||
interface Props {
|
||||
value: ColumnValue;
|
||||
value: CellValue;
|
||||
databaseUri: string;
|
||||
}
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import * as Keys from '../pure/result-keys';
|
||||
import * as octicons from './octicons';
|
||||
import { className, renderLocation, ResultTableProps, zebraStripe, selectableZebraStripe, jumpToLocation, nextSortDirection, emptyQueryResultsMessage } from './result-table-utils';
|
||||
import { onNavigation, NavigationEvent } from './results';
|
||||
import { PathTableResultSet } from '../pure/interface-types';
|
||||
import { InterpretedResultSet, SarifInterpretationData } from '../pure/interface-types';
|
||||
import {
|
||||
parseSarifPlainTextMessage,
|
||||
parseSarifLocation,
|
||||
@@ -15,7 +15,7 @@ import { InterpretedResultsSortColumn, SortDirection, InterpretedResultsSortStat
|
||||
import { vscode } from './vscode-api';
|
||||
import { isWholeFileLoc, isLineColumnLoc } from '../pure/bqrs-utils';
|
||||
|
||||
export type PathTableProps = ResultTableProps & { resultSet: PathTableResultSet };
|
||||
export type PathTableProps = ResultTableProps & { resultSet: InterpretedResultSet<SarifInterpretationData> };
|
||||
export interface PathTableState {
|
||||
expanded: { [k: string]: boolean };
|
||||
selectedPathNode: undefined | Keys.PathNode;
|
||||
@@ -51,7 +51,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
}
|
||||
|
||||
sortClass(column: InterpretedResultsSortColumn): string {
|
||||
const sortState = this.props.resultSet.sortState;
|
||||
const sortState = this.props.resultSet.interpretation.data.sortState;
|
||||
if (sortState !== undefined && sortState.sortBy === column) {
|
||||
return sortState.sortDirection === SortDirection.asc ? 'sort-asc' : 'sort-desc';
|
||||
}
|
||||
@@ -61,7 +61,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
}
|
||||
|
||||
getNextSortState(column: InterpretedResultsSortColumn): InterpretedResultsSortState | undefined {
|
||||
const oldSortState = this.props.resultSet.sortState;
|
||||
const oldSortState = this.props.resultSet.interpretation.data.sortState;
|
||||
const prevDirection = oldSortState && oldSortState.sortBy === column ? oldSortState.sortDirection : undefined;
|
||||
const nextDirection = nextSortDirection(prevDirection, true);
|
||||
return nextDirection === undefined ? undefined :
|
||||
@@ -94,7 +94,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
</thead>;
|
||||
|
||||
const rows: JSX.Element[] = [];
|
||||
const { numTruncatedResults, sourceLocationPrefix } = resultSet;
|
||||
const { numTruncatedResults, sourceLocationPrefix } = resultSet.interpretation;
|
||||
|
||||
function renderRelatedLocations(msg: string, relatedLocations: Sarif.Location[]): JSX.Element[] {
|
||||
const relatedLocationsById: { [k: string]: Sarif.Location } = {};
|
||||
@@ -188,13 +188,13 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
return (e) => this.toggle(e, indices);
|
||||
};
|
||||
|
||||
if (!resultSet.sarif.runs?.[0]?.results?.length) {
|
||||
if (!resultSet.interpretation.data.runs?.[0]?.results?.length) {
|
||||
return this.renderNoResults();
|
||||
}
|
||||
|
||||
let expansionIndex = 0;
|
||||
|
||||
resultSet.sarif.runs[0].results.forEach((result, resultIndex) => {
|
||||
resultSet.interpretation.data.runs[0].results.forEach((result, resultIndex) => {
|
||||
const text = result.message.text || '[no text]';
|
||||
const msg: JSX.Element[] =
|
||||
result.relatedLocations === undefined ?
|
||||
@@ -307,7 +307,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
const { selectedPathNode } = prevState;
|
||||
if (selectedPathNode === undefined) return prevState;
|
||||
|
||||
const path = Keys.getPath(this.props.resultSet.sarif, selectedPathNode);
|
||||
const path = Keys.getPath(this.props.resultSet.interpretation.data, selectedPathNode);
|
||||
if (path === undefined) return prevState;
|
||||
|
||||
const nextIndex = selectedPathNode.pathNodeIndex + event.direction;
|
||||
@@ -318,7 +318,7 @@ export class PathTable extends React.Component<PathTableProps, PathTableState> {
|
||||
return prevState;
|
||||
}
|
||||
|
||||
const loc = parseSarifLocation(sarifLoc, this.props.resultSet.sourceLocationPrefix);
|
||||
const loc = parseSarifLocation(sarifLoc, this.props.resultSet.interpretation.sourceLocationPrefix);
|
||||
if (isNoLocation(loc)) {
|
||||
return prevState;
|
||||
}
|
||||
|
||||
100
extensions/ql-vscode/src/view/graph.tsx
Normal file
@@ -0,0 +1,100 @@
|
||||
import * as React from 'react';
|
||||
import * as d3 from 'd3';
|
||||
import { ResultTableProps } from './result-table-utils';
|
||||
import { InterpretedResultSet, GraphInterpretationData } from '../pure/interface-types';
|
||||
import { graphviz } from 'd3-graphviz';
|
||||
import { jumpToLocation } from './result-table-utils';
|
||||
import { tryGetLocationFromString } from '../pure/bqrs-utils';
|
||||
export type GraphProps = ResultTableProps & { resultSet: InterpretedResultSet<GraphInterpretationData> };
|
||||
|
||||
const graphClassName = 'vscode-codeql__result-tables-graph';
|
||||
const graphId = 'graph-results';
|
||||
export class Graph extends React.Component<GraphProps> {
|
||||
constructor(props: GraphProps) {
|
||||
super(props);
|
||||
}
|
||||
|
||||
public render = (): JSX.Element => {
|
||||
const { resultSet, offset } = this.props;
|
||||
const graphData = resultSet.interpretation?.data?.dot[offset];
|
||||
|
||||
if (!graphData) {
|
||||
return <>
|
||||
<div className={graphClassName}>Graph is not available.</div>
|
||||
</>;
|
||||
}
|
||||
|
||||
return <>
|
||||
<div className={graphClassName}>
|
||||
<strong>Warning:</strong> The Graph Viewer is not a publicly released feature and will crash on large graphs.
|
||||
</div>
|
||||
<div id={graphId} className={graphClassName}><span>Rendering graph...</span></div>
|
||||
</>;
|
||||
};
|
||||
|
||||
public componentDidMount = () => {
|
||||
this.renderGraph();
|
||||
};
|
||||
|
||||
public componentDidUpdate = () => {
|
||||
this.renderGraph();
|
||||
};
|
||||
|
||||
private renderGraph = () => {
|
||||
const { databaseUri, resultSet, offset } = this.props;
|
||||
const graphData = resultSet.interpretation?.data?.dot[offset];
|
||||
|
||||
if (!graphData) {
|
||||
return;
|
||||
}
|
||||
|
||||
const options = {
|
||||
fit: true,
|
||||
fade: false,
|
||||
growEnteringEdges: false,
|
||||
zoom: true,
|
||||
};
|
||||
|
||||
const element = document.querySelector(`#${graphId}`);
|
||||
if (!element) {
|
||||
return;
|
||||
}
|
||||
element.firstChild?.remove();
|
||||
|
||||
const color = getComputedStyle(element).color;
|
||||
const backgroundColor = getComputedStyle(element).backgroundColor;
|
||||
const borderColor = getComputedStyle(element).borderColor;
|
||||
let firstPolygon = true;
|
||||
|
||||
graphviz(`#${graphId}`)
|
||||
.options(options)
|
||||
.attributer(function(d) {
|
||||
if (d.tag == 'a') {
|
||||
const url = d.attributes['xlink:href'] || d.attributes['href'];
|
||||
const loc = tryGetLocationFromString(url);
|
||||
if (loc !== undefined) {
|
||||
d.attributes['xlink:href'] = '#';
|
||||
d.attributes['href'] = '#';
|
||||
loc.uri = 'file://' + loc.uri;
|
||||
d3.select(this).on('click', function(e) { jumpToLocation(loc, databaseUri); });
|
||||
}
|
||||
}
|
||||
|
||||
if ('fill' in d.attributes) {
|
||||
d.attributes.fill = d.tag == 'text' ? color : backgroundColor;
|
||||
}
|
||||
if ('stroke' in d.attributes) {
|
||||
// There is no proper way to identify the element containing the graph (which we
|
||||
// don't want a border around), as it just has tag 'polygon'. Instead we assume
|
||||
// that the first polygon we see is that element
|
||||
if (d.tag != 'polygon' || !firstPolygon) {
|
||||
d.attributes.stroke = borderColor;
|
||||
} else {
|
||||
firstPolygon = false;
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
.renderDot(graphData);
|
||||
};
|
||||
}
|
||||
@@ -20,23 +20,3 @@ export const listUnordered = <svg className="octicon octicon-light" width="16" h
|
||||
export const info = <svg className="octicon octicon-light" width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg" >
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M8.568 1.03a6.8 6.8 0 0 1 4.192 2.02 7.06 7.06 0 0 1 .46 9.39 6.85 6.85 0 0 1-8.58 1.74 7 7 0 0 1-3.12-3.5 7.12 7.12 0 0 1-.23-4.71 7 7 0 0 1 2.77-3.79 6.8 6.8 0 0 1 4.508-1.15zm.472 12.85a5.89 5.89 0 0 0 3.41-2.07 6.07 6.07 0 0 0-.4-8.06 5.82 5.82 0 0 0-7.43-.74 6.06 6.06 0 0 0 .5 10.29 5.81 5.81 0 0 0 3.92.58zM8.51 7h-1v4h1V7zm0-2h-1v1h1V5z" />
|
||||
</svg>;
|
||||
|
||||
/**
|
||||
* The icons below come from https://primer.style/octicons/
|
||||
*/
|
||||
|
||||
export const file = <svg className="octicon octicon-light" width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" d="M3.75 1.5a.25.25 0 00-.25.25v11.5c0 .138.112.25.25.25h8.5a.25.25 0 00.25-.25V6H9.75A1.75 1.75 0 018 4.25V1.5H3.75zm5.75.56v2.19c0 .138.112.25.25.25h2.19L9.5 2.06zM2 1.75C2 .784 2.784 0 3.75 0h5.086c.464 0 .909.184 1.237.513l3.414 3.414c.329.328.513.773.513 1.237v8.086A1.75 1.75 0 0112.25 15h-8.5A1.75 1.75 0 012 13.25V1.75z"></path>
|
||||
</svg>;
|
||||
|
||||
export const codeSquare = <svg className="octicon octicon-light" width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" d="M1.75 1.5a.25.25 0 00-.25.25v12.5c0 .138.112.25.25.25h12.5a.25.25 0 00.25-.25V1.75a.25.25 0 00-.25-.25H1.75zM0 1.75C0 .784.784 0 1.75 0h12.5C15.216 0 16 .784 16 1.75v12.5A1.75 1.75 0 0114.25 16H1.75A1.75 1.75 0 010 14.25V1.75zm9.22 3.72a.75.75 0 000 1.06L10.69 8 9.22 9.47a.75.75 0 101.06 1.06l2-2a.75.75 0 000-1.06l-2-2a.75.75 0 00-1.06 0zM6.78 6.53a.75.75 0 00-1.06-1.06l-2 2a.75.75 0 000 1.06l2 2a.75.75 0 101.06-1.06L5.31 8l1.47-1.47z"></path>
|
||||
</svg>;
|
||||
|
||||
export const repo = <svg className="octicon octicon-light" width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path>
|
||||
</svg>;
|
||||
|
||||
export const download = <svg className="octicon octicon-light" width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" d="M7.47 10.78a.75.75 0 001.06 0l3.75-3.75a.75.75 0 00-1.06-1.06L8.75 8.44V1.75a.75.75 0 00-1.5 0v6.69L4.78 5.97a.75.75 0 00-1.06 1.06l3.75 3.75zM3.75 13a.75.75 0 000 1.5h8.5a.75.75 0 000-1.5h-8.5z"></path>
|
||||
</svg>;
|
||||
|
||||
@@ -5,6 +5,7 @@ import { RawResultsSortState, QueryMetadata, SortDirection } from '../pure/inter
|
||||
import { assertNever } from '../pure/helpers-pure';
|
||||
import { ResultSet } from '../pure/interface-types';
|
||||
import { vscode } from './vscode-api';
|
||||
import { convertNonPrintableChars } from '../text-utils';
|
||||
|
||||
export interface ResultTableProps {
|
||||
resultSet: ResultSet;
|
||||
@@ -37,9 +38,6 @@ export const oddRowClassName = 'vscode-codeql__result-table-row--odd';
|
||||
export const pathRowClassName = 'vscode-codeql__result-table-row--path';
|
||||
export const selectedRowClassName = 'vscode-codeql__result-table-row--selected';
|
||||
|
||||
const CONTROL_CODE = '\u001F'.codePointAt(0)!;
|
||||
const CONTROL_LABEL = '\u2400'.codePointAt(0)!;
|
||||
|
||||
export function jumpToLocationHandler(
|
||||
loc: ResolvableLocationValue,
|
||||
databaseUri: string,
|
||||
@@ -70,30 +68,6 @@ export function openFile(filePath: string): void {
|
||||
});
|
||||
}
|
||||
|
||||
function convertedNonprintableChars(label: string) {
|
||||
// If the label was empty, use a placeholder instead, so the link is still clickable.
|
||||
if (!label) {
|
||||
return '[empty string]';
|
||||
} else if (label.match(/^\s+$/)) {
|
||||
return `[whitespace: "${label}"]`;
|
||||
} else {
|
||||
/**
|
||||
* If the label contains certain non-printable characters, loop through each
|
||||
* character and replace it with the cooresponding unicode control label.
|
||||
*/
|
||||
const convertedLabelArray: any[] = [];
|
||||
for (let i = 0; i < label.length; i++) {
|
||||
const labelCheck = label.codePointAt(i)!;
|
||||
if (labelCheck <= CONTROL_CODE) {
|
||||
convertedLabelArray[i] = String.fromCodePoint(labelCheck + CONTROL_LABEL);
|
||||
} else {
|
||||
convertedLabelArray[i] = label.charAt(i);
|
||||
}
|
||||
}
|
||||
return convertedLabelArray.join('');
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Render a location as a link which when clicked displays the original location.
*/
@@ -105,7 +79,7 @@ export function renderLocation(
callback?: () => void
): JSX.Element {

const displayLabel = convertedNonprintableChars(label!);
const displayLabel = convertNonPrintableChars(label!);

if (loc === undefined) {
return <span>{displayLabel}</span>;
Some files were not shown because too many files have changed in this diff.