Compare commits
449 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bf662354fe | ||
|
|
6ea36867a2 | ||
|
|
8df061f443 | ||
|
|
0885a22984 | ||
|
|
0354b1caac | ||
|
|
d32a3a0deb | ||
|
|
e41dba7627 | ||
|
|
b2f4fecfb4 | ||
|
|
21b6adb92c | ||
|
|
90577f516f | ||
|
|
a2825162ac | ||
|
|
2e26b857f4 | ||
|
|
7bdd452d63 | ||
|
|
be9a7a35bc | ||
|
|
dd8600fcc6 | ||
|
|
cc8f304f96 | ||
|
|
1ca623f68a | ||
|
|
53c404b131 | ||
|
|
f8d5fd8f6e | ||
|
|
d49c2d7958 | ||
|
|
01d7329bc3 | ||
|
|
e8d230c8f5 | ||
|
|
44a3e6b557 | ||
|
|
8b2a3b18ce | ||
|
|
fad5bb31a0 | ||
|
|
484b7668cb | ||
|
|
33dd50ca4d | ||
|
|
195cd69567 | ||
|
|
268199e9e2 | ||
|
|
6cef629507 | ||
|
|
563489d1e0 | ||
|
|
ebfcce30ba | ||
|
|
159d900edb | ||
|
|
46233b9355 | ||
|
|
1d6a7f8df1 | ||
|
|
e380c78876 | ||
|
|
cd67ce9242 | ||
|
|
cd8d82daac | ||
|
|
726feb19e1 | ||
|
|
8c324a3263 | ||
|
|
119649144e | ||
|
|
4c527a3573 | ||
|
|
525f5234b6 | ||
|
|
908abb4413 | ||
|
|
a69ec03c6e | ||
|
|
a071470c5a | ||
|
|
2ae95144a5 | ||
|
|
60faed1ccc | ||
|
|
6e6ea76c97 | ||
|
|
d30eb27320 | ||
|
|
2104cb3d09 | ||
|
|
5644206777 | ||
|
|
a6a0ee5f50 | ||
|
|
74c1e583b4 | ||
|
|
326653e25a | ||
|
|
0d057aed3f | ||
|
|
c90eede573 | ||
|
|
ebba9949a8 | ||
|
|
d18e3dd40e | ||
|
|
9355f0633a | ||
|
|
f553523f73 | ||
|
|
627bb59bd5 | ||
|
|
95cbe02768 | ||
|
|
e73a6874b2 | ||
|
|
dc6ae6cc39 | ||
|
|
3902596823 | ||
|
|
c400485a4e | ||
|
|
1a7ddcf843 | ||
|
|
7cef45c434 | ||
|
|
69b06ae95c | ||
|
|
ae2bd81215 | ||
|
|
b9be9cff9f | ||
|
|
8c5d73bd76 | ||
|
|
81b53c9c19 | ||
|
|
a232b56bcd | ||
|
|
c26d786a1c | ||
|
|
99d2df2067 | ||
|
|
866b137fd4 | ||
|
|
76a00e5fa5 | ||
|
|
f0d71ba356 | ||
|
|
b4fbfb6d2b | ||
|
|
1d02c19854 | ||
|
|
3167ceec91 | ||
|
|
fba49020e3 | ||
|
|
dea36820e4 | ||
|
|
60df319754 | ||
|
|
0bdee6e77e | ||
|
|
88440ba148 | ||
|
|
a0fb3b47c8 | ||
|
|
86d10b439b | ||
|
|
902c489979 | ||
|
|
7fed5baebc | ||
|
|
d3e961ffb3 | ||
|
|
a20d9102e6 | ||
|
|
131d252a8b | ||
|
|
90023137ca | ||
|
|
fcecfa112e | ||
|
|
303a7d1662 | ||
|
|
7c935b37b0 | ||
|
|
339fc9a755 | ||
|
|
4138ca1085 | ||
|
|
6941584214 | ||
|
|
525d7f5f3d | ||
|
|
84621b7ecc | ||
|
|
2baa53a149 | ||
|
|
15579012f1 | ||
|
|
28b00b249b | ||
|
|
401da636a0 | ||
|
|
ab9cf465cc | ||
|
|
bb7246b612 | ||
|
|
b82cd8b6f4 | ||
|
|
f56f017a84 | ||
|
|
7dc5eebcc1 | ||
|
|
644a83d6d8 | ||
|
|
4f84376faa | ||
|
|
5e76c08f84 | ||
|
|
765c956481 | ||
|
|
deac8c8c02 | ||
|
|
a47031b0d5 | ||
|
|
3bf27b3472 | ||
|
|
9422c6d65c | ||
|
|
b81e3c7b94 | ||
|
|
011eee1d16 | ||
|
|
924d24b106 | ||
|
|
54ba5ced09 | ||
|
|
78a90ffa92 | ||
|
|
b95ee896df | ||
|
|
d33b07b2d1 | ||
|
|
3d7f303c65 | ||
|
|
540d6758d1 | ||
|
|
b5b34743f1 | ||
|
|
0a6db47b5f | ||
|
|
f679a2efec | ||
|
|
72253a1bb8 | ||
|
|
2065c7d75c | ||
|
|
ff4ea3e4c8 | ||
|
|
9bd932294a | ||
|
|
afdc8164c8 | ||
|
|
ea022f4cde | ||
|
|
48ced51035 | ||
|
|
177688dc56 | ||
|
|
c5cbf92b3a | ||
|
|
529ceb133e | ||
|
|
baaa3d31c0 | ||
|
|
9629c99ccb | ||
|
|
7ade7be0c4 | ||
|
|
4272cee01b | ||
|
|
d8fbc56ec2 | ||
|
|
e41b0ff779 | ||
|
|
cf3ba32906 | ||
|
|
741d364a52 | ||
|
|
49a2555dab | ||
|
|
f4e6a0db9b | ||
|
|
4e7b89864c | ||
|
|
02443b5ddd | ||
|
|
50b507dba5 | ||
|
|
aea5d33c42 | ||
|
|
b2427a6534 | ||
|
|
b95f6a5afb | ||
|
|
7b7413ba26 | ||
|
|
d33fa5df8a | ||
|
|
2efff809eb | ||
|
|
c442ff5599 | ||
|
|
e4de8c6b9b | ||
|
|
c032e4f9a7 | ||
|
|
487cc7b088 | ||
|
|
d9e9c1b885 | ||
|
|
e19637b59c | ||
|
|
066bf3fd26 | ||
|
|
7ab1f3a83d | ||
|
|
e3e2fcc349 | ||
|
|
17ed18a29d | ||
|
|
110d930b68 | ||
|
|
f8cc3aec32 | ||
|
|
f408418f23 | ||
|
|
0b638b6ae1 | ||
|
|
ce7c7119c7 | ||
|
|
5dce5e83b0 | ||
|
|
ac3b94dac8 | ||
|
|
519c3039b8 | ||
|
|
0a5c272b17 | ||
|
|
32ec043cbe | ||
|
|
454a1eab39 | ||
|
|
d3701944bf | ||
|
|
43bcd69e39 | ||
|
|
53a17d5728 | ||
|
|
b0dab966f3 | ||
|
|
e4a3161283 | ||
|
|
47e53da89c | ||
|
|
f8f81cfb40 | ||
|
|
fd43bed99d | ||
|
|
ffc3d406c2 | ||
|
|
11bf3c9462 | ||
|
|
9b2c40b298 | ||
|
|
abf6c6f108 | ||
|
|
910c1b7352 | ||
|
|
f47d6ec21c | ||
|
|
0e23dd59db | ||
|
|
160a0aebfe | ||
|
|
4d3385825b | ||
|
|
80862944d8 | ||
|
|
91344a74f6 | ||
|
|
7538ad1ba4 | ||
|
|
24c2663fe7 | ||
|
|
50aaf3b537 | ||
|
|
847082cd30 | ||
|
|
8c7c197b22 | ||
|
|
1f95eb2f49 | ||
|
|
7874a34947 | ||
|
|
a74c8a7cee | ||
|
|
3aced3c4d3 | ||
|
|
bec23f36d2 | ||
|
|
92bbf3a2e8 | ||
|
|
5c478e98d9 | ||
|
|
f26988731e | ||
|
|
e6f9ce050b | ||
|
|
52f993f748 | ||
|
|
99fe65f6f7 | ||
|
|
7d721d9544 | ||
|
|
1005ecdc6a | ||
|
|
c9f65be721 | ||
|
|
9ad28f36b4 | ||
|
|
9c076152cb | ||
|
|
bbb6f10f17 | ||
|
|
8a671be85c | ||
|
|
0476815f8a | ||
|
|
53dfd1243f | ||
|
|
d69772d1f8 | ||
|
|
2fd5f38574 | ||
|
|
06d22841cf | ||
|
|
0133cd7734 | ||
|
|
a53c04e2c1 | ||
|
|
eba6c190e8 | ||
|
|
d0e6e3ca89 | ||
|
|
cc00456cbc | ||
|
|
434567aa34 | ||
|
|
7b1a93d7c6 | ||
|
|
d3ea84e863 | ||
|
|
1b6685ef6f | ||
|
|
f26795ca17 | ||
|
|
617f7bab0a | ||
|
|
8da1a28478 | ||
|
|
4518d9a81d | ||
|
|
3817133b5b | ||
|
|
c9b68caee4 | ||
|
|
60c4d8d40a | ||
|
|
1a9d63315f | ||
|
|
5c8098f28d | ||
|
|
bcf70c6962 | ||
|
|
64f33a5f44 | ||
|
|
48a527ad52 | ||
|
|
faabe6d887 | ||
|
|
4b8d611d86 | ||
|
|
bfc9a17ffb | ||
|
|
a4a3f70984 | ||
|
|
98bae3253d | ||
|
|
70098aa19c | ||
|
|
1261fdd41e | ||
|
|
c914312e85 | ||
|
|
cd2b5a8c59 | ||
|
|
29a43c7dc1 | ||
|
|
8ef3c3713b | ||
|
|
54f83d11d6 | ||
|
|
22cfad6711 | ||
|
|
cbc2650f30 | ||
|
|
55b060af97 | ||
|
|
9f347d136b | ||
|
|
0d0367c39d | ||
|
|
ba0a30dcfe | ||
|
|
3079d7f285 | ||
|
|
10eb355900 | ||
|
|
0daea7399a | ||
|
|
1b0077a115 | ||
|
|
db5e743055 | ||
|
|
a6d63222f5 | ||
|
|
58e80ecce3 | ||
|
|
0ad44a3fe2 | ||
|
|
09dccc13a2 | ||
|
|
2cdded9cca | ||
|
|
e8a0b24f57 | ||
|
|
182c2f3b8e | ||
|
|
e5376b3469 | ||
|
|
ef22cf174e | ||
|
|
d158487081 | ||
|
|
2e9c0c301c | ||
|
|
f256e18041 | ||
|
|
aa23680603 | ||
|
|
e5fe2148ab | ||
|
|
c44b7b1d78 | ||
|
|
24ede1b66f | ||
|
|
6335b9881b | ||
|
|
8c0fee5a2e | ||
|
|
e95f8e85a8 | ||
|
|
c6531a293e | ||
|
|
e648d9c67c | ||
|
|
45efca9425 | ||
|
|
9071f54863 | ||
|
|
0aa34a51ff | ||
|
|
181b5d6f7b | ||
|
|
7502fdee67 | ||
|
|
24652a84e4 | ||
|
|
2ee46cfd81 | ||
|
|
7c4eac8520 | ||
|
|
6fdc632743 | ||
|
|
a38a0356a0 | ||
|
|
9383b03971 | ||
|
|
baf130d60e | ||
|
|
d15e3885d7 | ||
|
|
2211e2317d | ||
|
|
6018ebaca9 | ||
|
|
da9065101f | ||
|
|
80867e6f58 | ||
|
|
5067fbc452 | ||
|
|
d88b5170ac | ||
|
|
d4673d9ca0 | ||
|
|
87f45a7739 | ||
|
|
0c89df9a80 | ||
|
|
57666bbbe3 | ||
|
|
ba8b32078d | ||
|
|
fa4dd087e5 | ||
|
|
ac74b967b3 | ||
|
|
c349c6a048 | ||
|
|
234b05994c | ||
|
|
af8f0231c0 | ||
|
|
84bd029749 | ||
|
|
7d2e4b6de4 | ||
|
|
23a0e03cef | ||
|
|
21c5ed01ad | ||
|
|
d2af550bcc | ||
|
|
cf36a52762 | ||
|
|
ac1a97efa0 | ||
|
|
8d5067f622 | ||
|
|
fe5f1c417d | ||
|
|
95438bb7e3 | ||
|
|
6d7d0ca41a | ||
|
|
3749e17769 | ||
|
|
ee49fb5070 | ||
|
|
de6c523bad | ||
|
|
6612c279ae | ||
|
|
2dfa0e8b52 | ||
|
|
0197306713 | ||
|
|
269165eaa3 | ||
|
|
14c736d72e | ||
|
|
b8898b939c | ||
|
|
45da1e0f1f | ||
|
|
88c990c6ae | ||
|
|
ac7211c117 | ||
|
|
d1d13fbd2e | ||
|
|
f99166d26c | ||
|
|
9cd6f9a768 | ||
|
|
4dd16f4611 | ||
|
|
2113d08545 | ||
|
|
5b5ef26864 | ||
|
|
c5a6e64df8 | ||
|
|
178d626062 | ||
|
|
d1d48b3506 | ||
|
|
9180d1d9fc | ||
|
|
674c5ecbff | ||
|
|
951d0b1004 | ||
|
|
edcac6925c | ||
|
|
2989e4cfb9 | ||
|
|
8f869813a9 | ||
|
|
c10500c5ea | ||
|
|
0832850009 | ||
|
|
b352830674 | ||
|
|
e913165249 | ||
|
|
ef94bb3d38 | ||
|
|
4d6076c4ea | ||
|
|
43650fde00 | ||
|
|
f2c72a67f6 | ||
|
|
2b1f3227ce | ||
|
|
841f1d3310 | ||
|
|
99756ae63b | ||
|
|
9a2bea39e6 | ||
|
|
1aab49c719 | ||
|
|
cf925c256f | ||
|
|
8383a76e43 | ||
|
|
c6d792f41e | ||
|
|
277192e7d3 | ||
|
|
85988ecf34 | ||
|
|
49d12674b7 | ||
|
|
beeb19dc05 | ||
|
|
de88d27057 | ||
|
|
eb2d00e999 | ||
|
|
d58fb54928 | ||
|
|
fdc209ca08 | ||
|
|
28092f2b86 | ||
|
|
8970ad78ae | ||
|
|
e7a0c58940 | ||
|
|
02270aaeee | ||
|
|
51fb03b4b1 | ||
|
|
838a2b71ac | ||
|
|
f01c421d42 | ||
|
|
561bc6f53c | ||
|
|
24b421e82d | ||
|
|
3c57597a19 | ||
|
|
e8d5029912 | ||
|
|
cb514f5c78 | ||
|
|
57bb8cee41 | ||
|
|
1219ef4a8c | ||
|
|
677a0f7940 | ||
|
|
b8cca29eb3 | ||
|
|
4cbf104bdf | ||
|
|
26ccde9e7d | ||
|
|
beb5b78b89 | ||
|
|
c3a21b93c0 | ||
|
|
6b9f73e156 | ||
|
|
6409e09063 | ||
|
|
8f5611b074 | ||
|
|
7f3fcce1ac | ||
|
|
4bc1d1ed8a | ||
|
|
02e5b4e830 | ||
|
|
538792e8bb | ||
|
|
56ec970121 | ||
|
|
57a04297bd | ||
|
|
59f1e4e90a | ||
|
|
7c1fce3319 | ||
|
|
476ea7aef0 | ||
|
|
0c654c4320 | ||
|
|
895ac6ae26 | ||
|
|
52484f1211 | ||
|
|
cba188b4db | ||
|
|
123b1fc085 | ||
|
|
833f8e06ca | ||
|
|
747049ed1b | ||
|
|
d62e9181f2 | ||
|
|
e4d1f4e73e | ||
|
|
c1922126d3 | ||
|
|
d2ebb3d20a | ||
|
|
72858e341a | ||
|
|
40e0027074 | ||
|
|
84048ccac1 | ||
|
|
cbb09da0d0 | ||
|
|
c8d3428f21 | ||
|
|
49a1576d14 | ||
|
|
0cc4561ee9 | ||
|
|
576737cac8 | ||
|
|
742aa4ca19 | ||
|
|
b5e6700cba | ||
|
|
7f5302dc37 | ||
|
|
3ea5524048 | ||
|
|
1823ae8397 | ||
|
|
6dca9ccbeb | ||
|
|
f3c2862937 | ||
|
|
855cb485d5 | ||
|
|
cff235c420 | ||
|
|
1089a052ec | ||
|
|
1d195cb347 | ||
|
|
8d8ed28aea |
6
.gitattributes
vendored
6
.gitattributes
vendored
@@ -18,4 +18,8 @@ yarn.lock merge=binary
|
||||
# https://mirrors.edge.kernel.org/pub/software/scm/git/docs/gitattributes.html
|
||||
# suggests that this might interleave lines arbitrarily, but empirically
|
||||
# it keeps added chunks contiguous
|
||||
CHANGELOG.md merge=union
|
||||
CHANGELOG.md merge=union
|
||||
|
||||
# Mark some JSON files containing test data as generated so they are not included
|
||||
# as part of diffs or language statistics.
|
||||
extensions/ql-vscode/src/stories/remote-queries/data/*.json linguist-generated
|
||||
|
||||
8
.github/workflows/main.yml
vendored
8
.github/workflows/main.yml
vendored
@@ -22,7 +22,7 @@ jobs:
|
||||
|
||||
- uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: '16.13.0'
|
||||
node-version: '16.14.2'
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: extensions/ql-vscode
|
||||
@@ -82,7 +82,7 @@ jobs:
|
||||
|
||||
- uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: '16.13.0'
|
||||
node-version: '16.14.0'
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: extensions/ql-vscode
|
||||
@@ -139,7 +139,7 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest]
|
||||
version: ['v2.6.3', 'v2.7.6', 'v2.8.5', 'v2.9.4', 'v2.10.0', 'nightly']
|
||||
version: ['v2.6.3', 'v2.7.6', 'v2.8.5', 'v2.9.4', 'v2.10.5', 'v2.11.1', 'nightly']
|
||||
env:
|
||||
CLI_VERSION: ${{ matrix.version }}
|
||||
NIGHTLY_URL: ${{ needs.find-nightly.outputs.url }}
|
||||
@@ -151,7 +151,7 @@ jobs:
|
||||
|
||||
- uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: '16.13.0'
|
||||
node-version: '16.14.0'
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: extensions/ql-vscode
|
||||
|
||||
2
.github/workflows/release.yml
vendored
2
.github/workflows/release.yml
vendored
@@ -22,7 +22,7 @@ jobs:
|
||||
|
||||
- uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: '16.13.0'
|
||||
node-version: '16.14.2'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
|
||||
25
.vscode/launch.json
vendored
25
.vscode/launch.json
vendored
@@ -35,6 +35,9 @@
|
||||
"runtimeArgs": [
|
||||
"--inspect=9229"
|
||||
],
|
||||
"env": {
|
||||
"LANG": "en-US"
|
||||
},
|
||||
"args": [
|
||||
"--exit",
|
||||
"-u",
|
||||
@@ -43,6 +46,8 @@
|
||||
"--diff",
|
||||
"-r",
|
||||
"ts-node/register",
|
||||
"-r",
|
||||
"test/mocha.setup.js",
|
||||
"test/pure-tests/**/*.ts"
|
||||
],
|
||||
"stopOnEntry": false,
|
||||
@@ -50,6 +55,18 @@
|
||||
"console": "integratedTerminal",
|
||||
"internalConsoleOptions": "neverOpen"
|
||||
},
|
||||
{
|
||||
"name": "Launch Unit Tests - React (vscode-codeql)",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/extensions/ql-vscode/node_modules/jest/bin/jest.js",
|
||||
"showAsyncStacks": true,
|
||||
"cwd": "${workspaceFolder}/extensions/ql-vscode",
|
||||
"stopOnEntry": false,
|
||||
"sourceMaps": true,
|
||||
"console": "integratedTerminal",
|
||||
"internalConsoleOptions": "neverOpen"
|
||||
},
|
||||
{
|
||||
"name": "Launch Integration Tests - No Workspace (vscode-codeql)",
|
||||
"type": "extensionHost",
|
||||
@@ -124,6 +141,14 @@
|
||||
"outFiles": [
|
||||
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "Launch Storybook",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"cwd": "${workspaceFolder}/extensions/ql-vscode",
|
||||
"runtimeExecutable": "npm",
|
||||
"runtimeArgs": ["run-script", "storybook"]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
9
.vscode/settings.json
vendored
9
.vscode/settings.json
vendored
@@ -30,12 +30,11 @@
|
||||
"typescript",
|
||||
"typescriptreact"
|
||||
],
|
||||
"eslint.options": {
|
||||
// This is necessary so that eslint can properly resolve its plugins
|
||||
"resolvePluginsRelativeTo": "./extensions/ql-vscode"
|
||||
},
|
||||
// This is necessary to ensure that ESLint can find the correct configuration files and plugins.
|
||||
"eslint.workingDirectories": ["./extensions/ql-vscode"],
|
||||
"editor.formatOnSave": false,
|
||||
"typescript.preferences.quoteStyle": "single",
|
||||
"javascript.preferences.quoteStyle": "single",
|
||||
"editor.wordWrapColumn": 100
|
||||
"editor.wordWrapColumn": 100,
|
||||
"jest.rootPath": "./extensions/ql-vscode"
|
||||
}
|
||||
|
||||
@@ -1,2 +1,3 @@
|
||||
**/* @github/codeql-vscode-reviewers
|
||||
/extensions/ql-vscode/src/remote-queries/ @github/code-scanning-secexp-reviewers
|
||||
**/remote-queries/ @github/code-scanning-secexp-reviewers
|
||||
**/variant-analysis/ @github/code-scanning-secexp-reviewers
|
||||
|
||||
@@ -77,6 +77,20 @@ $ vscode/scripts/code-cli.sh --install-extension dist/vscode-codeql-*.vsix # if
|
||||
|
||||
You can use VS Code to debug the extension without explicitly installing it. Just open this directory as a workspace in VS Code, and hit `F5` to start a debugging session.
|
||||
|
||||
### Storybook
|
||||
|
||||
You can use [Storybook](https://storybook.js.org/) to preview React components outside VSCode. Inside the `extensions/ql-vscode` directory, run:
|
||||
|
||||
```shell
|
||||
npm run storybook
|
||||
```
|
||||
|
||||
Your browser should automatically open to the Storybook UI. Stories live in the `src/stories` directory.
|
||||
|
||||
Alternatively, you can start Storybook inside of VSCode. There is a VSCode launch configuration for starting Storybook. It can be found in the debug view.
|
||||
|
||||
More information about Storybook can be found inside the **Overview** page once you have launched Storybook.
|
||||
|
||||
### Running the unit tests and integration tests that do not require a CLI instance
|
||||
|
||||
Unit tests and many integration tests do not require a copy of the CodeQL CLI.
|
||||
@@ -146,6 +160,7 @@ From inside of VSCode, open the `launch.json` file and in the _Launch Integratio
|
||||
* **IMPORTANT** Make sure you are on the `main` branch and your local checkout is fully updated when you add the tag.
|
||||
* If you accidentally add the tag to the wrong ref, you can just force push it to the right one later.
|
||||
1. Monitor the status of the release build in the `Release` workflow in the Actions tab.
|
||||
* DO NOT approve the "publish" stages of the workflow yet.
|
||||
1. Download the VSIX from the draft GitHub release at the top of [the releases page](https://github.com/github/vscode-codeql/releases) that is created when the release build finishes.
|
||||
1. Unzip the `.vsix` and inspect its `package.json` to make sure the version is what you expect,
|
||||
or look at the source if there's any doubt the right code is being shipped.
|
||||
|
||||
@@ -10,7 +10,7 @@ module.exports = {
|
||||
node: true,
|
||||
es6: true,
|
||||
},
|
||||
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
|
||||
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "plugin:jest-dom/recommended"],
|
||||
rules: {
|
||||
"@typescript-eslint/no-use-before-define": 0,
|
||||
"@typescript-eslint/no-unused-vars": [
|
||||
|
||||
2
extensions/ql-vscode/.npmrc
Normal file
2
extensions/ql-vscode/.npmrc
Normal file
@@ -0,0 +1,2 @@
|
||||
# Storybook requires this option to be set. See https://github.com/storybookjs/storybook/issues/18298
|
||||
legacy-peer-deps=true
|
||||
@@ -1 +1 @@
|
||||
v16.13.0
|
||||
v16.14.2
|
||||
|
||||
19
extensions/ql-vscode/.storybook/main.ts
Normal file
19
extensions/ql-vscode/.storybook/main.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import type { StorybookConfig } from '@storybook/core-common';
|
||||
|
||||
const config: StorybookConfig = {
|
||||
stories: [
|
||||
'../src/**/*.stories.mdx',
|
||||
'../src/**/*.stories.@(js|jsx|ts|tsx)'
|
||||
],
|
||||
addons: [
|
||||
'@storybook/addon-links',
|
||||
'@storybook/addon-essentials',
|
||||
'@storybook/addon-interactions'
|
||||
],
|
||||
framework: '@storybook/react',
|
||||
core: {
|
||||
builder: '@storybook/builder-webpack5'
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = config;
|
||||
7
extensions/ql-vscode/.storybook/manager.ts
Normal file
7
extensions/ql-vscode/.storybook/manager.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import { addons } from '@storybook/addons';
|
||||
import { themes } from '@storybook/theming';
|
||||
|
||||
addons.setConfig({
|
||||
theme: themes.dark,
|
||||
enableShortcuts: false,
|
||||
});
|
||||
38
extensions/ql-vscode/.storybook/preview.ts
Normal file
38
extensions/ql-vscode/.storybook/preview.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import { themes } from '@storybook/theming';
|
||||
import { action } from '@storybook/addon-actions';
|
||||
|
||||
// Allow all stories/components to use Codicons
|
||||
import '@vscode/codicons/dist/codicon.css';
|
||||
|
||||
import '../src/stories/vscode-theme.css';
|
||||
|
||||
// https://storybook.js.org/docs/react/configure/overview#configure-story-rendering
|
||||
export const parameters = {
|
||||
// All props starting with `on` will automatically receive an action as a prop
|
||||
actions: { argTypesRegex: "^on[A-Z].*" },
|
||||
// All props matching these names will automatically get the correct control
|
||||
controls: {
|
||||
matchers: {
|
||||
color: /(background|color)$/i,
|
||||
date: /Date$/,
|
||||
},
|
||||
},
|
||||
// Use a dark theme to be aligned with VSCode
|
||||
docs: {
|
||||
theme: themes.dark,
|
||||
},
|
||||
backgrounds: {
|
||||
default: 'dark',
|
||||
values: [
|
||||
{
|
||||
name: 'dark',
|
||||
value: '#1e1e1e',
|
||||
},
|
||||
],
|
||||
}
|
||||
};
|
||||
|
||||
(window as any).acquireVsCodeApi = () => ({
|
||||
postMessage: action('post-vscode-message'),
|
||||
setState: action('set-vscode-state'),
|
||||
});
|
||||
@@ -1,5 +1,34 @@
|
||||
# CodeQL for Visual Studio Code: Changelog
|
||||
|
||||
## 1.7.2 - 14 October 2022
|
||||
|
||||
- Fix a bug where results created in older versions were thought to be unsuccessful. [#1605](https://github.com/github/vscode-codeql/pull/1605)
|
||||
|
||||
## 1.7.1 - 12 October 2022
|
||||
|
||||
- Fix a bug where it was not possible to add a database folder if the folder name starts with `db-`. [#1565](https://github.com/github/vscode-codeql/pull/1565)
|
||||
- Ensure the results view opens in an editor column beside the currently active editor. [#1557](https://github.com/github/vscode-codeql/pull/1557)
|
||||
|
||||
## 1.7.0 - 20 September 2022
|
||||
|
||||
- Remove ability to download databases from LGTM. [#1467](https://github.com/github/vscode-codeql/pull/1467)
|
||||
- Remove the ability to manually upgrade databases from the context menu on databases. Databases are non-destructively upgraded automatically so for most users this was not needed. For advanced users this is still available in the Command Palette. [#1501](https://github.com/github/vscode-codeql/pull/1501)
|
||||
- Always restart the query server after a manual database upgrade. This avoids a bug in the query server where an invalid dbscheme was being retained in memory after an upgrade. [#1519](https://github.com/github/vscode-codeql/pull/1519)
|
||||
|
||||
## 1.6.12 - 1 September 2022
|
||||
|
||||
- Add ability for users to download databases directly from GitHub. [#1485](https://github.com/github/vscode-codeql/pull/1485)
|
||||
- Fix a race condition that could cause a failure to open the evaluator log when running a query. [#1490](https://github.com/github/vscode-codeql/pull/1490)
|
||||
- Fix an error when running a query with an older version of the CodeQL CLI. [#1490](https://github.com/github/vscode-codeql/pull/1490)
|
||||
|
||||
## 1.6.11 - 25 August 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.6.10 - 9 August 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.6.9 - 20 July 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
@@ -22,7 +22,7 @@ For information about other configurations, see the separate [CodeQL help](https
|
||||
|
||||
### Quick start: Using CodeQL
|
||||
|
||||
1. [Import a database from LGTM](#importing-a-database-from-lgtm).
|
||||
1. [Import a database from GitHub](#importing-a-database-from-github).
|
||||
1. [Run a query](#running-a-query).
|
||||
|
||||
---
|
||||
@@ -73,18 +73,19 @@ If you're using your own clone of the CodeQL standard libraries, you can do a `g
|
||||
|
||||
You can find all the commands contributed by the extension in the Command Palette (**Ctrl+Shift+P** or **Cmd+Shift+P**) by typing `CodeQL`, many of them are also accessible through the interface, and via keyboard shortcuts.
|
||||
|
||||
### Importing a database from LGTM
|
||||
### Importing a database from GitHub
|
||||
|
||||
While you can use the [CodeQL CLI to create your own databases](https://codeql.github.com/docs/codeql-cli/creating-codeql-databases/), the simplest way to start is by downloading a database from LGTM.com.
|
||||
While you can use the [CodeQL CLI to create your own databases](https://codeql.github.com/docs/codeql-cli/creating-codeql-databases/), the simplest way to start is by downloading a database from GitHub.com.
|
||||
|
||||
1. Open [LGTM.com](https://lgtm.com/#explore) in your browser.
|
||||
1. Search for a project you're interested in, for example [Apache Kafka](https://lgtm.com/projects/g/apache/kafka).
|
||||
1. Copy the link to that project, for example `https://lgtm.com/projects/g/apache/kafka`.
|
||||
1. In VS Code, open the Command Palette and choose the **CodeQL: Download Database from LGTM** command.
|
||||
1. Find a project that you're interested in on GitHub.com, for example [Apache Kafka](https://github.com/apache/kafka).
|
||||
1. Copy the link to that project, for example `https://github.com/apache/kafka`.
|
||||
1. In VS Code, open the Command Palette and choose the **CodeQL: Download Database from GitHub** command.
|
||||
1. Paste the link you copied earlier.
|
||||
1. Select the language for the database you want to download (only required if the project has databases for multiple languages).
|
||||
1. Once the CodeQL database has been imported, it is displayed in the Databases view.
|
||||
|
||||
For more information, see [Choosing a database](https://codeql.github.com/docs/codeql-for-visual-studio-code/analyzing-your-projects/#choosing-a-database) on codeql.github.com.
|
||||
|
||||
### Running a query
|
||||
|
||||
The instructions below assume that you're using the CodeQL starter workspace, or that you've added the CodeQL libraries and queries repository to your workspace.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import * as gulp from 'gulp';
|
||||
import { compileTypeScript, watchTypeScript, copyViewCss, cleanOutput, watchCss } from './typescript';
|
||||
import { compileTypeScript, watchTypeScript, cleanOutput } from './typescript';
|
||||
import { compileTextMateGrammar } from './textmate';
|
||||
import { copyTestData } from './tests';
|
||||
import { compileView, watchView } from './webpack';
|
||||
@@ -10,7 +10,7 @@ export const buildWithoutPackage =
|
||||
gulp.series(
|
||||
cleanOutput,
|
||||
gulp.parallel(
|
||||
compileTypeScript, compileTextMateGrammar, compileView, copyTestData, copyViewCss
|
||||
compileTypeScript, compileTextMateGrammar, compileView, copyTestData
|
||||
)
|
||||
);
|
||||
|
||||
@@ -23,6 +23,5 @@ export {
|
||||
copyTestData,
|
||||
injectAppInsightsKey,
|
||||
compileView,
|
||||
watchCss
|
||||
};
|
||||
export default gulp.series(buildWithoutPackage, injectAppInsightsKey, packageExtension);
|
||||
|
||||
@@ -39,13 +39,3 @@ export function compileTypeScript() {
|
||||
export function watchTypeScript() {
|
||||
gulp.watch('src/**/*.ts', compileTypeScript);
|
||||
}
|
||||
|
||||
export function watchCss() {
|
||||
gulp.watch('src/**/*.css', copyViewCss);
|
||||
}
|
||||
|
||||
/** Copy CSS files for the results view into the output directory. */
|
||||
export function copyViewCss() {
|
||||
return gulp.src('src/**/view/*.css')
|
||||
.pipe(gulp.dest('out'));
|
||||
}
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
import * as path from 'path';
|
||||
import * as webpack from 'webpack';
|
||||
import * as MiniCssExtractPlugin from 'mini-css-extract-plugin';
|
||||
|
||||
export const config: webpack.Configuration = {
|
||||
mode: 'development',
|
||||
entry: {
|
||||
resultsView: './src/view/results.tsx',
|
||||
compareView: './src/compare/view/Compare.tsx',
|
||||
remoteQueriesView: './src/remote-queries/view/RemoteQueries.tsx',
|
||||
webview: './src/view/webview.tsx'
|
||||
},
|
||||
output: {
|
||||
path: path.resolve(__dirname, '..', 'out'),
|
||||
@@ -31,9 +30,7 @@ export const config: webpack.Configuration = {
|
||||
{
|
||||
test: /\.less$/,
|
||||
use: [
|
||||
{
|
||||
loader: 'style-loader'
|
||||
},
|
||||
MiniCssExtractPlugin.loader,
|
||||
{
|
||||
loader: 'css-loader',
|
||||
options: {
|
||||
@@ -53,17 +50,31 @@ export const config: webpack.Configuration = {
|
||||
{
|
||||
test: /\.css$/,
|
||||
use: [
|
||||
{
|
||||
loader: 'style-loader'
|
||||
},
|
||||
MiniCssExtractPlugin.loader,
|
||||
{
|
||||
loader: 'css-loader'
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
test: /\.(woff(2)?|ttf|eot)$/,
|
||||
use: [
|
||||
{
|
||||
loader: 'file-loader',
|
||||
options: {
|
||||
name: '[name].[ext]',
|
||||
outputPath: 'fonts/',
|
||||
// We need this to make Webpack use the correct path for the fonts.
|
||||
// Without this, the CSS file will use `url([object Module])`
|
||||
esModule: false
|
||||
}
|
||||
},
|
||||
],
|
||||
}
|
||||
]
|
||||
},
|
||||
performance: {
|
||||
hints: false
|
||||
}
|
||||
},
|
||||
plugins: [new MiniCssExtractPlugin()],
|
||||
};
|
||||
|
||||
214
extensions/ql-vscode/jest.config.js
Normal file
214
extensions/ql-vscode/jest.config.js
Normal file
@@ -0,0 +1,214 @@
|
||||
/*
|
||||
* For a detailed explanation regarding each configuration property and type check, visit:
|
||||
* https://jestjs.io/docs/configuration
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
// All imported modules in your tests should be mocked automatically
|
||||
// automock: false,
|
||||
|
||||
// Stop running tests after `n` failures
|
||||
// bail: 0,
|
||||
|
||||
// The directory where Jest should store its cached dependency information
|
||||
// cacheDirectory: "/private/var/folders/6m/1394pht172qgd7dmw1fwjk100000gn/T/jest_dx",
|
||||
|
||||
// Automatically clear mock calls, instances, contexts and results before every test
|
||||
// clearMocks: true,
|
||||
|
||||
// Indicates whether the coverage information should be collected while executing the test
|
||||
// collectCoverage: false,
|
||||
|
||||
// An array of glob patterns indicating a set of files for which coverage information should be collected
|
||||
// collectCoverageFrom: undefined,
|
||||
|
||||
// The directory where Jest should output its coverage files
|
||||
// coverageDirectory: undefined,
|
||||
|
||||
// An array of regexp pattern strings used to skip coverage collection
|
||||
// coveragePathIgnorePatterns: [
|
||||
// "/node_modules/"
|
||||
// ],
|
||||
|
||||
// Indicates which provider should be used to instrument code for coverage
|
||||
coverageProvider: 'v8',
|
||||
|
||||
// A list of reporter names that Jest uses when writing coverage reports
|
||||
// coverageReporters: [
|
||||
// "json",
|
||||
// "text",
|
||||
// "lcov",
|
||||
// "clover"
|
||||
// ],
|
||||
|
||||
// An object that configures minimum threshold enforcement for coverage results
|
||||
// coverageThreshold: undefined,
|
||||
|
||||
// A path to a custom dependency extractor
|
||||
// dependencyExtractor: undefined,
|
||||
|
||||
// Make calling deprecated APIs throw helpful error messages
|
||||
// errorOnDeprecated: false,
|
||||
|
||||
// The default configuration for fake timers
|
||||
// fakeTimers: {
|
||||
// "enableGlobally": false
|
||||
// },
|
||||
|
||||
// Force coverage collection from ignored files using an array of glob patterns
|
||||
// forceCoverageMatch: [],
|
||||
|
||||
// A path to a module which exports an async function that is triggered once before all test suites
|
||||
// globalSetup: undefined,
|
||||
|
||||
// A path to a module which exports an async function that is triggered once after all test suites
|
||||
// globalTeardown: undefined,
|
||||
|
||||
// A set of global variables that need to be available in all test environments
|
||||
// globals: {},
|
||||
|
||||
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
|
||||
// maxWorkers: "50%",
|
||||
|
||||
// An array of directory names to be searched recursively up from the requiring module's location
|
||||
// moduleDirectories: [
|
||||
// "node_modules"
|
||||
// ],
|
||||
|
||||
// An array of file extensions your modules use
|
||||
moduleFileExtensions: [
|
||||
'js',
|
||||
'mjs',
|
||||
'cjs',
|
||||
'jsx',
|
||||
'ts',
|
||||
'tsx',
|
||||
'json'
|
||||
],
|
||||
|
||||
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
|
||||
'moduleNameMapper': {
|
||||
'\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$': '<rootDir>/test/__mocks__/fileMock.ts',
|
||||
'\\.(css|less)$': '<rootDir>/test/__mocks__/styleMock.ts'
|
||||
},
|
||||
|
||||
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
||||
// modulePathIgnorePatterns: [],
|
||||
|
||||
// Activates notifications for test results
|
||||
// notify: false,
|
||||
|
||||
// An enum that specifies notification mode. Requires { notify: true }
|
||||
// notifyMode: "failure-change",
|
||||
|
||||
// A preset that is used as a base for Jest's configuration
|
||||
preset: 'ts-jest',
|
||||
|
||||
// Run tests from one or more projects
|
||||
// projects: undefined,
|
||||
|
||||
// Use this configuration option to add custom reporters to Jest
|
||||
// reporters: undefined,
|
||||
|
||||
// Automatically reset mock state before every test
|
||||
// resetMocks: false,
|
||||
|
||||
// Reset the module registry before running each individual test
|
||||
// resetModules: false,
|
||||
|
||||
// A path to a custom resolver
|
||||
// resolver: undefined,
|
||||
|
||||
// Automatically restore mock state and implementation before every test
|
||||
// restoreMocks: false,
|
||||
|
||||
// The root directory that Jest should scan for tests and modules within
|
||||
// rootDir: undefined,
|
||||
|
||||
// A list of paths to directories that Jest should use to search for files in
|
||||
// roots: [
|
||||
// "<rootDir>"
|
||||
// ],
|
||||
|
||||
// Allows you to use a custom runner instead of Jest's default test runner
|
||||
// runner: "jest-runner",
|
||||
|
||||
// The paths to modules that run some code to configure or set up the testing environment before each test
|
||||
// setupFiles: [],
|
||||
|
||||
// A list of paths to modules that run some code to configure or set up the testing framework before each test
|
||||
setupFilesAfterEnv: ['<rootDir>/test/jest.setup.ts'],
|
||||
|
||||
// The number of seconds after which a test is considered as slow and reported as such in the results.
|
||||
// slowTestThreshold: 5,
|
||||
|
||||
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
|
||||
// snapshotSerializers: [],
|
||||
|
||||
// The test environment that will be used for testing
|
||||
testEnvironment: 'jsdom',
|
||||
|
||||
// Options that will be passed to the testEnvironment
|
||||
// testEnvironmentOptions: {},
|
||||
|
||||
// Adds a location field to test results
|
||||
// testLocationInResults: false,
|
||||
|
||||
// The glob patterns Jest uses to detect test files
|
||||
testMatch: [
|
||||
'**/__tests__/**/*.[jt]s?(x)'
|
||||
],
|
||||
|
||||
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
|
||||
// testPathIgnorePatterns: [
|
||||
// "/node_modules/"
|
||||
// ],
|
||||
|
||||
// The regexp pattern or array of patterns that Jest uses to detect test files
|
||||
// testRegex: [],
|
||||
|
||||
// This option allows the use of a custom results processor
|
||||
// testResultsProcessor: undefined,
|
||||
|
||||
// This option allows use of a custom test runner
|
||||
// testRunner: "jest-circus/runner",
|
||||
|
||||
// A map from regular expressions to paths to transformers
|
||||
transform: {
|
||||
'^.+\\.tsx?$': [
|
||||
'ts-jest',
|
||||
{
|
||||
tsconfig: 'src/view/tsconfig.spec.json',
|
||||
},
|
||||
],
|
||||
'node_modules': [
|
||||
'babel-jest',
|
||||
{
|
||||
presets: [
|
||||
'@babel/preset-env'
|
||||
],
|
||||
plugins: [
|
||||
'@babel/plugin-transform-modules-commonjs',
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
|
||||
'transformIgnorePatterns': [
|
||||
// These use ES modules, so need to be transformed
|
||||
'node_modules/(?!(?:@vscode/webview-ui-toolkit|@microsoft/.+|exenv-es6)/.*)'
|
||||
],
|
||||
|
||||
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
|
||||
// unmockedModulePathPatterns: undefined,
|
||||
|
||||
// Indicates whether each individual test should be reported during the run
|
||||
// verbose: undefined,
|
||||
|
||||
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
|
||||
// watchPathIgnorePatterns: [],
|
||||
|
||||
// Whether to use watchman for file crawling
|
||||
// watchman: true,
|
||||
};
|
||||
47746
extensions/ql-vscode/package-lock.json
generated
47746
extensions/ql-vscode/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -4,7 +4,7 @@
|
||||
"description": "CodeQL for Visual Studio Code",
|
||||
"author": "GitHub",
|
||||
"private": true,
|
||||
"version": "1.6.9",
|
||||
"version": "1.7.2",
|
||||
"publisher": "GitHub",
|
||||
"license": "MIT",
|
||||
"icon": "media/VS-marketplace-CodeQL-icon.png",
|
||||
@@ -35,9 +35,11 @@
|
||||
},
|
||||
"activationEvents": [
|
||||
"onLanguage:ql",
|
||||
"onLanguage:ql-summary",
|
||||
"onView:codeQLDatabases",
|
||||
"onView:codeQLQueryHistory",
|
||||
"onView:codeQLAstViewer",
|
||||
"onView:codeQLEvalLogViewer",
|
||||
"onView:test-explorer",
|
||||
"onCommand:codeQL.checkForUpdatesToCLI",
|
||||
"onCommand:codeQL.authenticateToGitHub",
|
||||
@@ -61,6 +63,7 @@
|
||||
"onCommand:codeQL.quickQuery",
|
||||
"onCommand:codeQL.restartQueryServer",
|
||||
"onWebviewPanel:resultsView",
|
||||
"onWebviewPanel:codeQL.variantAnalysis",
|
||||
"onFileSystem:codeql-zip-archive"
|
||||
],
|
||||
"main": "./out/extension",
|
||||
@@ -110,6 +113,12 @@
|
||||
"extensions": [
|
||||
".qhelp"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "ql-summary",
|
||||
"filenames": [
|
||||
"evaluator-log.summary"
|
||||
]
|
||||
}
|
||||
],
|
||||
"grammars": [
|
||||
@@ -280,7 +289,7 @@
|
||||
"default": "",
|
||||
"pattern": "^$|^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+/[a-zA-Z0-9-_]+$",
|
||||
"patternErrorMessage": "Please enter a valid GitHub repository",
|
||||
"markdownDescription": "[For internal use only] The name of the GitHub repository where you can view the progress and results of the \"Run Variant Analysis\" command. The repository should be of the form `<owner>/<repo>`)."
|
||||
"markdownDescription": "[For internal use only] The name of the GitHub repository in which the GitHub Actions workflow is run when using the \"Run Variant Analysis\" command. The repository should be of the form `<owner>/<repo>`)."
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -305,6 +314,10 @@
|
||||
"command": "codeQL.exportVariantAnalysisResults",
|
||||
"title": "CodeQL: Export Variant Analysis Results"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.openVariantAnalysis",
|
||||
"title": "CodeQL: Open Variant Analysis"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQueries",
|
||||
"title": "CodeQL: Run Queries in Selected Files"
|
||||
@@ -527,11 +540,15 @@
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"title": "Show Evaluator Log (Raw)"
|
||||
"title": "Show Evaluator Log (Raw JSON)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"title": "Show Evaluator Log (Summary)"
|
||||
"title": "Show Evaluator Log (Summary Text)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogViewer",
|
||||
"title": "Show Evaluator Log (UI)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.cancel",
|
||||
@@ -608,6 +625,19 @@
|
||||
"light": "media/light/clear-all.svg",
|
||||
"dark": "media/dark/clear-all.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLEvalLogViewer.clear",
|
||||
"title": "Clear Viewer",
|
||||
"icon": {
|
||||
"light": "media/light/clear-all.svg",
|
||||
"dark": "media/dark/clear-all.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQL.gotoQL",
|
||||
"title": "CodeQL: Go to QL Code",
|
||||
"enablement": "codeql.hasQLSource"
|
||||
}
|
||||
],
|
||||
"menus": {
|
||||
@@ -639,12 +669,12 @@
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"when": "config.codeQL.canary && view == codeQLDatabases",
|
||||
"when": "view == codeQLDatabases",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"when": "view == codeQLDatabases",
|
||||
"when": "config.codeQL.canary && view == codeQLDatabases",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
@@ -681,6 +711,11 @@
|
||||
"command": "codeQLAstViewer.clear",
|
||||
"when": "view == codeQLAstViewer",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLEvalLogViewer.clear",
|
||||
"when": "view == codeQLEvalLogViewer",
|
||||
"group": "navigation"
|
||||
}
|
||||
],
|
||||
"view/item/context": [
|
||||
@@ -694,11 +729,6 @@
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLDatabases"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.upgradeDatabase",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLDatabases"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.renameDatabase",
|
||||
"group": "9_qlCommands",
|
||||
@@ -722,7 +752,7 @@
|
||||
{
|
||||
"command": "codeQLQueryHistory.removeHistoryItem",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == interpretedResultsItem || viewItem == rawResultsItem || viewItem == remoteResultsItem || viewItem == cancelledResultsItem"
|
||||
"when": "viewItem == interpretedResultsItem || viewItem == rawResultsItem || viewItem == remoteResultsItem || viewItem == cancelledResultsItem || viewItem == cancelledRemoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.setLabel",
|
||||
@@ -754,6 +784,11 @@
|
||||
"group": "9_qlCommands",
|
||||
"when": "codeql.supportsEvalLog && viewItem == rawResultsItem || codeql.supportsEvalLog && viewItem == interpretedResultsItem || codeql.supportsEvalLog && viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogViewer",
|
||||
"group": "9_qlCommands",
|
||||
"when": "config.codeQL.canary && codeql.supportsEvalLog && viewItem == rawResultsItem || config.codeQL.canary && codeql.supportsEvalLog && viewItem == interpretedResultsItem || config.codeQL.canary && codeql.supportsEvalLog && viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
"group": "9_qlCommands",
|
||||
@@ -792,7 +827,7 @@
|
||||
{
|
||||
"command": "codeQLQueryHistory.openOnGithub",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == remoteResultsItem || viewItem == inProgressRemoteResultsItem || viewItem == cancelledResultsItem"
|
||||
"when": "viewItem == remoteResultsItem || viewItem == inProgressRemoteResultsItem || viewItem == cancelledRemoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.copyRepoList",
|
||||
@@ -859,6 +894,10 @@
|
||||
"command": "codeQL.runVariantAnalysis",
|
||||
"when": "config.codeQL.canary && editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.openVariantAnalysis",
|
||||
"when": "config.codeQL.canary && config.codeQL.variantAnalysis.liveResults"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.exportVariantAnalysisResults",
|
||||
"when": "config.codeQL.canary"
|
||||
@@ -892,7 +931,7 @@
|
||||
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseGithub",
|
||||
"command": "codeQL.chooseDatabaseLgtm",
|
||||
"when": "config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
@@ -975,6 +1014,10 @@
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogViewer",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"when": "false"
|
||||
@@ -1043,6 +1086,10 @@
|
||||
"command": "codeQLAstViewer.clear",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLEvalLogViewer.clear",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLTests.acceptOutput",
|
||||
"when": "false"
|
||||
@@ -1084,6 +1131,10 @@
|
||||
{
|
||||
"command": "codeQL.previewQueryHelp",
|
||||
"when": "resourceExtname == .qhelp && isWorkspaceTrusted"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.gotoQL",
|
||||
"when": "editorLangId == ql-summary && config.codeQL.canary"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -1109,6 +1160,11 @@
|
||||
{
|
||||
"id": "codeQLAstViewer",
|
||||
"name": "AST Viewer"
|
||||
},
|
||||
{
|
||||
"id": "codeQLEvalLogViewer",
|
||||
"name": "Evaluator Log Viewer",
|
||||
"when": "config.codeQL.canary"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -1123,7 +1179,11 @@
|
||||
},
|
||||
{
|
||||
"view": "codeQLDatabases",
|
||||
"contents": "Add a CodeQL database:\n[From a folder](command:codeQLDatabases.chooseDatabaseFolder)\n[From an archive](command:codeQLDatabases.chooseDatabaseArchive)\n[From a URL (as a zip file)](command:codeQLDatabases.chooseDatabaseInternet)\n[From LGTM](command:codeQLDatabases.chooseDatabaseLgtm)"
|
||||
"contents": "Add a CodeQL database:\n[From a folder](command:codeQLDatabases.chooseDatabaseFolder)\n[From an archive](command:codeQLDatabases.chooseDatabaseArchive)\n[From a URL (as a zip file)](command:codeQLDatabases.chooseDatabaseInternet)\n[From GitHub](command:codeQLDatabases.chooseDatabaseGithub)"
|
||||
},
|
||||
{
|
||||
"view": "codeQLEvalLogViewer",
|
||||
"contents": "Run the 'Show Evaluator Log (UI)' command on a CodeQL query run in the Query History view."
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -1132,29 +1192,33 @@
|
||||
"watch": "npm-run-all -p watch:*",
|
||||
"watch:extension": "tsc --watch",
|
||||
"watch:webpack": "gulp watchView",
|
||||
"watch:css": "gulp watchCss",
|
||||
"test": "mocha --exit -r ts-node/register test/pure-tests/**/*.ts",
|
||||
"test": "npm-run-all -p test:*",
|
||||
"test:unit": "mocha --exit -r ts-node/register -r test/mocha.setup.js test/pure-tests/**/*.ts",
|
||||
"test:view": "jest",
|
||||
"preintegration": "rm -rf ./out/vscode-tests && gulp",
|
||||
"integration": "node ./out/vscode-tests/run-integration-tests.js no-workspace,minimal-workspace",
|
||||
"cli-integration": "npm run preintegration && node ./out/vscode-tests/run-integration-tests.js cli-integration",
|
||||
"update-vscode": "node ./node_modules/vscode/bin/install",
|
||||
"format": "tsfmt -r && eslint src test --ext .ts,.tsx --fix",
|
||||
"lint": "eslint src test --ext .ts,.tsx --max-warnings=0",
|
||||
"format-staged": "lint-staged"
|
||||
"format-staged": "lint-staged",
|
||||
"storybook": "start-storybook -p 6006",
|
||||
"build-storybook": "build-storybook"
|
||||
},
|
||||
"dependencies": {
|
||||
"@octokit/rest": "^18.5.6",
|
||||
"@octokit/plugin-retry": "^3.0.9",
|
||||
"@primer/octicons-react": "^16.3.0",
|
||||
"@octokit/rest": "^19.0.4",
|
||||
"@primer/octicons-react": "^17.6.0",
|
||||
"@primer/react": "^35.0.0",
|
||||
"@vscode/codicons": "^0.0.31",
|
||||
"@vscode/webview-ui-toolkit": "^1.0.0",
|
||||
"@vscode/webview-ui-toolkit": "^1.0.1",
|
||||
"child-process-promise": "^2.2.1",
|
||||
"classnames": "~2.2.6",
|
||||
"d3": "^6.3.1",
|
||||
"d3": "^7.6.1",
|
||||
"d3-graphviz": "^2.6.1",
|
||||
"fs-extra": "^10.0.1",
|
||||
"glob-promise": "^4.2.2",
|
||||
"immutable": "^4.0.0",
|
||||
"js-yaml": "^4.1.0",
|
||||
"minimist": "~1.2.6",
|
||||
"nanoid": "^3.2.0",
|
||||
@@ -1181,11 +1245,25 @@
|
||||
"zip-a-folder": "~1.1.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.18.13",
|
||||
"@babel/plugin-transform-modules-commonjs": "^7.18.6",
|
||||
"@faker-js/faker": "^7.5.0",
|
||||
"@storybook/addon-actions": "^6.5.10",
|
||||
"@storybook/addon-essentials": "^6.5.10",
|
||||
"@storybook/addon-interactions": "^6.5.10",
|
||||
"@storybook/addon-links": "^6.5.10",
|
||||
"@storybook/builder-webpack5": "^6.5.10",
|
||||
"@storybook/manager-webpack5": "^6.5.10",
|
||||
"@storybook/react": "^6.5.10",
|
||||
"@storybook/testing-library": "^0.0.13",
|
||||
"@testing-library/jest-dom": "^5.16.5",
|
||||
"@testing-library/react": "^12.1.5",
|
||||
"@testing-library/user-event": "^14.4.3",
|
||||
"@types/chai": "^4.1.7",
|
||||
"@types/chai-as-promised": "~7.1.2",
|
||||
"@types/child-process-promise": "^2.2.1",
|
||||
"@types/classnames": "~2.2.9",
|
||||
"@types/d3": "^6.2.0",
|
||||
"@types/d3": "^7.4.0",
|
||||
"@types/d3-graphviz": "^2.6.6",
|
||||
"@types/del": "^4.0.0",
|
||||
"@types/fs-extra": "^9.0.6",
|
||||
@@ -1194,6 +1272,7 @@
|
||||
"@types/gulp": "^4.0.9",
|
||||
"@types/gulp-replace": "^1.1.0",
|
||||
"@types/gulp-sourcemaps": "0.0.32",
|
||||
"@types/jest": "^29.0.2",
|
||||
"@types/js-yaml": "^3.12.5",
|
||||
"@types/jszip": "~3.1.6",
|
||||
"@types/mocha": "^9.0.0",
|
||||
@@ -1214,40 +1293,49 @@
|
||||
"@types/unzipper": "~0.10.1",
|
||||
"@types/vscode": "^1.59.0",
|
||||
"@types/webpack": "^5.28.0",
|
||||
"@types/webpack-env": "^1.18.0",
|
||||
"@types/xml2js": "~0.4.4",
|
||||
"@typescript-eslint/eslint-plugin": "^4.26.0",
|
||||
"@typescript-eslint/parser": "^4.26.0",
|
||||
"@vscode/test-electron": "^2.1.5",
|
||||
"ansi-colors": "^4.1.1",
|
||||
"applicationinsights": "^1.8.7",
|
||||
"applicationinsights": "^2.3.5",
|
||||
"babel-loader": "^8.2.5",
|
||||
"chai": "^4.2.0",
|
||||
"chai-as-promised": "~7.1.1",
|
||||
"css-loader": "~3.1.0",
|
||||
"del": "^6.0.0",
|
||||
"eslint": "~6.8.0",
|
||||
"eslint-plugin-jest-dom": "^4.0.2",
|
||||
"eslint-plugin-react": "~7.19.0",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-storybook": "^0.6.4",
|
||||
"file-loader": "^6.2.0",
|
||||
"glob": "^7.1.4",
|
||||
"gulp": "^4.0.2",
|
||||
"gulp-replace": "^1.1.3",
|
||||
"gulp-sourcemaps": "^3.0.0",
|
||||
"gulp-typescript": "^5.0.1",
|
||||
"husky": "~4.2.5",
|
||||
"husky": "~4.3.8",
|
||||
"jest": "^29.0.3",
|
||||
"jest-environment-jsdom": "^29.0.3",
|
||||
"lint-staged": "~10.2.2",
|
||||
"mini-css-extract-plugin": "^2.6.1",
|
||||
"mocha": "^10.0.0",
|
||||
"mocha-sinon": "~2.1.2",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"prettier": "~2.0.5",
|
||||
"proxyquire": "~2.1.3",
|
||||
"sinon": "~13.0.1",
|
||||
"sinon": "~14.0.0",
|
||||
"sinon-chai": "~3.5.0",
|
||||
"style-loader": "~3.3.1",
|
||||
"through2": "^4.0.2",
|
||||
"ts-jest": "^29.0.1",
|
||||
"ts-loader": "^8.1.0",
|
||||
"ts-node": "^10.7.0",
|
||||
"ts-protoc-gen": "^0.9.0",
|
||||
"typescript": "^4.5.5",
|
||||
"typescript-formatter": "^7.2.2",
|
||||
"vsce": "^2.7.0",
|
||||
"vscode-test": "^1.4.0",
|
||||
"webpack": "^5.62.2",
|
||||
"webpack-cli": "^4.6.0"
|
||||
},
|
||||
|
||||
129
extensions/ql-vscode/src/abstract-webview.ts
Normal file
129
extensions/ql-vscode/src/abstract-webview.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import {
|
||||
WebviewPanel,
|
||||
ExtensionContext,
|
||||
window as Window,
|
||||
ViewColumn,
|
||||
Uri,
|
||||
WebviewPanelOptions,
|
||||
WebviewOptions
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { tmpDir } from './helpers';
|
||||
import { getHtmlForWebview, WebviewMessage, WebviewView } from './interface-utils';
|
||||
|
||||
export type WebviewPanelConfig = {
|
||||
viewId: string;
|
||||
title: string;
|
||||
viewColumn: ViewColumn;
|
||||
view: WebviewView;
|
||||
preserveFocus?: boolean;
|
||||
additionalOptions?: WebviewPanelOptions & WebviewOptions;
|
||||
}
|
||||
|
||||
export abstract class AbstractWebview<ToMessage extends WebviewMessage, FromMessage extends WebviewMessage> extends DisposableObject {
|
||||
protected panel: WebviewPanel | undefined;
|
||||
protected panelLoaded = false;
|
||||
protected panelLoadedCallBacks: (() => void)[] = [];
|
||||
|
||||
constructor(
|
||||
protected readonly ctx: ExtensionContext
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public async restoreView(panel: WebviewPanel): Promise<void> {
|
||||
this.panel = panel;
|
||||
this.setupPanel(panel);
|
||||
}
|
||||
|
||||
protected get isShowingPanel() {
|
||||
return !!this.panel;
|
||||
}
|
||||
|
||||
protected getPanel(): WebviewPanel {
|
||||
if (this.panel == undefined) {
|
||||
const { ctx } = this;
|
||||
|
||||
const config = this.getPanelConfig();
|
||||
|
||||
this.panel = Window.createWebviewPanel(
|
||||
config.viewId,
|
||||
config.title,
|
||||
{ viewColumn: config.viewColumn, preserveFocus: config.preserveFocus },
|
||||
{
|
||||
enableScripts: true,
|
||||
enableFindWidget: true,
|
||||
retainContextWhenHidden: true,
|
||||
...config.additionalOptions,
|
||||
localResourceRoots: [
|
||||
...(config.additionalOptions?.localResourceRoots ?? []),
|
||||
Uri.file(tmpDir.name),
|
||||
Uri.file(path.join(ctx.extensionPath, 'out'))
|
||||
],
|
||||
}
|
||||
);
|
||||
this.setupPanel(this.panel);
|
||||
}
|
||||
return this.panel;
|
||||
}
|
||||
|
||||
protected setupPanel(panel: WebviewPanel): void {
|
||||
const config = this.getPanelConfig();
|
||||
|
||||
this.push(
|
||||
panel.onDidDispose(
|
||||
() => {
|
||||
this.panel = undefined;
|
||||
this.panelLoaded = false;
|
||||
this.onPanelDispose();
|
||||
},
|
||||
null,
|
||||
this.ctx.subscriptions
|
||||
)
|
||||
);
|
||||
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
this.ctx,
|
||||
panel.webview,
|
||||
config.view,
|
||||
{
|
||||
allowInlineStyles: true,
|
||||
}
|
||||
);
|
||||
this.push(
|
||||
panel.webview.onDidReceiveMessage(
|
||||
async (e) => this.onMessage(e),
|
||||
undefined,
|
||||
this.ctx.subscriptions
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
protected abstract getPanelConfig(): WebviewPanelConfig;
|
||||
|
||||
protected abstract onPanelDispose(): void;
|
||||
|
||||
protected abstract onMessage(msg: FromMessage): Promise<void>;
|
||||
|
||||
protected waitForPanelLoaded(): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
if (this.panelLoaded) {
|
||||
resolve();
|
||||
} else {
|
||||
this.panelLoadedCallBacks.push(resolve);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
protected onWebViewLoaded(): void {
|
||||
this.panelLoaded = true;
|
||||
this.panelLoadedCallBacks.forEach((cb) => cb());
|
||||
this.panelLoadedCallBacks = [];
|
||||
}
|
||||
|
||||
protected postMessage(msg: ToMessage): Thenable<boolean> {
|
||||
return this.getPanel().webview.postMessage(msg);
|
||||
}
|
||||
}
|
||||
@@ -167,21 +167,26 @@ type Archive = {
|
||||
dirMap: DirectoryHierarchyMap;
|
||||
};
|
||||
|
||||
async function parse_zip(zipPath: string): Promise<Archive> {
|
||||
if (!await fs.pathExists(zipPath))
|
||||
throw vscode.FileSystemError.FileNotFound(zipPath);
|
||||
const archive: Archive = { unzipped: await unzipper.Open.file(zipPath), dirMap: new Map };
|
||||
archive.unzipped.files.forEach(f => { ensureFile(archive.dirMap, path.resolve('/', f.path)); });
|
||||
return archive;
|
||||
}
|
||||
|
||||
export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
|
||||
private readOnlyError = vscode.FileSystemError.NoPermissions('write operation attempted, but source archive filesystem is readonly');
|
||||
private archives: Map<string, Archive> = new Map;
|
||||
private archives: Map<string, Promise<Archive>> = new Map;
|
||||
|
||||
private async getArchive(zipPath: string): Promise<Archive> {
|
||||
if (!this.archives.has(zipPath)) {
|
||||
if (!await fs.pathExists(zipPath))
|
||||
throw vscode.FileSystemError.FileNotFound(zipPath);
|
||||
const archive: Archive = { unzipped: await unzipper.Open.file(zipPath), dirMap: new Map };
|
||||
archive.unzipped.files.forEach(f => { ensureFile(archive.dirMap, path.resolve('/', f.path)); });
|
||||
this.archives.set(zipPath, archive);
|
||||
this.archives.set(zipPath, parse_zip(zipPath));
|
||||
}
|
||||
return this.archives.get(zipPath)!;
|
||||
return await this.archives.get(zipPath)!;
|
||||
}
|
||||
|
||||
|
||||
root = new Directory('');
|
||||
|
||||
// metadata
|
||||
|
||||
@@ -76,16 +76,27 @@ export class Credentials {
|
||||
}));
|
||||
}
|
||||
|
||||
async getOctokit(): Promise<Octokit.Octokit> {
|
||||
/**
|
||||
* Creates or returns an instance of Octokit.
|
||||
*
|
||||
* @param requireAuthentication Whether the Octokit instance needs to be authenticated as user.
|
||||
* @returns An instance of Octokit.
|
||||
*/
|
||||
async getOctokit(requireAuthentication = true): Promise<Octokit.Octokit> {
|
||||
if (this.octokit) {
|
||||
return this.octokit;
|
||||
}
|
||||
|
||||
this.octokit = await this.createOctokit(true);
|
||||
// octokit shouldn't be undefined, since we've set "createIfNone: true".
|
||||
// The following block is mainly here to prevent a compiler error.
|
||||
this.octokit = await this.createOctokit(requireAuthentication);
|
||||
|
||||
if (!this.octokit) {
|
||||
throw new Error('Did not initialize Octokit.');
|
||||
if (requireAuthentication) {
|
||||
throw new Error('Did not initialize Octokit.');
|
||||
}
|
||||
|
||||
// We don't want to set this in this.octokit because that would prevent
|
||||
// authenticating when requireCredentials is true.
|
||||
return new Octokit.Octokit({ retry });
|
||||
}
|
||||
return this.octokit;
|
||||
}
|
||||
|
||||
@@ -11,12 +11,12 @@ import { promisify } from 'util';
|
||||
import { CancellationToken, commands, Disposable, Uri } from 'vscode';
|
||||
|
||||
import { BQRSInfo, DecodedBqrsChunk } from './pure/bqrs-cli-types';
|
||||
import { CliConfig } from './config';
|
||||
import { allowCanaryQueryServer, CliConfig } from './config';
|
||||
import { DistributionProvider, FindDistributionResultKind } from './distribution';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { QueryMetadata, SortDirection } from './pure/interface-types';
|
||||
import { Logger, ProgressReporter } from './logging';
|
||||
import { CompilationMessage } from './pure/messages';
|
||||
import { CompilationMessage } from './pure/legacy-messages';
|
||||
import { sarifParser } from './sarif-parser';
|
||||
import { dbSchemeToLanguage, walkDirectory } from './helpers';
|
||||
|
||||
@@ -168,7 +168,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
nullBuffer: Buffer;
|
||||
|
||||
/** Version of current cli, lazily computed by the `getVersion()` method */
|
||||
private _version: SemVer | undefined;
|
||||
private _version: Promise<SemVer> | undefined;
|
||||
|
||||
/**
|
||||
* The languages supported by the current version of the CLI, computed by `getSupportedLanguages()`.
|
||||
@@ -240,7 +240,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
/**
|
||||
* Restart the server when the current command terminates
|
||||
*/
|
||||
private restartCliServer(): void {
|
||||
restartCliServer(): void {
|
||||
const callback = (): void => {
|
||||
try {
|
||||
this.killProcessIfRunning();
|
||||
@@ -683,6 +683,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
const subcommandArgs = [
|
||||
'--format=text',
|
||||
`--end-summary=${endSummaryPath}`,
|
||||
...(await this.cliConstraints.supportsSourceMap() ? ['--sourcemap'] : []),
|
||||
inputPath,
|
||||
outputPath
|
||||
];
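
Appending `--sourcemap` only on new enough CLIs uses the conditional-spread idiom for optional flags; in isolation it looks like this (`supportsSourceMap` is a stand-in for the real version check):

    // Optional CLI flags via conditional spread: nothing is added when the check fails.
    async function buildSummaryArgs(
      inputPath: string,
      outputPath: string,
      supportsSourceMap: () => Promise<boolean>
    ): Promise<string[]> {
      return [
        '--format=text',
        ...(await supportsSourceMap() ? ['--sourcemap'] : []),
        inputPath,
        outputPath,
      ];
    }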
|
||||
@@ -694,7 +695,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
* @param inputPath The path of an evaluation event log.
|
||||
* @param outputPath The path to write a JSON summary of it to.
|
||||
*/
|
||||
async generateJsonLogSummary(
|
||||
async generateJsonLogSummary(
|
||||
inputPath: string,
|
||||
outputPath: string,
|
||||
): Promise<string> {
|
||||
@@ -984,13 +985,13 @@ export class CodeQLCliServer implements Disposable {
|
||||
|
||||
public async getVersion() {
|
||||
if (!this._version) {
|
||||
this._version = await this.refreshVersion();
|
||||
this._version = this.refreshVersion();
|
||||
// this._version is only undefined upon config change, so we reset CLI-based context key only when necessary.
|
||||
await commands.executeCommand(
|
||||
'setContext', 'codeql.supportsEvalLog', await this.cliConstraints.supportsPerQueryEvalLog()
|
||||
);
|
||||
}
|
||||
return this._version;
|
||||
return await this._version;
|
||||
}
|
||||
|
||||
private async refreshVersion() {
|
||||
@@ -1247,6 +1248,9 @@ export class CliVersionConstraint {
|
||||
*/
|
||||
public static CLI_VERSION_WITH_LANGUAGE = new SemVer('2.4.1');
|
||||
|
||||
|
||||
public static CLI_VERSION_WITH_NONDESTURCTIVE_UPGRADES = new SemVer('2.4.2');
|
||||
|
||||
/**
|
||||
* CLI version where `codeql resolve upgrades` supports
|
||||
* the `--allow-downgrades` flag
|
||||
@@ -1260,7 +1264,7 @@ export class CliVersionConstraint {
|
||||
|
||||
/**
|
||||
* CLI version where database registration was introduced
|
||||
*/
|
||||
*/
|
||||
public static CLI_VERSION_WITH_DB_REGISTRATION = new SemVer('2.4.1');
|
||||
|
||||
/**
|
||||
@@ -1321,6 +1325,16 @@ export class CliVersionConstraint {
|
||||
*/
|
||||
public static CLI_VERSION_WITH_PER_QUERY_EVAL_LOG = new SemVer('2.9.0');
|
||||
|
||||
/**
|
||||
* CLI version that supports the `--sourcemap` option for log generation.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_SOURCEMAP = new SemVer('2.10.3');
|
||||
|
||||
/**
|
||||
* CLI version that supports the new query server.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_NEW_QUERY_SERVER = new SemVer('2.11.0');
|
||||
|
||||
constructor(private readonly cli: CodeQLCliServer) {
|
||||
/**/
|
||||
}
|
||||
@@ -1337,6 +1351,10 @@ export class CliVersionConstraint {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_LANGUAGE);
|
||||
}
|
||||
|
||||
public async supportsNonDestructiveUpgrades() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NONDESTURCTIVE_UPGRADES);
|
||||
}
|
||||
|
||||
public async supportsDowngrades() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_DOWNGRADES);
|
||||
}
|
||||
@@ -1388,4 +1406,16 @@ export class CliVersionConstraint {
|
||||
async supportsPerQueryEvalLog() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG);
|
||||
}
|
||||
|
||||
async supportsSourceMap() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_SOURCEMAP);
|
||||
}
|
||||
|
||||
async supportsNewQueryServer() {
|
||||
// TODO while under development, users _must_ opt-in to the new query server
|
||||
// by setting the `codeql.canaryQueryServer` setting to `true`.
|
||||
// Ignore the version check for now.
|
||||
return allowCanaryQueryServer();
|
||||
// return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NEW_QUERY_SERVER);
|
||||
}
|
||||
}
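
All of these `supportsX` helpers reduce to one comparison of the CLI's reported version against a minimum. A cut-down sketch of the gating pattern, assuming the `semver` package and a caller-supplied version source:

    import { SemVer, gte } from 'semver';

    // Version-gated feature checks against a lazily fetched CLI version.
    class VersionGate {
      constructor(private getVersion: () => Promise<SemVer>) {}

      private async isVersionAtLeast(minimum: string): Promise<boolean> {
        return gte(await this.getVersion(), minimum);
      }

      supportsSourceMap(): Promise<boolean> {
        return this.isVersionAtLeast('2.10.3');
      }
    }

    // Usage, e.g. with a fixed version in a test:
    const gate = new VersionGate(async () => new SemVer('2.11.0'));
    void gate.supportsSourceMap(); // resolves to true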
@@ -1,14 +1,8 @@
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import {
|
||||
WebviewPanel,
|
||||
ExtensionContext,
|
||||
window as Window,
|
||||
ViewColumn,
|
||||
Uri,
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
import { tmpDir } from '../helpers';
|
||||
import {
|
||||
FromCompareViewMessage,
|
||||
ToCompareViewMessage,
|
||||
@@ -17,26 +11,24 @@ import {
|
||||
import { Logger } from '../logging';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { DatabaseManager } from '../databases';
|
||||
import { getHtmlForWebview, jumpToLocation } from '../interface-utils';
|
||||
import { jumpToLocation } from '../interface-utils';
|
||||
import { transformBqrsResultSet, RawResultSet, BQRSInfo } from '../pure/bqrs-cli-types';
|
||||
import resultsDiff from './resultsDiff';
|
||||
import { CompletedLocalQueryInfo } from '../query-results';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
import { HistoryItemLabelProvider } from '../history-item-label-provider';
|
||||
import { AbstractWebview, WebviewPanelConfig } from '../abstract-webview';
|
||||
|
||||
interface ComparePair {
|
||||
from: CompletedLocalQueryInfo;
|
||||
to: CompletedLocalQueryInfo;
|
||||
}
|
||||
|
||||
export class CompareInterfaceManager extends DisposableObject {
|
||||
export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompareViewMessage> {
|
||||
private comparePair: ComparePair | undefined;
|
||||
private panel: WebviewPanel | undefined;
|
||||
private panelLoaded = false;
|
||||
private panelLoadedCallBacks: (() => void)[] = [];
|
||||
|
||||
constructor(
|
||||
private ctx: ExtensionContext,
|
||||
ctx: ExtensionContext,
|
||||
private databaseManager: DatabaseManager,
|
||||
private cliServer: CodeQLCliServer,
|
||||
private logger: Logger,
|
||||
@@ -45,7 +37,7 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
item: CompletedLocalQueryInfo
|
||||
) => Promise<void>
|
||||
) {
|
||||
super();
|
||||
super(ctx);
|
||||
}
|
||||
|
||||
async showResults(
|
||||
@@ -103,73 +95,24 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
}
|
||||
}
|
||||
|
||||
getPanel(): WebviewPanel {
|
||||
if (this.panel == undefined) {
|
||||
const { ctx } = this;
|
||||
const panel = (this.panel = Window.createWebviewPanel(
|
||||
'compareView',
|
||||
'Compare CodeQL Query Results',
|
||||
{ viewColumn: ViewColumn.Active, preserveFocus: true },
|
||||
{
|
||||
enableScripts: true,
|
||||
enableFindWidget: true,
|
||||
retainContextWhenHidden: true,
|
||||
localResourceRoots: [
|
||||
Uri.file(tmpDir.name),
|
||||
Uri.file(path.join(this.ctx.extensionPath, 'out')),
|
||||
],
|
||||
}
|
||||
));
|
||||
this.push(this.panel.onDidDispose(
|
||||
() => {
|
||||
this.panel = undefined;
|
||||
this.comparePair = undefined;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
));
|
||||
|
||||
const scriptPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/compareView.js')
|
||||
);
|
||||
|
||||
const stylesheetPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/view/resultsView.css')
|
||||
);
|
||||
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
panel.webview,
|
||||
scriptPathOnDisk,
|
||||
[stylesheetPathOnDisk],
|
||||
false
|
||||
);
|
||||
this.push(panel.webview.onDidReceiveMessage(
|
||||
async (e) => this.handleMsgFromView(e),
|
||||
undefined,
|
||||
ctx.subscriptions
|
||||
));
|
||||
}
|
||||
return this.panel;
|
||||
protected getPanelConfig(): WebviewPanelConfig {
|
||||
return {
|
||||
viewId: 'compareView',
|
||||
title: 'Compare CodeQL Query Results',
|
||||
viewColumn: ViewColumn.Active,
|
||||
preserveFocus: true,
|
||||
view: 'compare',
|
||||
};
|
||||
}
|
||||
|
||||
private waitForPanelLoaded(): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
if (this.panelLoaded) {
|
||||
resolve();
|
||||
} else {
|
||||
this.panelLoadedCallBacks.push(resolve);
|
||||
}
|
||||
});
|
||||
protected onPanelDispose(): void {
|
||||
this.comparePair = undefined;
|
||||
}
|
||||
|
||||
private async handleMsgFromView(
|
||||
msg: FromCompareViewMessage
|
||||
): Promise<void> {
|
||||
protected async onMessage(msg: FromCompareViewMessage): Promise<void> {
|
||||
switch (msg.t) {
|
||||
case 'compareViewLoaded':
|
||||
this.panelLoaded = true;
|
||||
this.panelLoadedCallBacks.forEach((cb) => cb());
|
||||
this.panelLoadedCallBacks = [];
|
||||
case 'viewLoaded':
|
||||
this.onWebViewLoaded();
|
||||
break;
|
||||
|
||||
case 'changeCompare':
|
||||
@@ -186,10 +129,6 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
}
|
||||
}
|
||||
|
||||
private postMessage(msg: ToCompareViewMessage): Thenable<boolean> {
|
||||
return this.getPanel().webview.postMessage(msg);
|
||||
}
|
||||
|
||||
private async findCommonResultSetNames(
|
||||
from: CompletedLocalQueryInfo,
|
||||
to: CompletedLocalQueryInfo,
|
||||
@@ -1,13 +0,0 @@
|
||||
module.exports = {
|
||||
env: {
|
||||
browser: true
|
||||
},
|
||||
extends: [
|
||||
"plugin:react/recommended"
|
||||
],
|
||||
settings: {
|
||||
react: {
|
||||
version: 'detect'
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "esnext",
|
||||
"moduleResolution": "node",
|
||||
"target": "es6",
|
||||
"outDir": "out",
|
||||
"lib": ["ES2021", "dom"],
|
||||
"jsx": "react",
|
||||
"sourceMap": true,
|
||||
"rootDir": "..",
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noImplicitReturns": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"experimentalDecorators": true
|
||||
},
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
@@ -4,6 +4,8 @@ import { DistributionManager } from './distribution';
|
||||
import { logger } from './logging';
|
||||
import { ONE_DAY_IN_MS } from './pure/time';
|
||||
|
||||
export const ALL_SETTINGS: Setting[] = [];
|
||||
|
||||
/** Helper class to look up a labelled (and possibly nested) setting. */
|
||||
export class Setting {
|
||||
name: string;
|
||||
@@ -12,6 +14,7 @@ export class Setting {
|
||||
constructor(name: string, parent?: Setting) {
|
||||
this.name = name;
|
||||
this.parent = parent;
|
||||
ALL_SETTINGS.push(this);
|
||||
}
|
||||
|
||||
get qualifiedName(): string {
|
||||
@@ -36,6 +39,18 @@ export class Setting {
|
||||
return workspace.getConfiguration(this.parent.qualifiedName).update(this.name, value, target);
|
||||
}
|
||||
|
||||
inspect<T>(): InspectionResult<T> | undefined {
|
||||
if (this.parent === undefined) {
|
||||
throw new Error('Cannot update the value of a root setting.');
|
||||
}
|
||||
return workspace.getConfiguration(this.parent.qualifiedName).inspect(this.name);
|
||||
}
|
||||
}
|
||||
|
||||
export interface InspectionResult<T> {
|
||||
globalValue?: T;
|
||||
workspaceValue?: T,
|
||||
workspaceFolderValue?: T,
|
||||
}
|
||||
|
||||
const ROOT_SETTING = new Setting('codeQL');
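
The new `inspect<T>()` method surfaces VS Code's own configuration inspection, which reports the value at each scope rather than the merged result. A short usage sketch (the setting key here is purely illustrative):

    import { workspace } from 'vscode';

    // Find out at which scope a setting was configured.
    const info = workspace.getConfiguration('codeQL').inspect<boolean>('someFlag');
    if (info?.workspaceValue !== undefined) {
      console.log('overridden in this workspace:', info.workspaceValue);
    } else if (info?.globalValue !== undefined) {
      console.log('set in user settings:', info.globalValue);
    } else {
      console.log('using the default:', info?.defaultValue);
    }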
|
||||
@@ -317,6 +332,17 @@ export function isCanary() {
|
||||
return !!CANARY_FEATURES.getValue<boolean>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Enables the experimental query server
|
||||
*/
|
||||
export const CANARY_QUERY_SERVER = new Setting('canaryQueryServer', ROOT_SETTING);
|
||||
|
||||
|
||||
export function allowCanaryQueryServer() {
|
||||
return !!CANARY_QUERY_SERVER.getValue<boolean>();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Avoids caching in the AST viewer if the user is also a canary user.
|
||||
*/
|
||||
@@ -343,12 +369,12 @@ export async function setRemoteRepositoryLists(lists: Record<string, string[]> |
|
||||
}
|
||||
|
||||
/**
|
||||
* Path to a file that contains lists of GitHub repositories that you want to query remotely via
|
||||
* Path to a file that contains lists of GitHub repositories that you want to query remotely via
|
||||
* the "Run Variant Analysis" command.
|
||||
* Note: This command is only available for internal users.
|
||||
*
|
||||
*
|
||||
* This setting should be a path to a JSON file that contains a JSON object where each key is a
|
||||
* user-specified name (string), and the value is an array of GitHub repositories
|
||||
* user-specified name (string), and the value is an array of GitHub repositories
|
||||
* (of the form `<owner>/<repo>`).
|
||||
*/
|
||||
const REPO_LISTS_PATH = new Setting('repositoryListsPath', REMOTE_QUERIES_SETTING);
|
||||
@@ -387,3 +413,13 @@ export function getActionBranch(): string {
|
||||
export function isIntegrationTestMode() {
|
||||
return process.env.INTEGRATION_TEST_MODE === 'true';
|
||||
}
|
||||
|
||||
/**
|
||||
* A flag indicating whether to enable the experimental "live results" feature
|
||||
* for multi-repo variant analyses.
|
||||
*/
|
||||
const LIVE_RESULTS = new Setting('liveResults', REMOTE_QUERIES_SETTING);
|
||||
|
||||
export function isVariantAnalysisLiveResultsEnabled(): boolean {
|
||||
return !!LIVE_RESULTS.getValue<boolean>();
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { QueryWithResults } from '../run-queries';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { DecodedBqrsChunk, BqrsId, EntityValue } from '../pure/bqrs-cli-types';
|
||||
import { DatabaseItem } from '../databases';
|
||||
import { ChildAstItem, AstItem } from '../astViewer';
|
||||
import fileRangeFromURI from './fileRangeFromURI';
|
||||
import { Uri } from 'vscode';
|
||||
import { QueryWithResults } from '../run-queries-shared';
|
||||
|
||||
/**
|
||||
* A class that wraps a tree of QL results from a query that
|
||||
|
||||
@@ -3,13 +3,12 @@ import { ColumnKindCode, EntityValue, getResultSetSchema, ResultSetSchema } from
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { DatabaseManager, DatabaseItem } from '../databases';
|
||||
import fileRangeFromURI from './fileRangeFromURI';
|
||||
import * as messages from '../pure/messages';
|
||||
import { QueryServerClient } from '../queryserver-client';
|
||||
import { QueryWithResults, compileAndRunQueryAgainstDatabase, createInitialQueryInfo } from '../run-queries';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { KeyType } from './keyType';
|
||||
import { qlpackOfDatabase, resolveQueries } from './queryResolver';
|
||||
import { CancellationToken, LocationLink, Uri } from 'vscode';
|
||||
import { createInitialQueryInfo, QueryWithResults } from '../run-queries-shared';
|
||||
import { QueryRunner } from '../queryRunner';
|
||||
|
||||
export const SELECT_QUERY_NAME = '#select';
|
||||
export const TEMPLATE_NAME = 'selectedSourceFile';
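
The hunks below swap the nested `messages.TemplateDefinitions` shape for a plain `Record<string, string>` keyed by template name. Side by side, the two shapes look roughly like this (the path value is illustrative):

    // Old shape (legacy query-server messages): one tuple per template value.
    const oldTemplates = {
      selectedSourceFile: { values: { tuples: [[{ stringValue: '/src/index.ts' }]] } },
    };

    // New shape: a flat map from template name to string value.
    const newTemplates: Record<string, string> = {
      selectedSourceFile: '/src/index.ts',
    };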
|
||||
@@ -35,7 +34,7 @@ export interface FullLocationLink extends LocationLink {
|
||||
*/
|
||||
export async function getLocationsForUriString(
|
||||
cli: CodeQLCliServer,
|
||||
qs: QueryServerClient,
|
||||
qs: QueryRunner,
|
||||
dbm: DatabaseManager,
|
||||
uriString: string,
|
||||
keyType: KeyType,
|
||||
@@ -65,19 +64,8 @@ export async function getLocationsForUriString(
|
||||
},
|
||||
false
|
||||
);
|
||||
|
||||
const results = await compileAndRunQueryAgainstDatabase(
|
||||
cli,
|
||||
qs,
|
||||
db,
|
||||
initialInfo,
|
||||
queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
templates
|
||||
);
|
||||
|
||||
if (results.result.resultType == messages.QueryResultType.SUCCESS) {
|
||||
const results = await qs.compileAndRunQueryAgainstDatabase(db, initialInfo, queryStorageDir, progress, token, templates);
|
||||
if (results.successful) {
|
||||
links.push(...await getLinksFromResults(results, cli, db, filter));
|
||||
}
|
||||
}
|
||||
@@ -114,15 +102,9 @@ async function getLinksFromResults(
|
||||
return localLinks;
|
||||
}
|
||||
|
||||
function createTemplates(path: string): messages.TemplateDefinitions {
|
||||
function createTemplates(path: string): Record<string, string> {
|
||||
return {
|
||||
[TEMPLATE_NAME]: {
|
||||
values: {
|
||||
tuples: [[{
|
||||
stringValue: path
|
||||
}]]
|
||||
}
|
||||
}
|
||||
[TEMPLATE_NAME]: path
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -16,9 +16,6 @@ import { CodeQLCliServer } from '../cli';
|
||||
import { DatabaseManager } from '../databases';
|
||||
import { CachedOperation } from '../helpers';
|
||||
import { ProgressCallback, withProgress } from '../commandRunner';
|
||||
import * as messages from '../pure/messages';
|
||||
import { QueryServerClient } from '../queryserver-client';
|
||||
import { compileAndRunQueryAgainstDatabase, createInitialQueryInfo, QueryWithResults } from '../run-queries';
|
||||
import AstBuilder from './astBuilder';
|
||||
import {
|
||||
KeyType,
|
||||
@@ -26,6 +23,8 @@ import {
|
||||
import { FullLocationLink, getLocationsForUriString, TEMPLATE_NAME } from './locationFinder';
|
||||
import { qlpackOfDatabase, resolveQueries } from './queryResolver';
|
||||
import { isCanary, NO_CACHE_AST_VIEWER } from '../config';
|
||||
import { createInitialQueryInfo, QueryWithResults } from '../run-queries-shared';
|
||||
import { QueryRunner } from '../queryRunner';
|
||||
|
||||
/**
|
||||
* Run templated CodeQL queries to find definitions and references in
|
||||
@@ -39,7 +38,7 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private qs: QueryRunner,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
@@ -83,7 +82,7 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private qs: QueryRunner,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
@@ -137,7 +136,7 @@ export class TemplatePrintAstProvider {
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private qs: QueryRunner,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
@@ -195,14 +194,9 @@ export class TemplatePrintAstProvider {
|
||||
}
|
||||
|
||||
const query = queries[0];
|
||||
const templates: messages.TemplateDefinitions = {
|
||||
[TEMPLATE_NAME]: {
|
||||
values: {
|
||||
tuples: [[{
|
||||
stringValue: zippedArchive.pathWithinSourceArchive
|
||||
}]]
|
||||
}
|
||||
}
|
||||
const templates: Record<string, string> = {
|
||||
[TEMPLATE_NAME]:
|
||||
zippedArchive.pathWithinSourceArchive
|
||||
};
|
||||
|
||||
const initialInfo = await createInitialQueryInfo(
|
||||
@@ -215,9 +209,7 @@ export class TemplatePrintAstProvider {
|
||||
);
|
||||
|
||||
return {
|
||||
query: await compileAndRunQueryAgainstDatabase(
|
||||
this.cli,
|
||||
this.qs,
|
||||
query: await this.qs.compileAndRunQueryAgainstDatabase(
|
||||
db,
|
||||
initialInfo,
|
||||
this.queryStorageDir,
|
||||
@@ -231,23 +223,23 @@ export class TemplatePrintAstProvider {
|
||||
}
|
||||
|
||||
export class TemplatePrintCfgProvider {
|
||||
private cache: CachedOperation<[Uri, messages.TemplateDefinitions] | undefined>;
|
||||
private cache: CachedOperation<[Uri, Record<string, string>] | undefined>;
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private dbm: DatabaseManager,
|
||||
) {
|
||||
this.cache = new CachedOperation<[Uri, messages.TemplateDefinitions] | undefined>(this.getCfgUri.bind(this));
|
||||
this.cache = new CachedOperation<[Uri, Record<string, string>] | undefined>(this.getCfgUri.bind(this));
|
||||
}
|
||||
|
||||
async provideCfgUri(document?: TextDocument): Promise<[Uri, messages.TemplateDefinitions] | undefined> {
|
||||
async provideCfgUri(document?: TextDocument): Promise<[Uri, Record<string, string>] | undefined> {
|
||||
if (!document) {
|
||||
return;
|
||||
}
|
||||
return await this.cache.get(document.uri.toString());
|
||||
}
|
||||
|
||||
private async getCfgUri(uriString: string): Promise<[Uri, messages.TemplateDefinitions]> {
|
||||
private async getCfgUri(uriString: string): Promise<[Uri, Record<string, string>]> {
|
||||
const uri = Uri.parse(uriString, true);
|
||||
if (uri.scheme !== zipArchiveScheme) {
|
||||
throw new Error('CFG Viewing is only available for databases with zipped source archives.');
|
||||
@@ -275,14 +267,8 @@ export class TemplatePrintCfgProvider {
|
||||
|
||||
const queryUri = Uri.file(queries[0]);
|
||||
|
||||
const templates: messages.TemplateDefinitions = {
|
||||
[TEMPLATE_NAME]: {
|
||||
values: {
|
||||
tuples: [[{
|
||||
stringValue: zippedArchive.pathWithinSourceArchive
|
||||
}]]
|
||||
}
|
||||
}
|
||||
const templates: Record<string, string> = {
|
||||
[TEMPLATE_NAME]: zippedArchive.pathWithinSourceArchive
|
||||
};
|
||||
|
||||
return [queryUri, templates];
|
||||
|
||||
@@ -10,6 +10,8 @@ import {
|
||||
import { CodeQLCliServer } from './cli';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as Octokit from '@octokit/rest';
|
||||
import { retry } from '@octokit/plugin-retry';
|
||||
|
||||
import { DatabaseManager, DatabaseItem } from './databases';
|
||||
import {
|
||||
@@ -76,7 +78,7 @@ export async function promptImportInternetDatabase(
|
||||
export async function promptImportGithubDatabase(
|
||||
databaseManager: DatabaseManager,
|
||||
storagePath: string,
|
||||
credentials: Credentials,
|
||||
credentials: Credentials | undefined,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
cli?: CodeQLCliServer
|
||||
@@ -99,14 +101,15 @@ export async function promptImportGithubDatabase(
|
||||
throw new Error(`Invalid GitHub repository: ${githubRepo}`);
|
||||
}
|
||||
|
||||
const result = await convertGithubNwoToDatabaseUrl(githubRepo, credentials, progress);
|
||||
const octokit = credentials ? await credentials.getOctokit(true) : new Octokit.Octokit({ retry });
|
||||
|
||||
const result = await convertGithubNwoToDatabaseUrl(githubRepo, octokit, progress);
|
||||
if (!result) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { databaseUrl, name, owner } = result;
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
/**
|
||||
* The 'token' property of the token object returned by `octokit.auth()`.
|
||||
* The object is undocumented, but looks something like this:
|
||||
@@ -118,14 +121,9 @@ export async function promptImportGithubDatabase(
|
||||
* We only need the actual token string.
|
||||
*/
|
||||
const octokitToken = (await octokit.auth() as { token: string })?.token;
|
||||
if (!octokitToken) {
|
||||
// Just print a generic error message for now. Ideally we could show more debugging info, like the
|
||||
// octokit object, but that would expose a user token.
|
||||
throw new Error('Unable to get GitHub token.');
|
||||
}
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{ 'Accept': 'application/zip', 'Authorization': `Bearer ${octokitToken}` },
|
||||
{ 'Accept': 'application/zip', 'Authorization': octokitToken ? `Bearer ${octokitToken}` : '' },
|
||||
databaseManager,
|
||||
storagePath,
|
||||
`${owner}/${name}`,
|
||||
@@ -523,7 +521,7 @@ function convertGitHubUrlToNwo(githubUrl: string): string | undefined {
|
||||
|
||||
export async function convertGithubNwoToDatabaseUrl(
|
||||
githubRepo: string,
|
||||
credentials: Credentials,
|
||||
octokit: Octokit.Octokit,
|
||||
progress: ProgressCallback): Promise<{
|
||||
databaseUrl: string,
|
||||
owner: string,
|
||||
@@ -533,7 +531,6 @@ export async function convertGithubNwoToDatabaseUrl(
|
||||
const nwo = convertGitHubUrlToNwo(githubRepo) || githubRepo;
|
||||
const [owner, repo] = nwo.split('/');
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
const response = await octokit.request('GET /repos/:owner/:repo/code-scanning/codeql/databases', { owner, repo });
|
||||
|
||||
const languages = response.data.map((db: any) => db.language);
|
||||
|
||||
@@ -28,9 +28,6 @@ import {
|
||||
showAndLogErrorMessage
|
||||
} from './helpers';
|
||||
import { logger } from './logging';
|
||||
import { clearCacheInDatabase } from './run-queries';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import { upgradeDatabaseExplicit } from './upgrades';
|
||||
import {
|
||||
importArchiveDatabase,
|
||||
promptImportGithubDatabase,
|
||||
@@ -40,6 +37,8 @@ import {
|
||||
import { CancellationToken } from 'vscode';
|
||||
import { asyncFilter, getErrorMessage } from './pure/helpers-pure';
|
||||
import { Credentials } from './authentication';
|
||||
import { QueryRunner } from './queryRunner';
|
||||
import { isCanary } from './config';
|
||||
|
||||
type ThemableIconPath = { light: string; dark: string } | string;
|
||||
|
||||
@@ -219,7 +218,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
|
||||
public constructor(
|
||||
private databaseManager: DatabaseManager,
|
||||
private readonly queryServer: qsClient.QueryServerClient | undefined,
|
||||
private readonly queryServer: QueryRunner | undefined,
|
||||
private readonly storagePath: string,
|
||||
readonly extensionPath: string,
|
||||
private readonly getCredentials: () => Promise<Credentials>
|
||||
@@ -301,7 +300,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const credentials = await this.getCredentials();
|
||||
const credentials = isCanary() ? await this.getCredentials() : undefined;
|
||||
await this.handleChooseDatabaseGithub(credentials, progress, token);
|
||||
},
|
||||
{
|
||||
@@ -389,12 +388,11 @@ export class DatabaseUI extends DisposableObject {
|
||||
handleChooseDatabaseFolder = async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
): Promise<DatabaseItem | undefined> => {
|
||||
): Promise<void> => {
|
||||
try {
|
||||
return await this.chooseAndSetDatabase(true, progress, token);
|
||||
await this.chooseAndSetDatabase(true, progress, token);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -457,12 +455,11 @@ export class DatabaseUI extends DisposableObject {
|
||||
handleChooseDatabaseArchive = async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
): Promise<DatabaseItem | undefined> => {
|
||||
): Promise<void> => {
|
||||
try {
|
||||
return await this.chooseAndSetDatabase(false, progress, token);
|
||||
await this.chooseAndSetDatabase(false, progress, token);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -480,7 +477,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
};
|
||||
|
||||
handleChooseDatabaseGithub = async (
|
||||
credentials: Credentials,
|
||||
credentials: Credentials | undefined,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
): Promise<DatabaseItem | undefined> => {
|
||||
@@ -575,8 +572,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
|
||||
// Search for upgrade scripts in any workspace folders available
|
||||
|
||||
await upgradeDatabaseExplicit(
|
||||
this.queryServer,
|
||||
await this.queryServer.upgradeDatabaseExplicit(
|
||||
databaseItem,
|
||||
progress,
|
||||
token
|
||||
@@ -591,8 +587,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
this.queryServer !== undefined &&
|
||||
this.databaseManager.currentDatabaseItem !== undefined
|
||||
) {
|
||||
await clearCacheInDatabase(
|
||||
this.queryServer,
|
||||
await this.queryServer.clearCacheInDatabase(
|
||||
this.databaseManager.currentDatabaseItem,
|
||||
progress,
|
||||
token
|
||||
@@ -755,7 +750,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
* Perform some heuristics to ensure a proper database location is chosen.
|
||||
*
|
||||
* 1. If the selected URI to add is a file, choose the containing directory
|
||||
* 2. If the selected URI is a directory matching db-*, choose the containing directory
|
||||
* 2. If the selected URI appears to be a db language folder, choose the containing directory
|
||||
* 3. choose the current directory
|
||||
*
|
||||
* @param uri a URI that is a database folder or inside it
|
||||
@@ -768,7 +763,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
dbPath = path.dirname(dbPath);
|
||||
}
|
||||
|
||||
if (isLikelyDbLanguageFolder(dbPath)) {
|
||||
if (await isLikelyDbLanguageFolder(dbPath)) {
|
||||
dbPath = path.dirname(dbPath);
|
||||
}
|
||||
return Uri.file(dbPath);
|
||||
|
||||
@@ -17,9 +17,8 @@ import {
|
||||
import { zipArchiveScheme, encodeArchiveBasePath, decodeSourceArchiveUri, encodeSourceArchiveUri } from './archive-filesystem-provider';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { Logger, logger } from './logging';
|
||||
import { registerDatabases, Dataset, deregisterDatabases } from './pure/messages';
|
||||
import { QueryServerClient } from './queryserver-client';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
import { QueryRunner } from './queryRunner';
|
||||
|
||||
/**
|
||||
* databases.ts
|
||||
@@ -359,14 +358,12 @@ export class DatabaseItemImpl implements DatabaseItem {
|
||||
try {
|
||||
this._contents = await resolveDatabaseContents(this.databaseUri);
|
||||
this._error = undefined;
|
||||
}
|
||||
catch (e) {
|
||||
} catch (e) {
|
||||
this._contents = undefined;
|
||||
this._error = e instanceof Error ? e : new Error(String(e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
finally {
|
||||
} finally {
|
||||
this.onChanged({
|
||||
kind: DatabaseEventKind.Refresh,
|
||||
item: this
|
||||
@@ -555,16 +552,13 @@ export class DatabaseManager extends DisposableObject {
|
||||
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly qs: QueryServerClient,
|
||||
private readonly qs: QueryRunner,
|
||||
private readonly cli: cli.CodeQLCliServer,
|
||||
public logger: Logger
|
||||
) {
|
||||
super();
|
||||
|
||||
qs.onDidStartQueryServer(this.reregisterDatabases.bind(this));
|
||||
|
||||
// Let this run async.
|
||||
void this.loadPersistedState();
|
||||
qs.onStart(this.reregisterDatabases.bind(this));
|
||||
}
|
||||
|
||||
public async openDatabase(
|
||||
@@ -694,7 +688,7 @@ export class DatabaseManager extends DisposableObject {
|
||||
return item;
|
||||
}
|
||||
|
||||
private async loadPersistedState(): Promise<void> {
|
||||
public async loadPersistedState(): Promise<void> {
|
||||
return withProgress({
|
||||
location: vscode.ProgressLocation.Notification
|
||||
},
|
||||
@@ -708,6 +702,7 @@ export class DatabaseManager extends DisposableObject {
|
||||
step
|
||||
});
|
||||
try {
|
||||
void this.logger.log(`Found ${databases.length} persisted databases: ${databases.map(db => db.uri).join(', ')}`);
|
||||
for (const database of databases) {
|
||||
progress({
|
||||
maxStep: databases.length,
|
||||
@@ -722,16 +717,19 @@ export class DatabaseManager extends DisposableObject {
|
||||
if (currentDatabaseUri === database.uri) {
|
||||
await this.setCurrentDatabaseItem(databaseItem, true);
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
void this.logger.log(`Loaded database ${databaseItem.name} at URI ${database.uri}.`);
|
||||
} catch (e) {
|
||||
// When loading from persisted state, leave invalid databases in the list. They will be
|
||||
// marked as invalid, and cannot be set as the current database.
|
||||
void this.logger.log(`Error loading database ${database.uri}: ${e}.`);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// database list had an unexpected type - nothing to be done?
|
||||
void showAndLogErrorMessage(`Database list loading failed: ${getErrorMessage(e)}`);
|
||||
}
|
||||
|
||||
void this.logger.log('Finished loading persisted databases.');
|
||||
});
|
||||
}
|
||||
|
||||
@@ -860,27 +858,14 @@ export class DatabaseManager extends DisposableObject {
|
||||
token: vscode.CancellationToken,
|
||||
dbItem: DatabaseItem,
|
||||
) {
|
||||
if (dbItem.contents && (await this.cli.cliConstraints.supportsDatabaseRegistration())) {
|
||||
const databases: Dataset[] = [{
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default'
|
||||
}];
|
||||
await this.qs.sendRequest(deregisterDatabases, { databases }, token, progress);
|
||||
}
|
||||
await this.qs.deregisterDatabase(progress, token, dbItem);
|
||||
}
|
||||
|
||||
private async registerDatabase(
|
||||
progress: ProgressCallback,
|
||||
token: vscode.CancellationToken,
|
||||
dbItem: DatabaseItem,
|
||||
) {
|
||||
if (dbItem.contents && (await this.cli.cliConstraints.supportsDatabaseRegistration())) {
|
||||
const databases: Dataset[] = [{
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default'
|
||||
}];
|
||||
await this.qs.sendRequest(registerDatabases, { databases }, token, progress);
|
||||
}
|
||||
await this.qs.registerDatabase(progress, token, dbItem);
|
||||
}
|
||||
|
||||
private updatePersistedCurrentDatabaseItem(): void {
|
||||
|
||||
67
extensions/ql-vscode/src/eval-log-tree-builder.ts
Normal file
67
extensions/ql-vscode/src/eval-log-tree-builder.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { ChildEvalLogTreeItem, EvalLogTreeItem } from './eval-log-viewer';
|
||||
import { EvalLogData as EvalLogData } from './pure/log-summary-parser';
|
||||
|
||||
/** Builds the tree data for the evaluator log viewer for a single query run. */
|
||||
export default class EvalLogTreeBuilder {
|
||||
private queryName: string;
|
||||
private evalLogDataItems: EvalLogData[];
|
||||
|
||||
constructor(queryName: string, evaluatorLogDataItems: EvalLogData[]) {
|
||||
this.queryName = queryName;
|
||||
this.evalLogDataItems = evaluatorLogDataItems;
|
||||
}
|
||||
|
||||
async getRoots(): Promise<EvalLogTreeItem[]> {
|
||||
return await this.parseRoots();
|
||||
}
|
||||
|
||||
private async parseRoots(): Promise<EvalLogTreeItem[]> {
|
||||
const roots: EvalLogTreeItem[] = [];
|
||||
|
||||
// Once the viewer can show logs for multiple queries, there will be more than 1 item at the root
|
||||
// level. For now, there will always be one root (the one query being shown).
|
||||
const queryItem: EvalLogTreeItem = {
|
||||
label: this.queryName,
|
||||
children: [] // Will assign predicate items as children shortly.
|
||||
};
|
||||
|
||||
// Display descriptive message when no data exists
|
||||
if (this.evalLogDataItems.length === 0) {
|
||||
const noResultsItem: ChildEvalLogTreeItem = {
|
||||
label: 'No predicates evaluated in this query run.',
|
||||
parent: queryItem,
|
||||
children: [],
|
||||
};
|
||||
queryItem.children.push(noResultsItem);
|
||||
}
|
||||
|
||||
// For each predicate, create a TreeItem object with appropriate parents/children
|
||||
this.evalLogDataItems.forEach(logDataItem => {
|
||||
const predicateLabel = `${logDataItem.predicateName} (${logDataItem.resultSize} tuples, ${logDataItem.millis} ms)`;
|
||||
const predicateItem: ChildEvalLogTreeItem = {
|
||||
label: predicateLabel,
|
||||
parent: queryItem,
|
||||
children: [] // Will assign pipeline items as children shortly.
|
||||
};
|
||||
for (const [pipelineName, steps] of Object.entries(logDataItem.ra)) {
|
||||
const pipelineLabel = `Pipeline: ${pipelineName}`;
|
||||
const pipelineItem: ChildEvalLogTreeItem = {
|
||||
label: pipelineLabel,
|
||||
parent: predicateItem,
|
||||
children: [] // Will assign step items as children shortly.
|
||||
};
|
||||
predicateItem.children.push(pipelineItem);
|
||||
|
||||
pipelineItem.children = steps.map((step: string) => ({
|
||||
label: step,
|
||||
parent: pipelineItem,
|
||||
children: []
|
||||
}));
|
||||
}
|
||||
queryItem.children.push(predicateItem);
|
||||
});
|
||||
|
||||
roots.push(queryItem);
|
||||
return roots;
|
||||
}
|
||||
}
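
The viewer in the next file is standard VS Code tree-view plumbing: a `TreeDataProvider` plus an `EventEmitter` whose firing makes the view re-query its data. Stripped to the essentials, the wiring looks roughly like this (the view id and item shape are illustrative):

    import * as vscode from 'vscode';

    interface Item { label: string; children: Item[] }

    // Minimal TreeDataProvider: fire the change event to make VS Code refresh the tree.
    class SketchProvider implements vscode.TreeDataProvider<Item> {
      roots: Item[] = [];
      private emitter = new vscode.EventEmitter<Item | undefined | null | void>();
      readonly onDidChangeTreeData = this.emitter.event;

      refresh(): void { this.emitter.fire(); }

      getTreeItem(element: Item): vscode.TreeItem {
        const state = element.children.length
          ? vscode.TreeItemCollapsibleState.Collapsed
          : vscode.TreeItemCollapsibleState.None;
        return new vscode.TreeItem(element.label, state);
      }

      getChildren(element?: Item): Item[] {
        return element ? element.children : this.roots;
      }
    }

    // The id must match a view contributed in package.json.
    const provider = new SketchProvider();
    vscode.window.createTreeView('example.sketchView', { treeDataProvider: provider });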
|
||||
extensions/ql-vscode/src/eval-log-viewer.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
|
||||
import { window, TreeDataProvider, TreeView, TreeItem, ProviderResult, Event, EventEmitter, TreeItemCollapsibleState } from 'vscode';
|
||||
import { commandRunner } from './commandRunner';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
|
||||
export interface EvalLogTreeItem {
|
||||
label?: string;
|
||||
children: ChildEvalLogTreeItem[];
|
||||
}
|
||||
|
||||
export interface ChildEvalLogTreeItem extends EvalLogTreeItem {
|
||||
parent: ChildEvalLogTreeItem | EvalLogTreeItem;
|
||||
}
|
||||
|
||||
/** Provides data from parsed CodeQL evaluator logs to be rendered in a tree view. */
|
||||
class EvalLogDataProvider extends DisposableObject implements TreeDataProvider<EvalLogTreeItem> {
|
||||
public roots: EvalLogTreeItem[] = [];
|
||||
|
||||
private _onDidChangeTreeData: EventEmitter<EvalLogTreeItem | undefined | null | void> = new EventEmitter<EvalLogTreeItem | undefined | null | void>();
|
||||
readonly onDidChangeTreeData: Event<EvalLogTreeItem | undefined | null | void> = this._onDidChangeTreeData.event;
|
||||
|
||||
refresh(): void {
|
||||
this._onDidChangeTreeData.fire();
|
||||
}
|
||||
|
||||
getTreeItem(element: EvalLogTreeItem): TreeItem | Thenable<TreeItem> {
|
||||
const state = element.children.length
|
||||
? TreeItemCollapsibleState.Collapsed
|
||||
: TreeItemCollapsibleState.None;
|
||||
const treeItem = new TreeItem(element.label || '', state);
|
||||
treeItem.tooltip = `${treeItem.label || ''}`;
|
||||
return treeItem;
|
||||
}
|
||||
|
||||
getChildren(element?: EvalLogTreeItem): ProviderResult<EvalLogTreeItem[]> {
|
||||
// If no item is passed, return the root.
|
||||
if (!element) {
|
||||
return this.roots || [];
|
||||
}
|
||||
// Otherwise it is called with an existing item, to load its children.
|
||||
return element.children;
|
||||
}
|
||||
|
||||
getParent(element: ChildEvalLogTreeItem): ProviderResult<EvalLogTreeItem> {
|
||||
return element.parent;
|
||||
}
|
||||
}
|
||||
|
||||
/** Manages a tree viewer of structured evaluator logs. */
|
||||
export class EvalLogViewer extends DisposableObject {
|
||||
private treeView: TreeView<EvalLogTreeItem>;
|
||||
private treeDataProvider: EvalLogDataProvider;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
|
||||
this.treeDataProvider = new EvalLogDataProvider();
|
||||
this.treeView = window.createTreeView('codeQLEvalLogViewer', {
|
||||
treeDataProvider: this.treeDataProvider,
|
||||
showCollapseAll: true
|
||||
});
|
||||
|
||||
this.push(this.treeView);
|
||||
this.push(this.treeDataProvider);
|
||||
this.push(
|
||||
commandRunner('codeQLEvalLogViewer.clear', async () => {
|
||||
this.clear();
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
private clear(): void {
|
||||
this.treeDataProvider.roots = [];
|
||||
this.treeDataProvider.refresh();
|
||||
this.treeView.message = undefined;
|
||||
}
|
||||
|
||||
// Called when the Show Evaluator Log (UI) command is run on a new query.
|
||||
updateRoots(roots: EvalLogTreeItem[]): void {
|
||||
this.treeDataProvider.roots = roots;
|
||||
this.treeDataProvider.refresh();
|
||||
|
||||
this.treeView.message = 'Viewer for query run:'; // Currently only one query supported at a time.
|
||||
|
||||
// Handle error on reveal. This could happen if
|
||||
// the tree view is disposed during the reveal.
|
||||
this.treeView.reveal(roots[0], { focus: false })?.then(
|
||||
() => { /**/ },
|
||||
err => showAndLogErrorMessage(err)
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -68,17 +68,17 @@ import {
|
||||
} from './helpers';
|
||||
import { asError, assertNever, getErrorMessage } from './pure/helpers-pure';
|
||||
import { spawnIdeServer } from './ide-server';
|
||||
import { InterfaceManager } from './interface';
|
||||
import { ResultsView } from './interface';
|
||||
import { WebviewReveal } from './interface-utils';
|
||||
import { ideServerLogger, logger, queryServerLogger } from './logging';
|
||||
import { ideServerLogger, logger, ProgressReporter, queryServerLogger } from './logging';
|
||||
import { QueryHistoryManager } from './query-history';
|
||||
import { CompletedLocalQueryInfo, LocalQueryInfo } from './query-results';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import * as legacyQueryServer from './legacy-query-server/queryserver-client';
|
||||
import * as newQueryServer from './query-server/queryserver-client';
|
||||
import { displayQuickQuery } from './quick-query';
|
||||
import { compileAndRunQueryAgainstDatabase, createInitialQueryInfo } from './run-queries';
|
||||
import { QLTestAdapterFactory } from './test-adapter';
|
||||
import { TestUIService } from './test-ui';
|
||||
import { CompareInterfaceManager } from './compare/compare-interface';
|
||||
import { CompareView } from './compare/compare-view';
|
||||
import { gatherQlFiles } from './pure/files';
|
||||
import { initializeTelemetry } from './telemetry';
|
||||
import {
|
||||
@@ -98,6 +98,23 @@ import { handleDownloadPacks, handleInstallPackDependencies } from './packaging'
|
||||
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||
import { exportRemoteQueryResults } from './remote-queries/export-results';
|
||||
import { RemoteQuery } from './remote-queries/remote-query';
|
||||
import { EvalLogViewer } from './eval-log-viewer';
|
||||
import { SummaryLanguageSupport } from './log-insights/summary-language-support';
|
||||
import { JoinOrderScannerProvider } from './log-insights/join-order';
|
||||
import { LogScannerService } from './log-insights/log-scanner-service';
|
||||
import { createInitialQueryInfo } from './run-queries-shared';
|
||||
import { LegacyQueryRunner } from './legacy-query-server/legacyRunner';
|
||||
import { NewQueryRunner } from './query-server/query-runner';
|
||||
import { QueryRunner } from './queryRunner';
|
||||
import { VariantAnalysisView } from './remote-queries/variant-analysis-view';
|
||||
import { VariantAnalysisViewSerializer } from './remote-queries/variant-analysis-view-serializer';
|
||||
import { VariantAnalysis } from './remote-queries/shared/variant-analysis';
|
||||
import {
|
||||
VariantAnalysis as VariantAnalysisApiResponse,
|
||||
VariantAnalysisScannedRepository as ApiVariantAnalysisScannedRepository
|
||||
} from './remote-queries/gh-api/variant-analysis';
|
||||
import { VariantAnalysisManager } from './remote-queries/variant-analysis-manager';
|
||||
import { createVariantAnalysisContentProvider } from './remote-queries/variant-analysis-content-provider';
|
||||
|
||||
/**
|
||||
* extension.ts
|
||||
@@ -160,10 +177,11 @@ function registerErrorStubs(excludedCommands: string[], stubGenerator: (command:
|
||||
export interface CodeQLExtensionInterface {
|
||||
readonly ctx: ExtensionContext;
|
||||
readonly cliServer: CodeQLCliServer;
|
||||
readonly qs: qsClient.QueryServerClient;
|
||||
readonly qs: QueryRunner;
|
||||
readonly distributionManager: DistributionManager;
|
||||
readonly databaseManager: DatabaseManager;
|
||||
readonly databaseUI: DatabaseUI;
|
||||
readonly variantAnalysisManager: VariantAnalysisManager;
|
||||
readonly dispose: () => void;
|
||||
}
|
||||
|
||||
@@ -374,7 +392,10 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
allowAutoUpdating: true
|
||||
})));
|
||||
|
||||
return await installOrUpdateThenTryActivate({
|
||||
const variantAnalysisViewSerializer = new VariantAnalysisViewSerializer(ctx);
|
||||
Window.registerWebviewPanelSerializer(VariantAnalysisView.viewType, variantAnalysisViewSerializer);
|
||||
|
||||
const codeQlExtension = await installOrUpdateThenTryActivate({
|
||||
isUserInitiated: !!ctx.globalState.get(shouldUpdateOnNextActivationKey),
|
||||
shouldDisplayMessageWhenNoUpdates: false,
|
||||
|
||||
@@ -382,8 +403,14 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
// otherwise, ask user to accept the update
|
||||
allowAutoUpdating: !!ctx.globalState.get(shouldUpdateOnNextActivationKey)
|
||||
});
|
||||
|
||||
variantAnalysisViewSerializer.onExtensionLoaded(codeQlExtension.variantAnalysisManager);
|
||||
|
||||
return codeQlExtension;
|
||||
}
|
||||
|
||||
const PACK_GLOBS = ['**/codeql-pack.yml', '**/qlpack.yml', '**/queries.xml', '**/codeql-pack.lock.yml', '**/qlpack.lock.yml', '.codeqlmanifest.json', 'codeql-workspace.yml'];
|
||||
|
||||
async function activateWithInstalledDistribution(
|
||||
ctx: ExtensionContext,
|
||||
distributionManager: DistributionManager,
|
||||
@@ -412,24 +439,23 @@ async function activateWithInstalledDistribution(
|
||||
ctx.subscriptions.push(statusBar);
|
||||
|
||||
void logger.log('Initializing query server client.');
|
||||
const qs = new qsClient.QueryServerClient(
|
||||
qlConfigurationListener,
|
||||
cliServer,
|
||||
{
|
||||
logger: queryServerLogger,
|
||||
contextStoragePath: getContextStoragePath(ctx),
|
||||
},
|
||||
(task) =>
|
||||
Window.withProgress(
|
||||
{ title: 'CodeQL query server', location: ProgressLocation.Window },
|
||||
task
|
||||
)
|
||||
);
|
||||
ctx.subscriptions.push(qs);
|
||||
await qs.startQueryServer();
|
||||
const qs = await createQueryServer(qlConfigurationListener, cliServer, ctx);
|
||||
|
||||
|
||||
for (const glob of PACK_GLOBS) {
|
||||
const fsWatcher = workspace.createFileSystemWatcher(glob);
|
||||
ctx.subscriptions.push(fsWatcher);
|
||||
fsWatcher.onDidChange(async (_uri) => {
|
||||
await qs.clearPackCache();
|
||||
});
|
||||
}
|
||||
|
||||
void logger.log('Initializing database manager.');
|
||||
const dbm = new DatabaseManager(ctx, qs, cliServer, logger);
|
||||
|
||||
// Let this run async.
|
||||
void dbm.loadPersistedState();
|
||||
|
||||
ctx.subscriptions.push(dbm);
|
||||
void logger.log('Initializing database panel.');
|
||||
const databaseUI = new DatabaseUI(
|
||||
@@ -442,6 +468,10 @@ async function activateWithInstalledDistribution(
|
||||
databaseUI.init();
|
||||
ctx.subscriptions.push(databaseUI);
|
||||
|
||||
void logger.log('Initializing evaluator log viewer.');
|
||||
const evalLogViewer = new EvalLogViewer();
|
||||
ctx.subscriptions.push(evalLogViewer);
|
||||
|
||||
void logger.log('Initializing query history manager.');
|
||||
const queryHistoryConfigurationListener = new QueryHistoryConfigListener();
|
||||
ctx.subscriptions.push(queryHistoryConfigurationListener);
|
||||
@@ -452,19 +482,28 @@ async function activateWithInstalledDistribution(
|
||||
const labelProvider = new HistoryItemLabelProvider(queryHistoryConfigurationListener);
|
||||
|
||||
void logger.log('Initializing results panel interface.');
|
||||
const intm = new InterfaceManager(ctx, dbm, cliServer, queryServerLogger, labelProvider);
|
||||
ctx.subscriptions.push(intm);
|
||||
const localQueryResultsView = new ResultsView(ctx, dbm, cliServer, queryServerLogger, labelProvider);
|
||||
ctx.subscriptions.push(localQueryResultsView);
|
||||
|
||||
void logger.log('Initializing variant analysis manager.');
|
||||
const rqm = new RemoteQueriesManager(ctx, cliServer, queryStorageDir, logger);
|
||||
const variantAnalysisStorageDir = path.join(ctx.globalStorageUri.fsPath, 'variant-analyses');
|
||||
await fs.ensureDir(variantAnalysisStorageDir);
|
||||
const variantAnalysisManager = new VariantAnalysisManager(ctx, cliServer, variantAnalysisStorageDir, logger);
|
||||
ctx.subscriptions.push(variantAnalysisManager);
|
||||
ctx.subscriptions.push(workspace.registerTextDocumentContentProvider('codeql-variant-analysis', createVariantAnalysisContentProvider(variantAnalysisManager)));
|
||||
|
||||
void logger.log('Initializing remote queries manager.');
|
||||
const rqm = new RemoteQueriesManager(ctx, cliServer, queryStorageDir, logger, variantAnalysisManager);
|
||||
ctx.subscriptions.push(rqm);
|
||||
|
||||
void logger.log('Initializing query history.');
|
||||
const qhm = new QueryHistoryManager(
|
||||
qs,
|
||||
dbm,
|
||||
intm,
|
||||
localQueryResultsView,
|
||||
rqm,
|
||||
variantAnalysisManager,
|
||||
evalLogViewer,
|
||||
queryStorageDir,
|
||||
ctx,
|
||||
queryHistoryConfigurationListener,
|
||||
@@ -476,11 +515,16 @@ async function activateWithInstalledDistribution(
|
||||
|
||||
ctx.subscriptions.push(qhm);
|
||||
|
||||
void logger.log('Initializing evaluation log scanners.');
|
||||
const logScannerService = new LogScannerService(qhm);
|
||||
ctx.subscriptions.push(logScannerService);
|
||||
ctx.subscriptions.push(logScannerService.scanners.registerLogScannerProvider(new JoinOrderScannerProvider()));
|
||||
|
||||
void logger.log('Reading query history');
|
||||
await qhm.readQueryHistory();
|
||||
|
||||
void logger.log('Initializing compare panel interface.');
|
||||
const cmpm = new CompareInterfaceManager(
|
||||
void logger.log('Initializing compare view.');
|
||||
const compareView = new CompareView(
|
||||
ctx,
|
||||
dbm,
|
||||
cliServer,
|
||||
@@ -488,7 +532,7 @@ async function activateWithInstalledDistribution(
|
||||
labelProvider,
|
||||
showResults
|
||||
);
|
||||
ctx.subscriptions.push(cmpm);
|
||||
ctx.subscriptions.push(compareView);
|
||||
|
||||
void logger.log('Initializing source archive filesystem provider.');
|
||||
archiveFilesystemProvider.activate(ctx);
|
||||
@@ -498,7 +542,7 @@ async function activateWithInstalledDistribution(
|
||||
to: CompletedLocalQueryInfo
|
||||
): Promise<void> {
|
||||
try {
|
||||
await cmpm.showResults(from, to);
|
||||
await compareView.showResults(from, to);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
}
|
||||
@@ -508,7 +552,7 @@ async function activateWithInstalledDistribution(
|
||||
query: CompletedLocalQueryInfo,
|
||||
forceReveal: WebviewReveal
|
||||
): Promise<void> {
|
||||
await intm.showResults(query, forceReveal, false);
|
||||
await localQueryResultsView.showResults(query, forceReveal, false);
|
||||
}
|
||||
|
||||
async function compileAndRunQuery(
|
||||
@@ -538,9 +582,7 @@ async function activateWithInstalledDistribution(
|
||||
const item = new LocalQueryInfo(initialInfo, source);
|
||||
qhm.addQuery(item);
|
||||
try {
|
||||
const completedQueryInfo = await compileAndRunQueryAgainstDatabase(
|
||||
cliServer,
|
||||
qs,
|
||||
const completedQueryInfo = await qs.compileAndRunQueryAgainstDatabase(
|
||||
databaseItem,
|
||||
initialInfo,
|
||||
queryStorageDir,
|
||||
@@ -549,8 +591,8 @@ async function activateWithInstalledDistribution(
|
||||
undefined,
|
||||
item,
|
||||
);
|
||||
item.completeThisQuery(completedQueryInfo);
|
||||
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.NotForced);
|
||||
qhm.completeQuery(item, completedQueryInfo);
|
||||
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.Forced);
|
||||
// Note we must update the query history view after showing results as the
|
||||
// display and sorting might depend on the number of results
|
||||
} catch (e) {
|
||||
@@ -768,12 +810,13 @@ async function activateWithInstalledDistribution(
|
||||
});
|
||||
}
|
||||
|
||||
if (queryUris.length > 1) {
|
||||
if (queryUris.length > 1 && !await cliServer.cliConstraints.supportsNonDestructiveUpgrades()) {
|
||||
// Try to upgrade the current database before running any queries
|
||||
// so that the user isn't confronted with multiple upgrade
|
||||
// requests for each query to run.
|
||||
// Only do it if running multiple queries since this check is
|
||||
// performed on each query run anyway.
|
||||
// Don't do this with non destructive upgrades as the user won't see anything anyway.
|
||||
await databaseUI.tryUpgradeCurrentDatabase(progress, token);
|
||||
}
|
||||
|
||||
@@ -892,6 +935,31 @@ async function activateWithInstalledDistribution(
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.monitorVariantAnalysis', async (
|
||||
variantAnalysis: VariantAnalysis,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
await variantAnalysisManager.monitorVariantAnalysis(variantAnalysis, token);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.autoDownloadVariantAnalysisResult', async (
|
||||
scannedRepo: ApiVariantAnalysisScannedRepository,
|
||||
variantAnalysisSummary: VariantAnalysisApiResponse,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
await variantAnalysisManager.autoDownloadVariantAnalysisResult(scannedRepo, variantAnalysisSummary, token);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.openVariantAnalysis', async () => {
|
||||
await variantAnalysisManager.promptOpenVariantAnalysis();
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.autoDownloadRemoteQueryResults', async (
|
||||
queryResult: RemoteQueryResult,
|
||||
@@ -900,8 +968,21 @@ async function activateWithInstalledDistribution(
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.exportVariantAnalysisResults', async () => {
|
||||
await exportRemoteQueryResults(qhm, rqm, ctx);
|
||||
commandRunner('codeQL.exportVariantAnalysisResults', async (queryId?: string) => {
|
||||
await exportRemoteQueryResults(qhm, rqm, ctx, queryId);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.loadVariantAnalysisRepoResults', async (variantAnalysisId: number, repositoryFullName: string) => {
|
||||
await variantAnalysisManager.loadResults(variantAnalysisId, repositoryFullName);
|
||||
})
|
||||
);
|
||||
|
||||
// The "openVariantAnalysisView" command is internal-only.
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.openVariantAnalysisView', async (variantAnalysisId: number) => {
|
||||
await variantAnalysisManager.showView(variantAnalysisId);
|
||||
})
|
||||
);
|
||||
|
||||
@@ -924,6 +1005,8 @@ async function activateWithInstalledDistribution(
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
// We restart the CLI server too, to ensure they are the same version
|
||||
cliServer.restartCliServer();
|
||||
await qs.restartQueryServer(progress, token);
|
||||
void showAndLogInformationMessage('CodeQL Query Server restarted.', {
|
||||
outputLogger: queryServerLogger,
|
||||
@@ -956,7 +1039,7 @@ async function activateWithInstalledDistribution(
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
const credentials = isCanary() ? await Credentials.initialize(ctx) : undefined;
|
||||
await databaseUI.handleChooseDatabaseGithub(credentials, progress, token);
|
||||
},
|
||||
{
|
||||
@@ -1004,19 +1087,16 @@ async function activateWithInstalledDistribution(
|
||||
}
|
||||
};
|
||||
|
||||
// The "authenticateToGitHub" command is internal-only.
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.authenticateToGitHub', async () => {
|
||||
if (isCanary()) {
|
||||
/**
|
||||
* Credentials for authenticating to GitHub.
|
||||
* These are used when making API calls.
|
||||
*/
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
const octokit = await credentials.getOctokit();
|
||||
const userInfo = await octokit.users.getAuthenticated();
|
||||
void showAndLogInformationMessage(`Authenticated to GitHub as user: ${userInfo.data.login}`);
|
||||
}
|
||||
/**
|
||||
* Credentials for authenticating to GitHub.
|
||||
* These are used when making API calls.
|
||||
*/
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
const octokit = await credentials.getOctokit();
|
||||
const userInfo = await octokit.users.getAuthenticated();
|
||||
void showAndLogInformationMessage(`Authenticated to GitHub as user: ${userInfo.data.login}`);
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
@@ -1045,6 +1125,8 @@ async function activateWithInstalledDistribution(
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(new SummaryLanguageSupport());
|
||||
|
||||
void logger.log('Starting language server.');
|
||||
ctx.subscriptions.push(client.start());
|
||||
|
||||
@@ -1118,12 +1200,46 @@ async function activateWithInstalledDistribution(
|
||||
distributionManager,
|
||||
databaseManager: dbm,
|
||||
databaseUI,
|
||||
variantAnalysisManager,
|
||||
dispose: () => {
|
||||
ctx.subscriptions.forEach(d => d.dispose());
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function createQueryServer(qlConfigurationListener: QueryServerConfigListener, cliServer: CodeQLCliServer, ctx: ExtensionContext): Promise<QueryRunner> {
|
||||
const qsOpts = {
|
||||
logger: queryServerLogger,
|
||||
contextStoragePath: getContextStoragePath(ctx),
|
||||
};
|
||||
const progressCallback = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Window.withProgress(
|
||||
{ title: 'CodeQL query server', location: ProgressLocation.Window },
|
||||
task
|
||||
);
|
||||
if (await cliServer.cliConstraints.supportsNewQueryServer()) {
|
||||
const qs = new newQueryServer.QueryServerClient(
|
||||
qlConfigurationListener,
|
||||
cliServer,
|
||||
qsOpts,
|
||||
progressCallback
|
||||
);
|
||||
ctx.subscriptions.push(qs);
|
||||
await qs.startQueryServer();
|
||||
return new NewQueryRunner(qs);
|
||||
|
||||
} else {
|
||||
const qs = new legacyQueryServer.QueryServerClient(
|
||||
qlConfigurationListener,
|
||||
cliServer,
|
||||
qsOpts,
|
||||
progressCallback
|
||||
);
|
||||
ctx.subscriptions.push(qs);
|
||||
await qs.startQueryServer();
|
||||
return new LegacyQueryRunner(qs);
|
||||
}
|
||||
}
|
||||
|
||||
function getContextStoragePath(ctx: ExtensionContext) {
|
||||
return ctx.storageUri?.fsPath || ctx.globalStorageUri.fsPath;
|
||||
}
|
||||
|
||||
@@ -470,9 +470,9 @@ export function getInitialQueryContents(language: string, dbscheme: string) {
|
||||
|
||||
/**
|
||||
* Heuristically determines if the directory passed in corresponds
|
||||
* to a database root.
|
||||
*
|
||||
* @param maybeRoot
|
||||
* to a database root. A database root is a directory that contains
|
||||
* a codeql-database.yml or (historically) a .dbinfo file. It also
|
||||
* contains a folder starting with `db-`.
|
||||
*/
|
||||
export async function isLikelyDatabaseRoot(maybeRoot: string) {
|
||||
const [a, b, c] = (await Promise.all([
|
||||
@@ -484,11 +484,14 @@ export async function isLikelyDatabaseRoot(maybeRoot: string) {
|
||||
glob('db-*/', { cwd: maybeRoot })
|
||||
]));
|
||||
|
||||
return !!((a || b) && c);
|
||||
return ((a || b) && c.length > 0);
|
||||
}
|
||||
|
||||
export function isLikelyDbLanguageFolder(dbPath: string) {
|
||||
return !!path.basename(dbPath).startsWith('db-');
|
||||
/**
|
||||
* A language folder is any folder starting with `db-` that is itself not a database root.
|
||||
*/
|
||||
export async function isLikelyDbLanguageFolder(dbPath: string) {
|
||||
return path.basename(dbPath).startsWith('db-') && !(await isLikelyDatabaseRoot(dbPath));
|
||||
}
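For reference, a minimal standalone sketch of the same heuristic (not the extension's exported helpers; fs-extra's promisified calls are assumed, and it checks entry names rather than running the `db-*/` glob used above):

import * as fs from 'fs-extra';
import * as path from 'path';

async function looksLikeDatabaseRoot(dir: string): Promise<boolean> {
  // A database root has a codeql-database.yml (or legacy .dbinfo) file
  // alongside at least one entry whose name starts with 'db-'.
  const hasMetadata =
    (await fs.pathExists(path.join(dir, 'codeql-database.yml'))) ||
    (await fs.pathExists(path.join(dir, '.dbinfo')));
  const entries: string[] = await fs.readdir(dir);
  return hasMetadata && entries.some(e => e.startsWith('db-'));
}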
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import { env } from 'vscode';
|
||||
import * as path from 'path';
|
||||
import { QueryHistoryConfig } from './config';
|
||||
import { LocalQueryInfo, QueryHistoryInfo } from './query-results';
|
||||
import { LocalQueryInfo } from './query-results';
|
||||
import { getRawQueryName, QueryHistoryInfo } from './query-history-info';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
import { pluralize } from './helpers';
|
||||
import { VariantAnalysisHistoryItem } from './remote-queries/variant-analysis-history-item';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
|
||||
interface InterpolateReplacements {
|
||||
t: string; // Start time
|
||||
@@ -21,9 +24,20 @@ export class HistoryItemLabelProvider {
|
||||
}
|
||||
|
||||
getLabel(item: QueryHistoryInfo) {
|
||||
const replacements = item.t === 'local'
|
||||
? this.getLocalInterpolateReplacements(item)
|
||||
: this.getRemoteInterpolateReplacements(item);
|
||||
let replacements: InterpolateReplacements;
|
||||
switch (item.t) {
|
||||
case 'local':
|
||||
replacements = this.getLocalInterpolateReplacements(item);
|
||||
break;
|
||||
case 'remote':
|
||||
replacements = this.getRemoteInterpolateReplacements(item);
|
||||
break;
|
||||
case 'variant-analysis':
|
||||
replacements = this.getVariantAnalysisInterpolateReplacements(item);
|
||||
break;
|
||||
default:
|
||||
assertNever(item);
|
||||
}
|
||||
|
||||
const rawLabel = item.userSpecifiedLabel ?? (this.config.format || '%q');
|
||||
|
||||
@@ -39,17 +53,17 @@ export class HistoryItemLabelProvider {
|
||||
getShortLabel(item: QueryHistoryInfo): string {
|
||||
return item.userSpecifiedLabel
|
||||
? this.getLabel(item)
|
||||
: item.t === 'local'
|
||||
? item.getQueryName()
|
||||
: item.remoteQuery.queryName;
|
||||
: getRawQueryName(item);
|
||||
}
|
||||
|
||||
|
||||
private interpolate(rawLabel: string, replacements: InterpolateReplacements): string {
|
||||
return rawLabel.replace(/%(.)/g, (match, key: keyof InterpolateReplacements) => {
|
||||
const label = rawLabel.replace(/%(.)/g, (match, key: keyof InterpolateReplacements) => {
|
||||
const replacement = replacements[key];
|
||||
return replacement !== undefined ? replacement : match;
|
||||
});
|
||||
|
||||
return label.replace(/\s+/g, ' ');
|
||||
}
|
||||
|
||||
private getLocalInterpolateReplacements(item: LocalQueryInfo): InterpolateReplacements {
|
||||
@@ -77,14 +91,28 @@ export class HistoryItemLabelProvider {
|
||||
}
|
||||
|
||||
private getRemoteInterpolateReplacements(item: RemoteQueryHistoryItem): InterpolateReplacements {
|
||||
const resultCount = item.resultCount ? `(${pluralize(item.resultCount, 'result', 'results')})` : '';
|
||||
return {
|
||||
t: new Date(item.remoteQuery.executionStartTime).toLocaleString(env.language),
|
||||
q: `${item.remoteQuery.queryName} (${item.remoteQuery.language})`,
|
||||
d: this.buildRepoLabel(item),
|
||||
r: `(${pluralize(item.resultCount, 'result', 'results')})`,
|
||||
r: resultCount,
|
||||
s: item.status,
|
||||
f: path.basename(item.remoteQuery.queryFilePath),
|
||||
'%': '%'
|
||||
};
|
||||
}
|
||||
|
||||
private getVariantAnalysisInterpolateReplacements(item: VariantAnalysisHistoryItem): InterpolateReplacements {
|
||||
const resultCount = item.resultCount ? `(${pluralize(item.resultCount, 'result', 'results')})` : '';
|
||||
return {
|
||||
t: new Date(item.variantAnalysis.executionStartTime).toLocaleString(env.language),
|
||||
q: `${item.variantAnalysis.query.name} (${item.variantAnalysis.query.language})`,
|
||||
d: 'TODO',
|
||||
r: resultCount,
|
||||
s: item.status,
|
||||
f: path.basename(item.variantAnalysis.query.filePath),
|
||||
'%': '%',
|
||||
};
|
||||
}
|
||||
}
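To illustrate the label format handled above, here is a standalone sketch of the `%`-placeholder interpolation with hypothetical replacement values; unknown keys are left untouched and whitespace runs are collapsed, mirroring interpolate():

const replacements: Record<string, string> = {
  t: '01/01/2023, 10:00:00',     // start time
  q: 'FindBugs.ql (javascript)', // query name and language
  r: '(12 results)',
  s: 'completed',
  '%': '%',
};
const format = '[%t] %q %r %s 100%%';
const label = format
  .replace(/%(.)/g, (match, key: string) => replacements[key] ?? match)
  .replace(/\s+/g, ' ');
// label === '[01/01/2023, 10:00:00] FindBugs.ql (javascript) (12 results) completed 100%'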
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
Uri,
|
||||
Location,
|
||||
Range,
|
||||
ExtensionContext,
|
||||
WebviewPanel,
|
||||
Webview,
|
||||
workspace,
|
||||
@@ -111,16 +112,36 @@ export function tryResolveLocation(
|
||||
}
|
||||
}
|
||||
|
||||
export type WebviewView = 'results' | 'compare' | 'remote-queries' | 'variant-analysis';
|
||||
|
||||
export interface WebviewMessage {
|
||||
t: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns HTML to populate the given webview.
|
||||
* Uses a content security policy that only loads the given script.
|
||||
*/
|
||||
export function getHtmlForWebview(
|
||||
ctx: ExtensionContext,
|
||||
webview: Webview,
|
||||
scriptUriOnDisk: Uri,
|
||||
stylesheetUrisOnDisk: Uri[],
|
||||
allowInlineStyles: boolean
|
||||
view: WebviewView,
|
||||
{
|
||||
allowInlineStyles,
|
||||
}: {
|
||||
allowInlineStyles?: boolean;
|
||||
} = {
|
||||
allowInlineStyles: false,
|
||||
}
|
||||
): string {
|
||||
const scriptUriOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/webview.js')
|
||||
);
|
||||
|
||||
const stylesheetUrisOnDisk = [
|
||||
Uri.file(ctx.asAbsolutePath('out/webview.css'))
|
||||
];
|
||||
|
||||
// Convert the on-disk URIs into webview URIs.
|
||||
const scriptWebviewUri = webview.asWebviewUri(scriptUriOnDisk);
|
||||
const stylesheetWebviewUris = stylesheetUrisOnDisk.map(stylesheetUriOnDisk =>
|
||||
@@ -155,7 +176,7 @@ export function getHtmlForWebview(
|
||||
${stylesheetsHtmlLines.join(` ${os.EOL}`)}
|
||||
</head>
|
||||
<body>
|
||||
<div id=root>
|
||||
<div id=root data-view="${view}">
|
||||
</div>
|
||||
<script nonce="${nonce}" src="${scriptWebviewUri}">
|
||||
</script>
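As a rough standalone sketch of the CSP pattern described in the comment above (the extension's real template differs; the nonce source here is an assumption):

import { randomBytes } from 'crypto';

function minimalWebviewHtml(scriptUri: string, view: string): string {
  // Only the script tag carrying this nonce is allowed to execute.
  const nonce = randomBytes(16).toString('base64');
  return `<html>
  <head>
    <meta http-equiv="Content-Security-Policy"
          content="default-src 'none'; script-src 'nonce-${nonce}';">
  </head>
  <body>
    <div id="root" data-view="${view}"></div>
    <script nonce="${nonce}" src="${scriptUri}"></script>
  </body>
</html>`;
}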
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
import * as path from 'path';
|
||||
import * as Sarif from 'sarif';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import * as vscode from 'vscode';
|
||||
import {
|
||||
Diagnostic,
|
||||
@@ -14,7 +12,7 @@ import {
|
||||
import * as cli from './cli';
|
||||
import { CodeQLCliServer } from './cli';
|
||||
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from './databases';
|
||||
import { showAndLogErrorMessage, tmpDir } from './helpers';
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import {
|
||||
FromResultsViewMsg,
|
||||
@@ -31,22 +29,21 @@ import {
|
||||
RawResultsSortState,
|
||||
} from './pure/interface-types';
|
||||
import { Logger } from './logging';
|
||||
import * as messages from './pure/messages';
|
||||
import { commandRunner } from './commandRunner';
|
||||
import { CompletedQueryInfo, interpretResultsSarif, interpretGraphResults } from './query-results';
|
||||
import { QueryEvaluationInfo } from './run-queries';
|
||||
import { QueryEvaluationInfo } from './run-queries-shared';
|
||||
import { parseSarifLocation, parseSarifPlainTextMessage } from './pure/sarif-utils';
|
||||
import {
|
||||
WebviewReveal,
|
||||
fileUriToWebviewUri,
|
||||
tryResolveLocation,
|
||||
getHtmlForWebview,
|
||||
shownLocationDecoration,
|
||||
shownLocationLineDecoration,
|
||||
jumpToLocation,
|
||||
} from './interface-utils';
|
||||
import { getDefaultResultSetName, ParsedResultSets } from './pure/interface-types';
|
||||
import { RawResultSet, transformBqrsResultSet, ResultSetSchema } from './pure/bqrs-cli-types';
|
||||
import { AbstractWebview, WebviewPanelConfig } from './abstract-webview';
|
||||
import { PAGE_SIZE } from './config';
|
||||
import { CompletedLocalQueryInfo } from './query-results';
|
||||
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||
@@ -122,12 +119,9 @@ function numInterpretedPages(interpretation: Interpretation | undefined): number
|
||||
return Math.ceil(n / pageSize);
|
||||
}
|
||||
|
||||
export class InterfaceManager extends DisposableObject {
|
||||
export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResultsViewMsg> {
|
||||
private _displayedQuery?: CompletedLocalQueryInfo;
|
||||
private _interpretation?: Interpretation;
|
||||
private _panel: vscode.WebviewPanel | undefined;
|
||||
private _panelLoaded = false;
|
||||
private _panelLoadedCallBacks: (() => void)[] = [];
|
||||
|
||||
private readonly _diagnosticCollection = languages.createDiagnosticCollection(
|
||||
'codeql-query-results'
|
||||
@@ -140,7 +134,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
public logger: Logger,
|
||||
private labelProvider: HistoryItemLabelProvider
|
||||
) {
|
||||
super();
|
||||
super(ctx);
|
||||
this.push(this._diagnosticCollection);
|
||||
this.push(
|
||||
vscode.window.onDidChangeTextEditorSelection(
|
||||
@@ -165,7 +159,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
this.databaseManager.onDidChangeDatabaseItem(({ kind }) => {
|
||||
if (kind === DatabaseEventKind.Remove) {
|
||||
this._diagnosticCollection.clear();
|
||||
if (this.isShowingPanel()) {
|
||||
if (this.isShowingPanel) {
|
||||
void this.postMessage({
|
||||
t: 'untoggleShowProblems'
|
||||
});
|
||||
@@ -179,59 +173,81 @@ export class InterfaceManager extends DisposableObject {
|
||||
await this.postMessage({ t: 'navigatePath', direction });
|
||||
}
|
||||
|
||||
private isShowingPanel() {
|
||||
return !!this._panel;
|
||||
protected getPanelConfig(): WebviewPanelConfig {
|
||||
return {
|
||||
viewId: 'resultsView',
|
||||
title: 'CodeQL Query Results',
|
||||
viewColumn: this.chooseColumnForWebview(),
|
||||
preserveFocus: true,
|
||||
view: 'results',
|
||||
};
|
||||
}
|
||||
|
||||
// Returns the webview panel, creating it if it doesn't already
|
||||
// exist.
|
||||
getPanel(): vscode.WebviewPanel {
|
||||
if (this._panel == undefined) {
|
||||
const { ctx } = this;
|
||||
const webViewColumn = this.chooseColumnForWebview();
|
||||
const panel = (this._panel = Window.createWebviewPanel(
|
||||
'resultsView', // internal name
|
||||
'CodeQL Query Results', // user-visible name
|
||||
{ viewColumn: webViewColumn, preserveFocus: true },
|
||||
{
|
||||
enableScripts: true,
|
||||
enableFindWidget: true,
|
||||
retainContextWhenHidden: true,
|
||||
localResourceRoots: [
|
||||
vscode.Uri.file(tmpDir.name),
|
||||
vscode.Uri.file(path.join(this.ctx.extensionPath, 'out'))
|
||||
]
|
||||
}
|
||||
));
|
||||
protected onPanelDispose(): void {
|
||||
this._displayedQuery = undefined;
|
||||
}
|
||||
|
||||
this.push(this._panel.onDidDispose(
|
||||
() => {
|
||||
this._panel = undefined;
|
||||
this._displayedQuery = undefined;
|
||||
this._panelLoaded = false;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
));
|
||||
const scriptPathOnDisk = vscode.Uri.file(
|
||||
ctx.asAbsolutePath('out/resultsView.js')
|
||||
);
|
||||
const stylesheetPathOnDisk = vscode.Uri.file(
|
||||
ctx.asAbsolutePath('out/view/resultsView.css')
|
||||
);
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
panel.webview,
|
||||
scriptPathOnDisk,
|
||||
[stylesheetPathOnDisk],
|
||||
false
|
||||
);
|
||||
this.push(panel.webview.onDidReceiveMessage(
|
||||
async (e) => this.handleMsgFromView(e),
|
||||
undefined,
|
||||
ctx.subscriptions
|
||||
));
|
||||
protected async onMessage(msg: FromResultsViewMsg): Promise<void> {
|
||||
try {
|
||||
switch (msg.t) {
|
||||
case 'viewLoaded':
|
||||
this.onWebViewLoaded();
|
||||
break;
|
||||
case 'viewSourceFile': {
|
||||
await jumpToLocation(msg, this.databaseManager, this.logger);
|
||||
break;
|
||||
}
|
||||
case 'toggleDiagnostics': {
|
||||
if (msg.visible) {
|
||||
const databaseItem = this.databaseManager.findDatabaseItem(
|
||||
Uri.parse(msg.databaseUri)
|
||||
);
|
||||
if (databaseItem !== undefined) {
|
||||
await this.showResultsAsDiagnostics(
|
||||
msg.origResultsPaths,
|
||||
msg.metadata,
|
||||
databaseItem
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// TODO: Only clear diagnostics on the same database.
|
||||
this._diagnosticCollection.clear();
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'changeSort':
|
||||
await this.changeRawSortState(msg.resultSetName, msg.sortState);
|
||||
break;
|
||||
case 'changeInterpretedSort':
|
||||
await this.changeInterpretedSortState(msg.sortState);
|
||||
break;
|
||||
case 'changePage':
|
||||
if (msg.selectedTable === ALERTS_TABLE_NAME || msg.selectedTable === GRAPH_TABLE_NAME) {
|
||||
await this.showPageOfInterpretedResults(msg.pageNumber);
|
||||
}
|
||||
else {
|
||||
await this.showPageOfRawResults(
|
||||
msg.selectedTable,
|
||||
msg.pageNumber,
|
||||
// When we are in an unsorted state, we guarantee that
|
||||
// sortedResultsInfo doesn't have an entry for the current
|
||||
// result set. Use this to determine whether or not we use
|
||||
// the sorted bqrs file.
|
||||
!!this._displayedQuery?.completedQuery.sortedResultsInfo[msg.selectedTable]
|
||||
);
|
||||
}
|
||||
break;
|
||||
case 'openFile':
|
||||
await this.openFile(msg.filePath);
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
}
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(getErrorMessage(e), {
|
||||
fullMessage: getErrorStack(e)
|
||||
});
|
||||
}
|
||||
return this._panel;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -296,85 +312,6 @@ export class InterfaceManager extends DisposableObject {
|
||||
await this.showPageOfRawResults(resultSetName, 0, true);
|
||||
}
|
||||
|
||||
private async handleMsgFromView(msg: FromResultsViewMsg): Promise<void> {
|
||||
try {
|
||||
switch (msg.t) {
|
||||
case 'viewSourceFile': {
|
||||
await jumpToLocation(msg, this.databaseManager, this.logger);
|
||||
break;
|
||||
}
|
||||
case 'toggleDiagnostics': {
|
||||
if (msg.visible) {
|
||||
const databaseItem = this.databaseManager.findDatabaseItem(
|
||||
Uri.parse(msg.databaseUri)
|
||||
);
|
||||
if (databaseItem !== undefined) {
|
||||
await this.showResultsAsDiagnostics(
|
||||
msg.origResultsPaths,
|
||||
msg.metadata,
|
||||
databaseItem
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// TODO: Only clear diagnostics on the same database.
|
||||
this._diagnosticCollection.clear();
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'resultViewLoaded':
|
||||
this._panelLoaded = true;
|
||||
this._panelLoadedCallBacks.forEach((cb) => cb());
|
||||
this._panelLoadedCallBacks = [];
|
||||
break;
|
||||
case 'changeSort':
|
||||
await this.changeRawSortState(msg.resultSetName, msg.sortState);
|
||||
break;
|
||||
case 'changeInterpretedSort':
|
||||
await this.changeInterpretedSortState(msg.sortState);
|
||||
break;
|
||||
case 'changePage':
|
||||
if (msg.selectedTable === ALERTS_TABLE_NAME || msg.selectedTable === GRAPH_TABLE_NAME) {
|
||||
await this.showPageOfInterpretedResults(msg.pageNumber);
|
||||
}
|
||||
else {
|
||||
await this.showPageOfRawResults(
|
||||
msg.selectedTable,
|
||||
msg.pageNumber,
|
||||
// When we are in an unsorted state, we guarantee that
|
||||
// sortedResultsInfo doesn't have an entry for the current
|
||||
// result set. Use this to determine whether or not we use
|
||||
// the sorted bqrs file.
|
||||
!!this._displayedQuery?.completedQuery.sortedResultsInfo[msg.selectedTable]
|
||||
);
|
||||
}
|
||||
break;
|
||||
case 'openFile':
|
||||
await this.openFile(msg.filePath);
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
}
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(getErrorMessage(e), {
|
||||
fullMessage: getErrorStack(e)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
postMessage(msg: IntoResultsViewMsg): Thenable<boolean> {
|
||||
return this.getPanel().webview.postMessage(msg);
|
||||
}
|
||||
|
||||
private waitForPanelLoaded(): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
if (this._panelLoaded) {
|
||||
resolve();
|
||||
} else {
|
||||
this._panelLoadedCallBacks.push(resolve);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Show query results in webview panel.
|
||||
* @param fullQuery Evaluation info for the executed query.
|
||||
@@ -389,7 +326,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
forceReveal: WebviewReveal,
|
||||
shouldKeepOldResultsWhileRendering = false
|
||||
): Promise<void> {
|
||||
if (fullQuery.completedQuery.result.resultType !== messages.QueryResultType.SUCCESS) {
|
||||
if (!fullQuery.completedQuery.successful) {
|
||||
return;
|
||||
}
extensions/ql-vscode/src/json-rpc-server.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
import { Logger } from './logging';
|
||||
import * as cp from 'child_process';
|
||||
import { Disposable } from 'vscode';
|
||||
import { MessageConnection } from 'vscode-jsonrpc';
|
||||
|
||||
|
||||
/** A running query server process and its associated message connection. */
|
||||
export class ServerProcess implements Disposable {
|
||||
child: cp.ChildProcess;
|
||||
connection: MessageConnection;
|
||||
logger: Logger;
|
||||
|
||||
constructor(child: cp.ChildProcess, connection: MessageConnection, private name: string, logger: Logger) {
|
||||
this.child = child;
|
||||
this.connection = connection;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
void this.logger.log(`Stopping ${this.name}...`);
|
||||
this.connection.dispose();
|
||||
this.child.stdin!.end();
|
||||
this.child.stderr!.destroy();
|
||||
// TODO kill the process if it doesn't terminate after a certain time limit.
|
||||
|
||||
// On Windows, we usually have to terminate the process before closing its stdout.
|
||||
this.child.stdout!.destroy();
|
||||
void this.logger.log(`Stopped ${this.name}.`);
|
||||
}
|
||||
}
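A hedged sketch of how this class might be wired up; the binary path and arguments are placeholders, and StreamMessageReader/StreamMessageWriter are assumed to be exported by the vscode-jsonrpc version in use (newer versions expose them from 'vscode-jsonrpc/node'):

import * as cp from 'child_process';
import { createMessageConnection, StreamMessageReader, StreamMessageWriter } from 'vscode-jsonrpc';

// Spawn a hypothetical server binary and wire a JSON-RPC connection over its
// stdio; both handles are then owned by ServerProcess, whose dispose() above
// closes the connection and the child's streams.
const child = cp.spawn('/path/to/codeql', ['execute', 'query-server']);
const connection = createMessageConnection(
  new StreamMessageReader(child.stdout!),
  new StreamMessageWriter(child.stdin!)
);
connection.listen();
// const serverProcess = new ServerProcess(child, connection, 'Query server', logger);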
extensions/ql-vscode/src/legacy-query-server/legacyRunner.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
import { CancellationToken } from 'vscode';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { DatabaseItem } from '../databases';
|
||||
import { Dataset, deregisterDatabases, registerDatabases } from '../pure/legacy-messages';
|
||||
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
|
||||
import { QueryRunner } from '../queryRunner';
|
||||
import { QueryWithResults } from '../run-queries-shared';
|
||||
import { QueryServerClient } from './queryserver-client';
|
||||
import { clearCacheInDatabase, compileAndRunQueryAgainstDatabase } from './run-queries';
|
||||
import { upgradeDatabaseExplicit } from './upgrades';
|
||||
|
||||
export class LegacyQueryRunner extends QueryRunner {
|
||||
|
||||
|
||||
constructor(public readonly qs: QueryServerClient) {
|
||||
super();
|
||||
}
|
||||
|
||||
get cliServer() {
|
||||
return this.qs.cliServer;
|
||||
}
|
||||
|
||||
async restartQueryServer(progress: ProgressCallback, token: CancellationToken): Promise<void> {
|
||||
await this.qs.restartQueryServer(progress, token);
|
||||
}
|
||||
|
||||
onStart(callBack: (progress: ProgressCallback, token: CancellationToken) => Promise<void>) {
|
||||
this.qs.onDidStartQueryServer(callBack);
|
||||
}
|
||||
async clearCacheInDatabase(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
|
||||
await clearCacheInDatabase(this.qs, dbItem, progress, token);
|
||||
}
|
||||
async compileAndRunQueryAgainstDatabase(dbItem: DatabaseItem, initialInfo: InitialQueryInfo, queryStorageDir: string, progress: ProgressCallback, token: CancellationToken, templates?: Record<string, string>, queryInfo?: LocalQueryInfo): Promise<QueryWithResults> {
|
||||
return await compileAndRunQueryAgainstDatabase(this.qs.cliServer, this.qs, dbItem, initialInfo, queryStorageDir, progress, token, templates, queryInfo);
|
||||
}
|
||||
|
||||
async deregisterDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
|
||||
if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
|
||||
const databases: Dataset[] = [{
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default'
|
||||
}];
|
||||
await this.qs.sendRequest(deregisterDatabases, { databases }, token, progress);
|
||||
}
|
||||
}
|
||||
async registerDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
|
||||
if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
|
||||
const databases: Dataset[] = [{
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default'
|
||||
}];
|
||||
await this.qs.sendRequest(registerDatabases, { databases }, token, progress);
|
||||
}
|
||||
}
|
||||
|
||||
async upgradeDatabaseExplicit(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
|
||||
await upgradeDatabaseExplicit(this.qs, dbItem, progress, token);
|
||||
}
|
||||
|
||||
async clearPackCache(): Promise<void> {
|
||||
/**
|
||||
* Nothing needs to be done
|
||||
*/
|
||||
}
|
||||
}
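The point of this adapter is that callers depend only on the QueryRunner base class, so the same call site works with either the legacy or the new query server. A small hedged sketch (module paths taken from the imports above):

import { CancellationToken } from 'vscode';
import { ProgressCallback } from '../commandRunner';
import { QueryRunner } from '../queryRunner';

// Works with LegacyQueryRunner or the new-server runner alike.
async function restartServer(runner: QueryRunner, progress: ProgressCallback, token: CancellationToken): Promise<void> {
  await runner.restartQueryServer(progress, token);
}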
|
||||
@@ -1,49 +1,25 @@
|
||||
import * as cp from 'child_process';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { Disposable, CancellationToken, commands } from 'vscode';
|
||||
import { createMessageConnection, MessageConnection, RequestType } from 'vscode-jsonrpc';
|
||||
import * as cli from './cli';
|
||||
import { QueryServerConfig } from './config';
|
||||
import { Logger, ProgressReporter } from './logging';
|
||||
import { completeQuery, EvaluationResult, progress, ProgressMessage, WithProgressId } from './pure/messages';
|
||||
import * as messages from './pure/messages';
|
||||
import { ProgressCallback, ProgressTask } from './commandRunner';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { CancellationToken, commands } from 'vscode';
|
||||
import { createMessageConnection, RequestType } from 'vscode-jsonrpc';
|
||||
import * as cli from '../cli';
|
||||
import { QueryServerConfig } from '../config';
|
||||
import { Logger, ProgressReporter } from '../logging';
|
||||
import { completeQuery, EvaluationResult, progress, ProgressMessage, WithProgressId } from '../pure/legacy-messages';
|
||||
import * as messages from '../pure/legacy-messages';
|
||||
import { ProgressCallback, ProgressTask } from '../commandRunner';
|
||||
import { findQueryLogFile } from '../run-queries-shared';
|
||||
import { ServerProcess } from '../json-rpc-server';
|
||||
|
||||
type WithProgressReporting = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Thenable<void>;
|
||||
|
||||
type ServerOpts = {
|
||||
logger: Logger;
|
||||
contextStoragePath: string;
|
||||
}
|
||||
|
||||
/** A running query server process and its associated message connection. */
|
||||
class ServerProcess implements Disposable {
|
||||
child: cp.ChildProcess;
|
||||
connection: MessageConnection;
|
||||
logger: Logger;
|
||||
|
||||
constructor(child: cp.ChildProcess, connection: MessageConnection, logger: Logger) {
|
||||
this.child = child;
|
||||
this.connection = connection;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
void this.logger.log('Stopping query server...');
|
||||
this.connection.dispose();
|
||||
this.child.stdin!.end();
|
||||
this.child.stderr!.destroy();
|
||||
// TODO kill the process if it doesn't terminate after a certain time limit.
|
||||
|
||||
// On Windows, we usually have to terminate the process before closing its stdout.
|
||||
this.child.stdout!.destroy();
|
||||
void this.logger.log('Stopped query server.');
|
||||
}
|
||||
}
|
||||
|
||||
type WithProgressReporting = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Thenable<void>;
|
||||
|
||||
/**
|
||||
* Client that manages a query server process.
|
||||
* The server process is started upon initialization and tracked during its lifetime.
|
||||
@@ -200,7 +176,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
callback(res);
|
||||
}
|
||||
});
|
||||
this.serverProcess = new ServerProcess(child, connection, this.logger);
|
||||
this.serverProcess = new ServerProcess(child, connection, 'Query server', this.logger);
|
||||
// Ensure the server process is disposed together with this client.
|
||||
this.track(this.serverProcess);
|
||||
connection.listen();
|
||||
@@ -254,23 +230,3 @@ export class QueryServerClient extends DisposableObject {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function findQueryLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'query.log');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.jsonl');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.summary');
|
||||
}
|
||||
|
||||
export function findJsonQueryEvalLogSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.summary.jsonl');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogEndSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log-end.summary');
|
||||
}
extensions/ql-vscode/src/legacy-query-server/run-queries.ts (new file, 526 lines)
@@ -0,0 +1,526 @@
|
||||
import * as crypto from 'crypto';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import * as path from 'path';
|
||||
import {
|
||||
CancellationToken,
|
||||
Uri,
|
||||
} from 'vscode';
|
||||
import { ErrorCodes, ResponseError } from 'vscode-languageclient';
|
||||
|
||||
import * as cli from '../cli';
|
||||
import { DatabaseItem, } from '../databases';
|
||||
import {
|
||||
getOnDiskWorkspaceFolders,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogWarningMessage,
|
||||
tryGetQueryMetadata,
|
||||
upgradesTmpDir
|
||||
} from '../helpers';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { QueryMetadata } from '../pure/interface-types';
|
||||
import { logger } from '../logging';
|
||||
import * as messages from '../pure/legacy-messages';
|
||||
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
import { compileDatabaseUpgradeSequence, upgradeDatabaseExplicit } from './upgrades';
|
||||
import { QueryEvaluationInfo, QueryWithResults } from '../run-queries-shared';
|
||||
|
||||
/**
|
||||
* A collection of evaluation-time information about a query,
|
||||
* including the query itself, and where we have decided to put
|
||||
* temporary files associated with it, such as the compiled query
|
||||
* output and results.
|
||||
*/
|
||||
export class QueryInProgress {
|
||||
|
||||
public queryEvalInfo: QueryEvaluationInfo;
|
||||
/**
|
||||
* Note that in the {@link slurpQueryHistory} method, we create a QueryEvaluationInfo instance
|
||||
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||
*/
|
||||
constructor(
|
||||
readonly querySaveDir: string,
|
||||
readonly dbItemPath: string,
|
||||
databaseHasMetadataFile: boolean,
|
||||
readonly queryDbscheme: string, // the dbscheme file the query expects, based on library path resolution
|
||||
readonly quickEvalPosition?: messages.Position,
|
||||
readonly metadata?: QueryMetadata,
|
||||
readonly templates?: Record<string, string>,
|
||||
) {
|
||||
this.queryEvalInfo = new QueryEvaluationInfo(querySaveDir, dbItemPath, databaseHasMetadataFile, quickEvalPosition, metadata);
|
||||
/**/
|
||||
}
|
||||
|
||||
get compiledQueryPath() {
|
||||
return path.join(this.querySaveDir, 'compiledQuery.qlo');
|
||||
}
|
||||
|
||||
|
||||
async run(
|
||||
qs: qsClient.QueryServerClient,
|
||||
upgradeQlo: string | undefined,
|
||||
availableMlModels: cli.MlModelInfo[],
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
queryInfo?: LocalQueryInfo,
|
||||
): Promise<messages.EvaluationResult> {
|
||||
if (!dbItem.contents || dbItem.error) {
|
||||
throw new Error('Can\'t run query on invalid database.');
|
||||
}
|
||||
|
||||
let result: messages.EvaluationResult | null = null;
|
||||
|
||||
const callbackId = qs.registerCallback(res => {
|
||||
result = {
|
||||
...res,
|
||||
logFileLocation: this.queryEvalInfo.logPath
|
||||
};
|
||||
});
|
||||
|
||||
const availableMlModelUris: messages.MlModel[] = availableMlModels.map(model => ({ uri: Uri.file(model.path).toString(true) }));
|
||||
|
||||
const queryToRun: messages.QueryToRun = {
|
||||
resultsPath: this.queryEvalInfo.resultsPaths.resultsPath,
|
||||
qlo: Uri.file(this.compiledQueryPath).toString(),
|
||||
compiledUpgrade: upgradeQlo && Uri.file(upgradeQlo).toString(),
|
||||
allowUnknownTemplates: true,
|
||||
templateValues: createSimpleTemplates(this.templates),
|
||||
availableMlModels: availableMlModelUris,
|
||||
id: callbackId,
|
||||
timeoutSecs: qs.config.timeoutSecs,
|
||||
};
|
||||
|
||||
const dataset: messages.Dataset = {
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default'
|
||||
};
|
||||
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
|
||||
await qs.sendRequest(messages.startLog, {
|
||||
db: dataset,
|
||||
logPath: this.queryEvalInfo.evalLogPath,
|
||||
});
|
||||
|
||||
}
|
||||
const params: messages.EvaluateQueriesParams = {
|
||||
db: dataset,
|
||||
evaluateId: callbackId,
|
||||
queries: [queryToRun],
|
||||
stopOnError: false,
|
||||
useSequenceHint: false
|
||||
};
|
||||
try {
|
||||
await qs.sendRequest(messages.runQueries, params, token, progress);
|
||||
if (qs.config.customLogDirectory) {
|
||||
void showAndLogWarningMessage(
|
||||
`Custom log directories are no longer supported. The "codeQL.runningQueries.customLogDirectory" setting is deprecated. Unset the setting to stop seeing this message. Query logs saved to ${this.queryEvalInfo.logPath}.`
|
||||
);
|
||||
}
|
||||
} finally {
|
||||
qs.unRegisterCallback(callbackId);
|
||||
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
|
||||
await qs.sendRequest(messages.endLog, {
|
||||
db: dataset,
|
||||
logPath: this.queryEvalInfo.evalLogPath,
|
||||
});
|
||||
if (await this.queryEvalInfo.hasEvalLog()) {
|
||||
await this.queryEvalInfo.addQueryLogs(queryInfo, qs.cliServer, qs.logger);
|
||||
} else {
|
||||
void showAndLogWarningMessage(`Failed to write structured evaluator log to ${this.queryEvalInfo.evalLogPath}.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result || {
|
||||
evaluationTime: 0,
|
||||
message: 'No result from server',
|
||||
queryId: -1,
|
||||
runId: callbackId,
|
||||
resultType: messages.QueryResultType.OTHER_ERROR
|
||||
};
|
||||
}
|
||||
|
||||
async compile(
|
||||
qs: qsClient.QueryServerClient,
|
||||
program: messages.QlProgram,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
): Promise<messages.CompilationMessage[]> {
|
||||
let compiled: messages.CheckQueryResult | undefined;
|
||||
try {
|
||||
const target = this.quickEvalPosition ? {
|
||||
quickEval: { quickEvalPos: this.quickEvalPosition }
|
||||
} : { query: {} };
|
||||
const params: messages.CompileQueryParams = {
|
||||
compilationOptions: {
|
||||
computeNoLocationUrls: true,
|
||||
failOnWarnings: false,
|
||||
fastCompilation: false,
|
||||
includeDilInQlo: true,
|
||||
localChecking: false,
|
||||
noComputeGetUrl: false,
|
||||
noComputeToString: false,
|
||||
computeDefaultStrings: true,
|
||||
emitDebugInfo: true
|
||||
},
|
||||
extraOptions: {
|
||||
timeoutSecs: qs.config.timeoutSecs
|
||||
},
|
||||
queryToCheck: program,
|
||||
resultPath: this.compiledQueryPath,
|
||||
target,
|
||||
};
|
||||
|
||||
compiled = await qs.sendRequest(messages.compileQuery, params, token, progress);
|
||||
} finally {
|
||||
void qs.logger.log(' - - - COMPILATION DONE - - - ', { additionalLogLocation: this.queryEvalInfo.logPath });
|
||||
}
|
||||
return (compiled?.messages || []).filter(msg => msg.severity === messages.Severity.ERROR);
|
||||
}
|
||||
}
|
||||
|
||||
export async function clearCacheInDatabase(
|
||||
qs: qsClient.QueryServerClient,
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
): Promise<messages.ClearCacheResult> {
|
||||
if (dbItem.contents === undefined) {
|
||||
throw new Error('Can\'t clear the cache in an invalid database.');
|
||||
}
|
||||
|
||||
const db: messages.Dataset = {
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default',
|
||||
};
|
||||
|
||||
const params: messages.ClearCacheParams = {
|
||||
dryRun: false,
|
||||
db,
|
||||
};
|
||||
|
||||
return qs.sendRequest(messages.clearCache, params, token, progress);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Compare the dbscheme implied by the query `query` and that of the current database.
|
||||
* - If they are compatible, do nothing.
|
||||
* - If they are incompatible but the database can be upgraded, suggest that upgrade.
|
||||
* - If they are incompatible and the database cannot be upgraded, throw an error.
|
||||
*/
|
||||
async function checkDbschemeCompatibility(
|
||||
cliServer: cli.CodeQLCliServer,
|
||||
qs: qsClient.QueryServerClient,
|
||||
query: QueryInProgress,
|
||||
qlProgram: messages.QlProgram,
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
): Promise<void> {
|
||||
const searchPath = getOnDiskWorkspaceFolders();
|
||||
|
||||
if (dbItem.contents?.dbSchemeUri !== undefined) {
|
||||
const { finalDbscheme } = await cliServer.resolveUpgrades(dbItem.contents.dbSchemeUri.fsPath, searchPath, false);
|
||||
const hash = async function(filename: string): Promise<string> {
|
||||
return crypto.createHash('sha256').update(await fs.readFile(filename)).digest('hex');
|
||||
};
|
||||
|
||||
// At this point, we have learned about three dbschemes:
|
||||
|
||||
// the dbscheme of the actual database we're querying.
|
||||
const dbschemeOfDb = await hash(dbItem.contents.dbSchemeUri.fsPath);
|
||||
|
||||
// the dbscheme of the query we're running, including the library we've resolved it to use.
|
||||
const dbschemeOfLib = await hash(query.queryDbscheme);
|
||||
|
||||
// the database we're able to upgrade to
|
||||
const upgradableTo = await hash(finalDbscheme);
|
||||
|
||||
if (upgradableTo != dbschemeOfLib) {
|
||||
reportNoUpgradePath(qlProgram, query);
|
||||
}
|
||||
|
||||
if (upgradableTo == dbschemeOfLib &&
|
||||
dbschemeOfDb != dbschemeOfLib) {
|
||||
// Try to upgrade the database
|
||||
await upgradeDatabaseExplicit(
|
||||
qs,
|
||||
dbItem,
|
||||
progress,
|
||||
token
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function reportNoUpgradePath(qlProgram: messages.QlProgram, query: QueryInProgress): void {
|
||||
throw new Error(
|
||||
`Query ${qlProgram.queryPath} expects database scheme ${query.queryDbscheme}, but the current database has a different scheme, and no database upgrades are available. The current database scheme may be newer than the CodeQL query libraries in your workspace.\n\nPlease try using a newer version of the query libraries.`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compile a non-destructive upgrade.
|
||||
*/
|
||||
async function compileNonDestructiveUpgrade(
|
||||
qs: qsClient.QueryServerClient,
|
||||
upgradeTemp: tmp.DirectoryResult,
|
||||
query: QueryInProgress,
|
||||
qlProgram: messages.QlProgram,
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
): Promise<string> {
|
||||
|
||||
if (!dbItem?.contents?.dbSchemeUri) {
|
||||
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||
}
|
||||
|
||||
// When packaging is used, dependencies may exist outside of the workspace and they are always on the resolved search path.
|
||||
// When packaging is not used, all dependencies are in the workspace.
|
||||
const upgradesPath = (await qs.cliServer.cliConstraints.supportsPackaging())
|
||||
? qlProgram.libraryPath
|
||||
: getOnDiskWorkspaceFolders();
|
||||
|
||||
const { scripts, matchesTarget } = await qs.cliServer.resolveUpgrades(
|
||||
dbItem.contents.dbSchemeUri.fsPath,
|
||||
upgradesPath,
|
||||
true,
|
||||
query.queryDbscheme
|
||||
);
|
||||
|
||||
if (!matchesTarget) {
|
||||
reportNoUpgradePath(qlProgram, query);
|
||||
}
|
||||
const result = await compileDatabaseUpgradeSequence(qs, dbItem, scripts, upgradeTemp, progress, token);
|
||||
if (result.compiledUpgrade === undefined) {
|
||||
const error = result.error || '[no error message available]';
|
||||
throw new Error(error);
|
||||
}
|
||||
// We can upgrade to the actual target
|
||||
qlProgram.dbschemePath = query.queryDbscheme;
|
||||
// We are new enough that we will always support single file upgrades.
|
||||
return result.compiledUpgrade;
|
||||
}
|
||||
|
||||
|
||||
|
||||
export async function compileAndRunQueryAgainstDatabase(
|
||||
cliServer: cli.CodeQLCliServer,
|
||||
qs: qsClient.QueryServerClient,
|
||||
dbItem: DatabaseItem,
|
||||
initialInfo: InitialQueryInfo,
|
||||
queryStorageDir: string,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
templates?: Record<string, string>,
|
||||
queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
|
||||
): Promise<QueryWithResults> {
|
||||
if (!dbItem.contents || !dbItem.contents.dbSchemeUri) {
|
||||
throw new Error(`Database ${dbItem.databaseUri} does not have a CodeQL database scheme.`);
|
||||
}
|
||||
|
||||
// Get the workspace folder paths.
|
||||
const diskWorkspaceFolders = getOnDiskWorkspaceFolders();
|
||||
// Figure out the library path for the query.
|
||||
const packConfig = await cliServer.resolveLibraryPath(diskWorkspaceFolders, initialInfo.queryPath);
|
||||
|
||||
if (!packConfig.dbscheme) {
|
||||
throw new Error('Could not find a database scheme for this query. Please check that you have a valid qlpack.yml file for this query, which refers to a database scheme either in the `dbscheme` field or through one of its dependencies.');
|
||||
}
|
||||
|
||||
// Check whether the query has an entirely different schema from the
|
||||
// database. (Queries that merely need the database to be upgraded
|
||||
// won't trigger this check)
|
||||
// This test will produce confusing results if we ever change the name of the database schema files.
|
||||
const querySchemaName = path.basename(packConfig.dbscheme);
|
||||
const dbSchemaName = path.basename(dbItem.contents.dbSchemeUri.fsPath);
|
||||
if (querySchemaName != dbSchemaName) {
|
||||
void logger.log(`Query schema was ${querySchemaName}, but database schema was ${dbSchemaName}.`);
|
||||
throw new Error(`The query ${path.basename(initialInfo.queryPath)} cannot be run against the selected database (${dbItem.name}): their target languages are different. Please select a different database and try again.`);
|
||||
}
|
||||
|
||||
const qlProgram: messages.QlProgram = {
|
||||
// The project of the current document determines which library path
|
||||
// we use. The `libraryPath` field in this server message is relative
|
||||
// to the workspace root, not to the project root.
|
||||
libraryPath: packConfig.libraryPath,
|
||||
// Since we are compiling and running a query against a database,
|
||||
// we use the database's DB scheme here instead of the DB scheme
|
||||
// from the current document's project.
|
||||
dbschemePath: dbItem.contents.dbSchemeUri.fsPath,
|
||||
queryPath: initialInfo.queryPath
|
||||
};
|
||||
|
||||
// Read the query metadata if possible, to use in the UI.
|
||||
const metadata = await tryGetQueryMetadata(cliServer, qlProgram.queryPath);
|
||||
|
||||
let availableMlModels: cli.MlModelInfo[] = [];
|
||||
if (!await cliServer.cliConstraints.supportsResolveMlModels()) {
|
||||
void logger.log('Resolving ML models is unsupported by this version of the CLI. Running the query without any ML models.');
|
||||
} else {
|
||||
try {
|
||||
availableMlModels = (await cliServer.resolveMlModels(diskWorkspaceFolders, initialInfo.queryPath)).models;
|
||||
if (availableMlModels.length) {
|
||||
void logger.log(`Found available ML models at the following paths: ${availableMlModels.map(x => `'${x.path}'`).join(', ')}.`);
|
||||
} else {
|
||||
void logger.log('Did not find any available ML models.');
|
||||
}
|
||||
} catch (e) {
|
||||
const message = `Couldn't resolve available ML models for ${qlProgram.queryPath}. Running the ` +
|
||||
`query without any ML models: ${e}.`;
|
||||
void showAndLogErrorMessage(message);
|
||||
}
|
||||
}
|
||||
|
||||
const hasMetadataFile = (await dbItem.hasMetadataFile());
|
||||
const query = new QueryInProgress(
|
||||
path.join(queryStorageDir, initialInfo.id),
|
||||
dbItem.databaseUri.fsPath,
|
||||
hasMetadataFile,
|
||||
packConfig.dbscheme,
|
||||
initialInfo.quickEvalPosition,
|
||||
metadata,
|
||||
templates
|
||||
);
|
||||
await query.queryEvalInfo.createTimestampFile();
|
||||
|
||||
let upgradeDir: tmp.DirectoryResult | undefined;
|
||||
try {
|
||||
let upgradeQlo;
|
||||
if (await cliServer.cliConstraints.supportsNonDestructiveUpgrades()) {
|
||||
upgradeDir = await tmp.dir({ dir: upgradesTmpDir, unsafeCleanup: true });
|
||||
upgradeQlo = await compileNonDestructiveUpgrade(qs, upgradeDir, query, qlProgram, dbItem, progress, token);
|
||||
} else {
|
||||
await checkDbschemeCompatibility(cliServer, qs, query, qlProgram, dbItem, progress, token);
|
||||
}
|
||||
let errors;
|
||||
try {
|
||||
errors = await query.compile(qs, qlProgram, progress, token);
|
||||
} catch (e) {
|
||||
if (e instanceof ResponseError && e.code == ErrorCodes.RequestCancelled) {
|
||||
return createSyntheticResult(query, 'Query cancelled');
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length === 0) {
|
||||
const result = await query.run(qs, upgradeQlo, availableMlModels, dbItem, progress, token, queryInfo);
|
||||
if (result.resultType !== messages.QueryResultType.SUCCESS) {
|
||||
const message = result.message || 'Failed to run query';
|
||||
void logger.log(message);
|
||||
void showAndLogErrorMessage(message);
|
||||
}
|
||||
const message = formatLegacyMessage(result);
|
||||
|
||||
return {
|
||||
query: query.queryEvalInfo,
|
||||
message,
|
||||
result,
|
||||
successful: result.resultType == messages.QueryResultType.SUCCESS,
|
||||
logFileLocation: result.logFileLocation,
|
||||
dispose: () => {
|
||||
qs.logger.removeAdditionalLogLocation(result.logFileLocation);
|
||||
}
|
||||
};
|
||||
} else {
|
||||
// Error dialogs are limited in size and scrollability,
|
||||
// so we include a general description of the problem,
|
||||
// and direct the user to the output window for the detailed compilation messages.
|
||||
// However we don't show quick eval errors there so we need to display them anyway.
|
||||
void qs.logger.log(
|
||||
`Failed to compile query ${initialInfo.queryPath} against database scheme ${qlProgram.dbschemePath}:`,
|
||||
{ additionalLogLocation: query.queryEvalInfo.logPath }
|
||||
);
|
||||
|
||||
const formattedMessages: string[] = [];
|
||||
|
||||
for (const error of errors) {
|
||||
const message = error.message || '[no error message available]';
|
||||
const formatted = `ERROR: ${message} (${error.position.fileName}:${error.position.line}:${error.position.column}:${error.position.endLine}:${error.position.endColumn})`;
|
||||
formattedMessages.push(formatted);
|
||||
void qs.logger.log(formatted, { additionalLogLocation: query.queryEvalInfo.logPath });
|
||||
}
|
||||
if (initialInfo.isQuickEval && formattedMessages.length <= 2) {
|
||||
// If there are more than 2 error messages, they will not be displayed well in a popup
|
||||
// and will be trimmed by the function displaying the error popup. Accordingly, we only
|
||||
// try to show the errors if there are 2 or less, otherwise we direct the user to the log.
|
||||
void showAndLogErrorMessage('Quick evaluation compilation failed: ' + formattedMessages.join('\n'));
|
||||
} else {
|
||||
void showAndLogErrorMessage((initialInfo.isQuickEval ? 'Quick evaluation' : 'Query') + compilationFailedErrorTail);
|
||||
}
|
||||
return createSyntheticResult(query, 'Query had compilation errors');
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
await upgradeDir?.cleanup();
|
||||
} catch (e) {
|
||||
void qs.logger.log(
|
||||
`Could not clean up the upgrades dir. Reason: ${getErrorMessage(e)}`,
|
||||
{ additionalLogLocation: query.queryEvalInfo.logPath }
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const compilationFailedErrorTail = ' compilation failed. Please make sure there are no errors in the query, the database is up to date,' +
|
||||
' and the query and database use the same target language. For more details on the error, go to View > Output,' +
|
||||
' and choose CodeQL Query Server from the dropdown.';
|
||||
|
||||
export function formatLegacyMessage(result: messages.EvaluationResult) {
|
||||
switch (result.resultType) {
|
||||
case messages.QueryResultType.CANCELLATION:
|
||||
return `cancelled after ${Math.round(result.evaluationTime / 1000)} seconds`;
|
||||
case messages.QueryResultType.OOM:
|
||||
return 'out of memory';
|
||||
case messages.QueryResultType.SUCCESS:
|
||||
return `finished in ${Math.round(result.evaluationTime / 1000)} seconds`;
|
||||
case messages.QueryResultType.TIMEOUT:
|
||||
return `timed out after ${Math.round(result.evaluationTime / 1000)} seconds`;
|
||||
case messages.QueryResultType.OTHER_ERROR:
|
||||
default:
|
||||
return result.message ? `failed: ${result.message}` : 'failed';
|
||||
}
|
||||
}
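For example, with a hypothetical evaluation time of 123400 ms, a TIMEOUT result is formatted as:

const evaluationTime = 123_400; // milliseconds, hypothetical
const text = `timed out after ${Math.round(evaluationTime / 1000)} seconds`;
// text === 'timed out after 123 seconds'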
|
||||
|
||||
/**
|
||||
* Create a synthetic result for a query that failed to compile.
|
||||
*/
|
||||
function createSyntheticResult(
|
||||
query: QueryInProgress,
|
||||
message: string,
|
||||
): QueryWithResults {
|
||||
return {
|
||||
query: query.queryEvalInfo,
|
||||
message,
|
||||
result: {
|
||||
evaluationTime: 0,
|
||||
queryId: 0,
|
||||
resultType: messages.QueryResultType.OTHER_ERROR,
|
||||
message,
|
||||
runId: 0,
|
||||
},
|
||||
successful: false,
|
||||
dispose: () => { /**/ },
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
function createSimpleTemplates(templates: Record<string, string> | undefined): messages.TemplateDefinitions | undefined {
|
||||
if (!templates) {
|
||||
return undefined;
|
||||
}
|
||||
const result: messages.TemplateDefinitions = {};
|
||||
for (const key of Object.keys(templates)) {
|
||||
result[key] = {
|
||||
values: {
|
||||
tuples: [[{ stringValue: templates[key] }]]
|
||||
}
|
||||
};
|
||||
}
|
||||
return result;
|
||||
}
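For a concrete (hypothetical) input, the wrapping performed above looks like this:

const input = { selectedSourceFile: '/work/src/index.js' }; // hypothetical template values
// createSimpleTemplates(input) yields:
// {
//   selectedSourceFile: {
//     values: { tuples: [[{ stringValue: '/work/src/index.js' }]] }
//   }
// }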
|
||||
@@ -1,13 +1,12 @@
|
||||
import * as vscode from 'vscode';
|
||||
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage, tmpDir } from './helpers';
|
||||
import { ProgressCallback, UserCancellationException } from './commandRunner';
|
||||
import { logger } from './logging';
|
||||
import * as messages from './pure/messages';
|
||||
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage, tmpDir } from '../helpers';
|
||||
import { ProgressCallback, UserCancellationException } from '../commandRunner';
|
||||
import { logger } from '../logging';
|
||||
import * as messages from '../pure/legacy-messages';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import * as path from 'path';
|
||||
import * as semver from 'semver';
|
||||
import { DatabaseItem } from './databases';
|
||||
import { DatabaseItem } from '../databases';
|
||||
|
||||
/**
|
||||
* Maximum number of lines to include from database upgrade message,
|
||||
@@ -16,17 +15,6 @@ import { DatabaseItem } from './databases';
|
||||
*/
|
||||
const MAX_UPGRADE_MESSAGE_LINES = 10;
|
||||
|
||||
/**
|
||||
* Check that we support non-destructive upgrades.
|
||||
*
|
||||
* This requires three features: the ability to compile an upgrade sequence; the ability to
* run non-destructive upgrades as a query; and the ability to specify a target when
* resolving upgrades. We check for a version of the CodeQL CLI that has all three features.
|
||||
*/
|
||||
export async function hasNondestructiveUpgradeCapabilities(qs: qsClient.QueryServerClient): Promise<boolean> {
|
||||
return semver.gte(await qs.cliServer.getVersion(), '2.4.2');
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Compile a database upgrade sequence.
|
||||
@@ -43,7 +31,7 @@ export async function compileDatabaseUpgradeSequence(
|
||||
if (dbItem.contents === undefined || dbItem.contents.dbSchemeUri === undefined) {
|
||||
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||
}
|
||||
if (!await hasNondestructiveUpgradeCapabilities(qs)) {
|
||||
if (!await qs.cliServer.cliConstraints.supportsNonDestructiveUpgrades()) {
|
||||
throw new Error('The version of codeql is too old to run non-destructive upgrades.');
|
||||
}
|
||||
// If possible just compile the upgrade sequence
|
||||
@@ -205,7 +193,14 @@ export async function upgradeDatabaseExplicit(
|
||||
void qs.logger.log('Running the following database upgrade:');
|
||||
|
||||
getUpgradeDescriptions(compileUpgradeResult.compiledUpgrades).map(s => s.description).join('\n');
|
||||
return await runDatabaseUpgrade(qs, dbItem, compileUpgradeResult.compiledUpgrades, progress, token);
|
||||
const result = await runDatabaseUpgrade(qs, dbItem, compileUpgradeResult.compiledUpgrades, progress, token);
|
||||
|
||||
// TODO Can remove the next lines when https://github.com/github/codeql-team/issues/1241 is fixed
|
||||
// restart the query server to avoid a bug in the CLI where the upgrade is applied, but the old dbscheme
|
||||
// is still cached in memory.
|
||||
|
||||
await qs.restartQueryServer(progress, token);
|
||||
return result;
|
||||
}
|
||||
catch (e) {
|
||||
void showAndLogErrorMessage(`Database upgrade failed: ${e}`);
extensions/ql-vscode/src/log-insights/join-order.ts (new file, 460 lines)
@@ -0,0 +1,460 @@
|
||||
import * as I from 'immutable';
|
||||
import { EvaluationLogProblemReporter, EvaluationLogScanner, EvaluationLogScannerProvider } from './log-scanner';
|
||||
import { InLayer, ComputeRecursive, SummaryEvent, PipelineRun, ComputeSimple } from './log-summary';
|
||||
|
||||
const DEFAULT_WARNING_THRESHOLD = 50;
|
||||
|
||||
/**
|
||||
* Like `max`, but returns 0 if no meaningful maximum can be computed.
|
||||
*/
|
||||
function safeMax(it?: Iterable<number>) {
|
||||
const m = Math.max(...(it || []));
|
||||
return Number.isFinite(m) ? m : 0;
|
||||
}
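The guard matters because spreading an empty iterable into Math.max produces -Infinity:

const noCounts: number[] = [];
Math.max(...noCounts); // -Infinity, not a usable maximum
// safeMax(noCounts) and safeMax(undefined) return 0 instead, so events without
// tuple counts simply drop out of the metric.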
|
||||
|
||||
/**
|
||||
* Compute a key for the maps that are sent to report generation.
|
||||
* Should only be used on events that are known to define queryCausingWork.
|
||||
*/
|
||||
function makeKey(
|
||||
queryCausingWork: string | undefined,
|
||||
predicate: string,
|
||||
suffix = ''
|
||||
): string {
|
||||
if (queryCausingWork === undefined) {
|
||||
throw new Error(
|
||||
'queryCausingWork was not defined on an event we expected it to be defined for!'
|
||||
);
|
||||
}
|
||||
return `${queryCausingWork}:${predicate}${suffix ? ' ' + suffix : ''}`;
|
||||
}
|
||||
|
||||
const DEPENDENT_PREDICATES_REGEXP = (() => {
|
||||
const regexps = [
|
||||
// SCAN id
|
||||
String.raw`SCAN\s+([0-9a-zA-Z:#_]+)\s`,
|
||||
// JOIN id WITH id
|
||||
String.raw`JOIN\s+([0-9a-zA-Z:#_]+)\s+WITH\s+([0-9a-zA-Z:#_]+)\s`,
|
||||
// AGGREGATE id, id
|
||||
String.raw`AGGREGATE\s+([0-9a-zA-Z:#_]+)\s*,\s+([0-9a-zA-Z:#_]+)`,
|
||||
// id AND NOT id
|
||||
String.raw`([0-9a-zA-Z:#_]+)\s+AND\s+NOT\s+([0-9a-zA-Z:#_]+)`,
|
||||
// INVOKE HIGHER-ORDER RELATION rel ON <id, ..., id>
|
||||
String.raw`INVOKE\s+HIGHER-ORDER\s+RELATION\s[^\s]+\sON\s+<([0-9a-zA-Z:#_<>]+)((?:,[0-9a-zA-Z:#_<>]+)*)>`,
|
||||
// SELECT id
|
||||
String.raw`SELECT\s+([0-9a-zA-Z:#_]+)`
|
||||
];
|
||||
return new RegExp(
|
||||
`${String.raw`\{[0-9]+\}\s+[0-9a-zA-Z]+\s=\s(?:` + regexps.join('|')})`
|
||||
);
|
||||
})();
|
||||
|
||||
function getDependentPredicates(operations: string[]): I.List<string> {
|
||||
return I.List(operations).flatMap(operation => {
|
||||
const matches = DEPENDENT_PREDICATES_REGEXP.exec(operation.trim());
|
||||
if (matches !== null) {
|
||||
return I.List(matches)
|
||||
.rest() // Skip the first group as it's just the entire string
|
||||
.filter(x => !!x && !x.match('r[0-9]+|PRIMITIVE')) // Only keep the references to predicates.
|
||||
.flatMap(x => x.split(',')) // Group 2 in the INVOKE HIGHER_ORDER RELATION case is a comma-separated list of identifiers.
|
||||
.filter(x => !!x); // Remove empty strings
|
||||
} else {
|
||||
return I.List();
|
||||
}
|
||||
});
|
||||
}
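As a standalone illustration of the kind of pretty-printed RA line this regexp targets (the sample line and names are hypothetical, and only the JOIN alternative is reproduced here):

const sample = '{2} r5 = JOIN Project::foo#abc WITH Project::bar#def ON FIRST 1';
const joinPattern = /\{[0-9]+\}\s+[0-9a-zA-Z]+\s=\sJOIN\s+([0-9a-zA-Z:#_]+)\s+WITH\s+([0-9a-zA-Z:#_]+)\s/;
const match = joinPattern.exec(sample);
console.log(match?.slice(1)); // ['Project::foo#abc', 'Project::bar#def'] — the dependent predicates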
|
||||
|
||||
function getMainHash(event: InLayer | ComputeRecursive): string {
|
||||
switch (event.evaluationStrategy) {
|
||||
case 'IN_LAYER':
|
||||
return event.mainHash;
|
||||
case 'COMPUTE_RECURSIVE':
|
||||
return event.raHash;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sum arrays a and b element-wise. The shorter array is padded with 0s if the arrays are not the same length.
|
||||
*/
|
||||
function pointwiseSum(a: Int32Array, b: Int32Array, problemReporter: EvaluationLogProblemReporter): Int32Array {
|
||||
function reportIfInconsistent(ai: number, bi: number) {
|
||||
if (ai === -1 && bi !== -1) {
|
||||
problemReporter.log(
|
||||
`Operation was not evaluated in the first pipeline, but it was evaluated in the accumulated pipeline (with tuple count ${bi}).`
|
||||
);
|
||||
}
|
||||
if (ai !== -1 && bi === -1) {
|
||||
problemReporter.log(
|
||||
`Operation was evaluated in the first pipeline (with tuple count ${ai}), but it was not evaluated in the accumulated pipeline.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const length = Math.max(a.length, b.length);
|
||||
const result = new Int32Array(length);
|
||||
for (let i = 0; i < length; i++) {
|
||||
const ai = a[i] || 0;
|
||||
const bi = b[i] || 0;
|
||||
// -1 is used to represent the absence of a tuple count for a line in the pretty-printed RA (e.g. an empty line), so we ignore those.
|
||||
if (i < a.length && i < b.length && (ai === -1 || bi === -1)) {
|
||||
result[i] = -1;
|
||||
reportIfInconsistent(ai, bi);
|
||||
} else {
|
||||
result[i] = ai + bi;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
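A small worked example of the padding and -1 handling (values are hypothetical):

const first = Int32Array.from([3, 10, -1]);
const accumulated = Int32Array.from([2, 4]);
// pointwiseSum(first, accumulated, reporter) yields Int32Array [5, 14, -1]:
// the shorter array is treated as zero-padded, and the trailing -1 marker
// (no tuple count for that RA line) is preserved rather than summed away.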
|
||||
|
||||
function pushValue<K, V>(m: Map<K, V[]>, k: K, v: V) {
|
||||
if (!m.has(k)) {
|
||||
m.set(k, []);
|
||||
}
|
||||
m.get(k)!.push(v);
|
||||
return m;
|
||||
}
|
||||
|
||||
function computeJoinOrderBadness(
|
||||
maxTupleCount: number,
|
||||
maxDependentPredicateSize: number,
|
||||
resultSize: number
|
||||
): number {
|
||||
return maxTupleCount / Math.max(maxDependentPredicateSize, resultSize);
|
||||
}
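With hypothetical numbers, a pipeline that peaks at 1,000,000 tuples while its largest dependent predicate has 10,000 rows and its result has 2,000 rows scores:

const badness = 1_000_000 / Math.max(10_000, 2_000); // 100
// 100 >= DEFAULT_WARNING_THRESHOLD (50), so this join order would be reported.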
|
||||
|
||||
/**
|
||||
* A bucket contains the pointwise sum of the tuple counts, result sizes, and dependent predicate sizes.
|
||||
* For each (predicate, order) in an SCC, we will compute a bucket.
|
||||
*/
|
||||
interface Bucket {
|
||||
tupleCounts: Int32Array;
|
||||
resultSize: number;
|
||||
dependentPredicateSizes: I.Map<string, number>;
|
||||
}
|
||||
|
||||
class JoinOrderScanner implements EvaluationLogScanner {
|
||||
// Map a predicate hash to its result size
|
||||
private readonly predicateSizes = new Map<string, number>();
|
||||
private readonly layerEvents = new Map<string, (ComputeRecursive | InLayer)[]>();
|
||||
// Map a key of the form 'query-with-demand : predicate name' to its badness input.
|
||||
private readonly maxTupleCountMap = new Map<string, number[]>();
|
||||
private readonly resultSizeMap = new Map<string, number[]>();
|
||||
private readonly maxDependentPredicateSizeMap = new Map<string, number[]>();
|
||||
private readonly joinOrderMetricMap = new Map<string, number>();
|
||||
|
||||
constructor(
|
||||
private readonly problemReporter: EvaluationLogProblemReporter,
|
||||
private readonly warningThreshold: number) {
|
||||
}
|
||||
|
||||
public onEvent(event: SummaryEvent): void {
|
||||
if (
|
||||
event.completionType !== undefined &&
|
||||
event.completionType !== 'SUCCESS'
|
||||
) {
|
||||
return; // Skip any evaluation that wasn't successful
|
||||
}
|
||||
|
||||
this.recordPredicateSizes(event);
|
||||
this.computeBadnessMetric(event);
|
||||
}
|
||||
|
||||
public onDone(): void {
|
||||
void this;
|
||||
}
|
||||
|
||||
private recordPredicateSizes(event: SummaryEvent): void {
|
||||
switch (event.evaluationStrategy) {
|
||||
case 'EXTENSIONAL':
|
||||
case 'COMPUTED_EXTENSIONAL':
|
||||
case 'COMPUTE_SIMPLE':
|
||||
case 'CACHACA':
|
||||
case 'CACHE_HIT': {
|
||||
this.predicateSizes.set(event.raHash, event.resultSize);
|
||||
break;
|
||||
}
|
||||
case 'SENTINEL_EMPTY': {
|
||||
this.predicateSizes.set(event.raHash, 0);
|
||||
break;
|
||||
}
|
||||
case 'COMPUTE_RECURSIVE':
|
||||
case 'IN_LAYER': {
|
||||
this.predicateSizes.set(event.raHash, event.resultSize);
|
||||
// layerEvents are indexed by the mainHash.
|
||||
const hash = getMainHash(event);
|
||||
if (!this.layerEvents.has(hash)) {
|
||||
this.layerEvents.set(hash, []);
|
||||
}
|
||||
this.layerEvents.get(hash)!.push(event);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private reportProblemIfNecessary(event: SummaryEvent, iteration: number, metric: number): void {
|
||||
if (metric >= this.warningThreshold) {
|
||||
this.problemReporter.reportProblem(event.predicateName, event.raHash, iteration,
|
||||
`Relation '${event.predicateName}' has an inefficient join order. Its join order metric is ${metric.toFixed(2)}, which is larger than the threshold of ${this.warningThreshold.toFixed(2)}.`);
|
||||
}
|
||||
}
|
||||
|
||||
private computeBadnessMetric(event: SummaryEvent): void {
|
||||
if (
|
||||
event.completionType !== undefined &&
|
||||
event.completionType !== 'SUCCESS'
|
||||
) {
|
||||
return; // Skip any evaluation that wasn't successful
|
||||
}
|
||||
switch (event.evaluationStrategy) {
|
||||
case 'COMPUTE_SIMPLE': {
|
||||
if (!event.pipelineRuns) {
|
||||
// skip if the optional pipelineRuns field is not present.
|
||||
break;
|
||||
}
|
||||
// Compute the badness metric for a non-recursive predicate. The metric in this case is defined as:
|
||||
// badness = (max tuple count in the pipeline) / (largest predicate this pipeline depends on)
|
||||
const key = makeKey(event.queryCausingWork, event.predicateName);
|
||||
const resultSize = event.resultSize;
|
||||
|
||||
// There is only one entry in `pipelineRuns` if it's a non-recursive predicate.
|
||||
const { maxTupleCount, maxDependentPredicateSize } =
|
||||
this.badnessInputsForNonRecursiveDelta(event.pipelineRuns[0], event);
|
||||
|
||||
if (maxDependentPredicateSize > 0) {
|
||||
pushValue(this.maxTupleCountMap, key, maxTupleCount);
|
||||
pushValue(this.resultSizeMap, key, resultSize);
|
||||
pushValue(
|
||||
this.maxDependentPredicateSizeMap,
|
||||
key,
|
||||
maxDependentPredicateSize
|
||||
);
|
||||
const metric = computeJoinOrderBadness(maxTupleCount, maxDependentPredicateSize, resultSize!);
|
||||
this.joinOrderMetricMap.set(key, metric);
|
||||
this.reportProblemIfNecessary(event, 0, metric);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case 'COMPUTE_RECURSIVE': {
|
||||
// Compute the badness metric for a recursive predicate for each ordering.
|
||||
const sccMetricInput = this.badnessInputsForRecursiveDelta(event);
|
||||
// Loop through each predicate in the SCC
|
||||
sccMetricInput.forEach((buckets, predicate) => {
|
||||
// Loop through each ordering of the predicate
|
||||
buckets.forEach((bucket, raReference) => {
|
||||
// Format the key as demanding-query:name (ordering)
|
||||
const key = makeKey(
|
||||
event.queryCausingWork,
|
||||
predicate,
|
||||
`(${raReference})`
|
||||
);
|
||||
const maxTupleCount = Math.max(...bucket.tupleCounts);
|
||||
const resultSize = bucket.resultSize;
|
||||
const maxDependentPredicateSize = Math.max(
|
||||
...bucket.dependentPredicateSizes.values()
|
||||
);
|
||||
|
||||
if (maxDependentPredicateSize > 0) {
|
||||
pushValue(this.maxTupleCountMap, key, maxTupleCount);
|
||||
pushValue(this.resultSizeMap, key, resultSize);
|
||||
pushValue(
|
||||
this.maxDependentPredicateSizeMap,
|
||||
key,
|
||||
maxDependentPredicateSize
|
||||
);
|
||||
const metric = computeJoinOrderBadness(maxTupleCount, maxDependentPredicateSize, resultSize);
|
||||
const oldMetric = this.joinOrderMetricMap.get(key);
|
||||
if ((oldMetric === undefined) || (metric > oldMetric)) {
|
||||
this.joinOrderMetricMap.set(key, metric);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate through an SCC with main node `event`.
|
||||
*/
|
||||
private iterateSCC(
|
||||
event: ComputeRecursive,
|
||||
func: (
|
||||
inLayerEvent: ComputeRecursive | InLayer,
|
||||
run: PipelineRun,
|
||||
iteration: number
|
||||
) => void
|
||||
): void {
|
||||
const sccEvents = this.layerEvents.get(event.raHash)!;
|
||||
const nextPipeline: number[] = new Array(sccEvents.length).fill(0);
|
||||
|
||||
const maxIteration = Math.max(
|
||||
...sccEvents.map(e => e.predicateIterationMillis.length)
|
||||
);
|
||||
|
||||
for (let iteration = 0; iteration < maxIteration; ++iteration) {
|
||||
// Loop through each predicate in this iteration
|
||||
for (let predicate = 0; predicate < sccEvents.length; ++predicate) {
|
||||
const inLayerEvent = sccEvents[predicate];
|
||||
const iterationTime =
|
||||
inLayerEvent.predicateIterationMillis.length <= iteration
|
||||
? -1
|
||||
: inLayerEvent.predicateIterationMillis[iteration];
|
||||
if (iterationTime != -1) {
|
||||
const run: PipelineRun =
|
||||
inLayerEvent.pipelineRuns[nextPipeline[predicate]++];
|
||||
func(inLayerEvent, run, iteration);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the maximum tuple count and maximum dependent predicate size for a non-recursive pipeline
|
||||
*/
|
||||
private badnessInputsForNonRecursiveDelta(
|
||||
pipelineRun: PipelineRun,
|
||||
event: ComputeSimple
|
||||
): { maxTupleCount: number; maxDependentPredicateSize: number } {
|
||||
const dependentPredicateSizes = Object.values(event.dependencies).map(hash =>
|
||||
this.predicateSizes.get(hash) ?? 0 // Should always be present, but zero is a safe default.
|
||||
);
|
||||
const maxDependentPredicateSize = safeMax(dependentPredicateSizes);
|
||||
return {
|
||||
maxTupleCount: safeMax(pipelineRun.counts),
|
||||
maxDependentPredicateSize: maxDependentPredicateSize
|
||||
};
|
||||
}
|
||||
|
||||
private prevDeltaSizes(event: ComputeRecursive, predicate: string, i: number) {
|
||||
// If an iteration isn't present in the map it means it was skipped because the optimizer
|
||||
// inferred that it was empty. So its size is 0.
|
||||
return this.curDeltaSizes(event, predicate, i - 1);
|
||||
}
|
||||
|
||||
private curDeltaSizes(event: ComputeRecursive, predicate: string, i: number) {
|
||||
// If an iteration isn't present in the map it means it was skipped because the optimizer
|
||||
// inferred that it was empty. So its size is 0.
|
||||
return (
|
||||
this.layerEvents.get(event.raHash)?.find(x => x.predicateName === predicate)?.deltaSizes[i] ?? 0
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the metric dependent predicate sizes and the result size for a predicate in an SCC.
|
||||
*/
|
||||
private badnessInputsForLayer(
|
||||
event: ComputeRecursive,
|
||||
inLayerEvent: InLayer | ComputeRecursive,
|
||||
raReference: string,
|
||||
iteration: number
|
||||
) {
|
||||
const dependentPredicates = getDependentPredicates(
|
||||
inLayerEvent.ra[raReference]
|
||||
);
|
||||
let dependentPredicateSizes: I.Map<string, number>;
|
||||
// We treat the base case as a non-recursive pipeline. In that case, the dependent predicates are
|
||||
// the dependencies of the base case and the cur_deltas.
|
||||
if (raReference === 'base') {
|
||||
dependentPredicateSizes = I.Map(
|
||||
dependentPredicates.map((pred): [string, number] => {
|
||||
// A base case cannot contain a `prev_delta`, but it can contain a `cur_delta`.
|
||||
let size = 0;
|
||||
if (pred.endsWith('#cur_delta')) {
|
||||
size = this.curDeltaSizes(
|
||||
event,
|
||||
pred.slice(0, -'#cur_delta'.length),
|
||||
iteration
|
||||
);
|
||||
} else {
|
||||
const hash = event.dependencies[pred];
|
||||
size = this.predicateSizes.get(hash)!;
|
||||
}
|
||||
return [pred, size];
|
||||
})
|
||||
);
|
||||
} else {
|
||||
// It's a non-base case in a recursive pipeline. In that case, the dependent predicates are
|
||||
// only the prev_deltas.
|
||||
dependentPredicateSizes = I.Map(
|
||||
dependentPredicates
|
||||
.flatMap(pred => {
|
||||
// If it's actually a prev_delta
|
||||
if (pred.endsWith('#prev_delta')) {
|
||||
// Return the predicate without the #prev_delta suffix.
|
||||
return [pred.slice(0, -'#prev_delta'.length)];
|
||||
} else {
|
||||
// Not a recursive delta. Skip it.
|
||||
return [];
|
||||
}
|
||||
})
|
||||
.map((prev): [string, number] => {
|
||||
const size = this.prevDeltaSizes(event, prev, iteration);
|
||||
return [prev, size];
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const deltaSize = inLayerEvent.deltaSizes[iteration];
|
||||
return { dependentPredicateSizes, deltaSize };
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the metric input for all the events in a SCC that starts with main node `event`
|
||||
*/
|
||||
private badnessInputsForRecursiveDelta(event: ComputeRecursive): Map<string, Map<string, Bucket>> {
|
||||
// nameToOrderToBucket : predicate name -> ordering (e.g., standard, order_500000, etc.) -> bucket
|
||||
const nameToOrderToBucket = new Map<string, Map<string, Bucket>>();
|
||||
|
||||
// Iterate through the SCC and compute the metric inputs
|
||||
this.iterateSCC(event, (inLayerEvent, run, iteration) => {
|
||||
const raReference = run.raReference;
|
||||
const predicateName = inLayerEvent.predicateName;
|
||||
if (!nameToOrderToBucket.has(predicateName)) {
|
||||
nameToOrderToBucket.set(predicateName, new Map());
|
||||
}
|
||||
const orderTobucket = nameToOrderToBucket.get(predicateName)!;
|
||||
if (!orderTobucket.has(raReference)) {
|
||||
orderTobucket.set(raReference, {
|
||||
tupleCounts: new Int32Array(0),
|
||||
resultSize: 0,
|
||||
dependentPredicateSizes: I.Map()
|
||||
});
|
||||
}
|
||||
|
||||
const { dependentPredicateSizes, deltaSize } = this.badnessInputsForLayer(
|
||||
event,
|
||||
inLayerEvent,
|
||||
raReference,
|
||||
iteration
|
||||
);
|
||||
|
||||
const bucket = orderTobucket.get(raReference)!;
|
||||
// Pointwise sum the tuple counts
|
||||
const newTupleCounts = pointwiseSum(
|
||||
bucket.tupleCounts,
|
||||
new Int32Array(run.counts),
|
||||
this.problemReporter
|
||||
);
|
||||
const resultSize = bucket.resultSize + deltaSize;
|
||||
// Pointwise sum the deltas.
|
||||
const newDependentPredicateSizes = bucket.dependentPredicateSizes.mergeWith(
|
||||
(oldSize, newSize) => oldSize + newSize,
|
||||
dependentPredicateSizes
|
||||
);
|
||||
orderTobucket.set(raReference, {
|
||||
tupleCounts: newTupleCounts,
|
||||
resultSize: resultSize,
|
||||
dependentPredicateSizes: newDependentPredicateSizes
|
||||
});
|
||||
});
|
||||
return nameToOrderToBucket;
|
||||
}
|
||||
}
|
||||
|
||||
export class JoinOrderScannerProvider implements EvaluationLogScannerProvider {
|
||||
public createScanner(problemReporter: EvaluationLogProblemReporter): EvaluationLogScanner {
|
||||
return new JoinOrderScanner(problemReporter, DEFAULT_WARNING_THRESHOLD);
|
||||
}
|
||||
}
|
||||
23
extensions/ql-vscode/src/log-insights/jsonl-reader.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
/**
|
||||
* Read a file consisting of multiple JSON objects. Each object is separated from the previous one
|
||||
* by a double newline sequence. This is basically a more human-readable form of JSONL.
|
||||
*
|
||||
* The current implementation reads the entire text of the document into memory, but in the future
|
||||
* it will stream the document to improve the performance with large documents.
|
||||
*
|
||||
* @param path The path to the file.
|
||||
* @param handler Callback to be invoked for each top-level JSON object in order.
|
||||
*/
|
||||
export async function readJsonlFile(path: string, handler: (value: any) => Promise<void>): Promise<void> {
|
||||
const logSummary = await fs.readFile(path, 'utf-8');
|
||||
|
||||
// Top-level objects in the summary are separated by blank lines, so split on double newlines.
|
||||
const jsonSummaryObjects: string[] = logSummary.split(/\r?\n\r?\n/g);
|
||||
|
||||
for (const obj of jsonSummaryObjects) {
|
||||
const jsonObj = JSON.parse(obj);
|
||||
await handler(jsonObj);
|
||||
}
|
||||
}
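A hypothetical usage sketch; `countSummaryEvents` is not part of the extension and the path argument is a placeholder.

// Hypothetical usage: count the top-level objects in a summary file.
async function countSummaryEvents(path: string): Promise<number> {
  let count = 0;
  await readJsonlFile(path, async () => {
    count++;
  });
  return count;
}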
|
||||
109
extensions/ql-vscode/src/log-insights/log-scanner-service.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import { Diagnostic, DiagnosticSeverity, languages, Range, Uri } from 'vscode';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { QueryHistoryManager } from '../query-history';
|
||||
import { QueryHistoryInfo } from '../query-history-info';
|
||||
import { EvaluationLogProblemReporter, EvaluationLogScannerSet } from './log-scanner';
|
||||
import { PipelineInfo, SummarySymbols } from './summary-parser';
|
||||
import * as fs from 'fs-extra';
|
||||
import { logger } from '../logging';
|
||||
|
||||
/**
|
||||
* Compute the key used to find a predicate in the summary symbols.
|
||||
* @param name The name of the predicate.
|
||||
* @param raHash The RA hash of the predicate.
|
||||
* @returns The key of the predicate, consisting of `name@shortHash`, where `shortHash` is the first
|
||||
* eight characters of `raHash`.
|
||||
*/
|
||||
function predicateSymbolKey(name: string, raHash: string): string {
|
||||
return `${name}@${raHash.substring(0, 8)}`;
|
||||
}
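For example (the hash below is invented), the key would be 'Expr::getParent#ff@76d6745o'.

const exampleKey = predicateSymbolKey('Expr::getParent#ff', '76d6745obcdef123');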
|
||||
|
||||
/**
|
||||
* Implementation of `EvaluationLogProblemReporter` that generates `Diagnostic` objects to display
|
||||
* in the VS Code "Problems" view.
|
||||
*/
|
||||
class ProblemReporter implements EvaluationLogProblemReporter {
|
||||
public readonly diagnostics: Diagnostic[] = [];
|
||||
|
||||
constructor(private readonly symbols: SummarySymbols | undefined) {
|
||||
}
|
||||
|
||||
public reportProblem(predicateName: string, raHash: string, iteration: number, message: string): void {
|
||||
const nameWithHash = predicateSymbolKey(predicateName, raHash);
|
||||
const predicateSymbol = this.symbols?.predicates[nameWithHash];
|
||||
let predicateInfo: PipelineInfo | undefined = undefined;
|
||||
if (predicateSymbol !== undefined) {
|
||||
predicateInfo = predicateSymbol.iterations[iteration];
|
||||
}
|
||||
if (predicateInfo !== undefined) {
|
||||
const range = new Range(predicateInfo.raStartLine, 0, predicateInfo.raEndLine + 1, 0);
|
||||
this.diagnostics.push(new Diagnostic(range, message, DiagnosticSeverity.Error));
|
||||
}
|
||||
}
|
||||
|
||||
public log(message: string): void {
|
||||
void logger.log(message);
|
||||
}
|
||||
}
|
||||
|
||||
export class LogScannerService extends DisposableObject {
|
||||
public readonly scanners = new EvaluationLogScannerSet();
|
||||
private readonly diagnosticCollection = this.push(languages.createDiagnosticCollection('ql-eval-log'));
|
||||
private currentItem: QueryHistoryInfo | undefined = undefined;
|
||||
|
||||
constructor(qhm: QueryHistoryManager) {
|
||||
super();
|
||||
|
||||
this.push(qhm.onDidChangeCurrentQueryItem(async (item) => {
|
||||
if (item !== this.currentItem) {
|
||||
this.currentItem = item;
|
||||
await this.scanEvalLog(item);
|
||||
}
|
||||
}));
|
||||
|
||||
this.push(qhm.onDidCompleteQuery(async (item) => {
|
||||
if (item === this.currentItem) {
|
||||
await this.scanEvalLog(item);
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan the evaluation log for a query, and report any diagnostics.
|
||||
*
|
||||
* @param query The query whose log is to be scanned.
|
||||
*/
|
||||
public async scanEvalLog(
|
||||
query: QueryHistoryInfo | undefined
|
||||
): Promise<void> {
|
||||
this.diagnosticCollection.clear();
|
||||
|
||||
if ((query?.t !== 'local')
|
||||
|| (query.evalLogSummaryLocation === undefined)
|
||||
|| (query.jsonEvalLogSummaryLocation === undefined)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const diagnostics = await this.scanLog(query.jsonEvalLogSummaryLocation, query.evalLogSummarySymbolsLocation);
|
||||
const uri = Uri.file(query.evalLogSummaryLocation);
|
||||
this.diagnosticCollection.set(uri, diagnostics);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan the evaluator summary log for problems, using the scanners for all registered providers.
|
||||
* @param jsonSummaryLocation The file path of the JSON summary log.
|
||||
* @param symbolsLocation The file path of the symbols file for the human-readable log summary.
|
||||
* @returns An array of `Diagnostic`s representing the problems found by scanners.
|
||||
*/
|
||||
private async scanLog(jsonSummaryLocation: string, symbolsLocation: string | undefined): Promise<Diagnostic[]> {
|
||||
let symbols: SummarySymbols | undefined = undefined;
|
||||
if (symbolsLocation !== undefined) {
|
||||
symbols = JSON.parse(await fs.readFile(symbolsLocation, { encoding: 'utf-8' }));
|
||||
}
|
||||
const problemReporter = new ProblemReporter(symbols);
|
||||
|
||||
await this.scanners.scanLog(jsonSummaryLocation, problemReporter);
|
||||
|
||||
return problemReporter.diagnostics;
|
||||
}
|
||||
}
|
||||
103
extensions/ql-vscode/src/log-insights/log-scanner.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import { SummaryEvent } from './log-summary';
|
||||
import { readJsonlFile } from './jsonl-reader';
|
||||
|
||||
/**
|
||||
* Callback interface used to report diagnostics from a log scanner.
|
||||
*/
|
||||
export interface EvaluationLogProblemReporter {
|
||||
/**
|
||||
* Report a potential problem detected in the evaluation log.
|
||||
*
|
||||
* @param predicateName The mangled name of the predicate with the problem.
|
||||
* @param raHash The RA hash of the predicate with the problem.
|
||||
* @param iteration The iteration number with the problem. For a non-recursive predicate, this
|
||||
* must be zero.
|
||||
* @param message The problem message.
|
||||
*/
|
||||
reportProblem(predicateName: string, raHash: string, iteration: number, message: string): void;
|
||||
|
||||
/**
|
||||
* Log a message about a problem in the implementation of the scanner. These will typically be
|
||||
* displayed separate from any problems reported via `reportProblem()`.
|
||||
*/
|
||||
log(message: string): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface implemented by a log scanner. Instances are created via
|
||||
* `EvaluationLogScannerProvider.createScanner()`.
|
||||
*/
|
||||
export interface EvaluationLogScanner {
|
||||
/**
|
||||
* Called for each event in the log summary, in order. The implementation can report problems via
|
||||
* the `EvaluationLogProblemReporter` interface that was supplied to `createScanner()`.
|
||||
* @param event The log summary event.
|
||||
*/
|
||||
onEvent(event: SummaryEvent): void;
|
||||
/**
|
||||
* Called after all events in the log summary have been processed. The implementation can report
|
||||
* problems via the `EvaluationLogProblemReporter` interface that was supplied to
|
||||
* `createScanner()`.
|
||||
*/
|
||||
onDone(): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* A factory for log scanners. When a log is to be scanned, all registered
|
||||
* `EvaluationLogScannerProviders` will be asked to create a new instance of `EvaluationLogScanner`
|
||||
* to do the scanning.
|
||||
*/
|
||||
export interface EvaluationLogScannerProvider {
|
||||
/**
|
||||
* Create a new instance of `EvaluationLogScanner` to scan a single summary log.
|
||||
* @param problemReporter Callback interface for reporting any problems discovered.
|
||||
*/
|
||||
createScanner(problemReporter: EvaluationLogProblemReporter): EvaluationLogScanner;
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as VSCode's `Disposable`, but avoids a dependency on VS Code.
|
||||
*/
|
||||
export interface Disposable {
|
||||
dispose(): void;
|
||||
}
|
||||
|
||||
export class EvaluationLogScannerSet {
|
||||
private readonly scannerProviders = new Map<number, EvaluationLogScannerProvider>();
|
||||
private nextScannerProviderId = 0;
|
||||
|
||||
/**
|
||||
* Register a provider that can create instances of `EvaluationLogScanner` to scan evaluation logs
|
||||
* for problems.
|
||||
* @param provider The provider.
|
||||
* @returns A `Disposable` that, when disposed, will unregister the provider.
|
||||
*/
|
||||
public registerLogScannerProvider(provider: EvaluationLogScannerProvider): Disposable {
|
||||
const id = this.nextScannerProviderId;
|
||||
this.nextScannerProviderId++;
|
||||
|
||||
this.scannerProviders.set(id, provider);
|
||||
return {
|
||||
dispose: () => {
|
||||
this.scannerProviders.delete(id);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan the evaluator summary log for problems, using the scanners for all registered providers.
|
||||
* @param jsonSummaryLocation The file path of the JSON summary log.
|
||||
* @param problemReporter Callback interface for reporting any problems discovered.
|
||||
*/
|
||||
public async scanLog(jsonSummaryLocation: string, problemReporter: EvaluationLogProblemReporter): Promise<void> {
|
||||
const scanners = [...this.scannerProviders.values()].map(p => p.createScanner(problemReporter));
|
||||
|
||||
await readJsonlFile(jsonSummaryLocation, async obj => {
|
||||
scanners.forEach(scanner => {
|
||||
scanner.onEvent(obj);
|
||||
});
|
||||
});
|
||||
|
||||
scanners.forEach(scanner => scanner.onDone());
|
||||
}
|
||||
}
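A minimal sketch of how a provider could be registered with the set; the event-counting scanner below is hypothetical.

// Hypothetical provider whose scanner counts events and logs the total when done.
function registerEventCountingScanner(scanners: EvaluationLogScannerSet): Disposable {
  return scanners.registerLogScannerProvider({
    createScanner(problemReporter: EvaluationLogProblemReporter): EvaluationLogScanner {
      let eventCount = 0;
      return {
        onEvent: () => { eventCount++; },
        onDone: () => problemReporter.log(`Scanned ${eventCount} events.`)
      };
    }
  });
}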
|
||||
93
extensions/ql-vscode/src/log-insights/log-summary.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
export interface PipelineRun {
|
||||
raReference: string;
|
||||
counts: number[];
|
||||
duplicationPercentages: number[];
|
||||
}
|
||||
|
||||
export interface Ra {
|
||||
[key: string]: string[];
|
||||
}
|
||||
|
||||
export type EvaluationStrategy =
|
||||
'COMPUTE_SIMPLE' |
|
||||
'COMPUTE_RECURSIVE' |
|
||||
'IN_LAYER' |
|
||||
'COMPUTED_EXTENSIONAL' |
|
||||
'EXTENSIONAL' |
|
||||
'SENTINEL_EMPTY' |
|
||||
'CACHACA' |
|
||||
'CACHE_HIT';
|
||||
|
||||
interface SummaryEventBase {
|
||||
evaluationStrategy: EvaluationStrategy;
|
||||
predicateName: string;
|
||||
raHash: string;
|
||||
appearsAs: { [key: string]: { [key: string]: number[] } };
|
||||
completionType?: string;
|
||||
}
|
||||
|
||||
interface ResultEventBase extends SummaryEventBase {
|
||||
resultSize: number;
|
||||
}
|
||||
|
||||
export interface ComputeSimple extends ResultEventBase {
|
||||
evaluationStrategy: 'COMPUTE_SIMPLE';
|
||||
ra: Ra;
|
||||
pipelineRuns?: [PipelineRun];
|
||||
queryCausingWork?: string;
|
||||
dependencies: { [key: string]: string };
|
||||
}
|
||||
|
||||
export interface ComputeRecursive extends ResultEventBase {
|
||||
evaluationStrategy: 'COMPUTE_RECURSIVE';
|
||||
deltaSizes: number[];
|
||||
ra: Ra;
|
||||
pipelineRuns: PipelineRun[];
|
||||
queryCausingWork?: string;
|
||||
dependencies: { [key: string]: string };
|
||||
predicateIterationMillis: number[];
|
||||
}
|
||||
|
||||
export interface InLayer extends ResultEventBase {
|
||||
evaluationStrategy: 'IN_LAYER';
|
||||
deltaSizes: number[];
|
||||
ra: Ra;
|
||||
pipelineRuns: PipelineRun[];
|
||||
queryCausingWork?: string;
|
||||
mainHash: string;
|
||||
predicateIterationMillis: number[];
|
||||
}
|
||||
|
||||
export interface ComputedExtensional extends ResultEventBase {
|
||||
evaluationStrategy: 'COMPUTED_EXTENSIONAL';
|
||||
queryCausingWork?: string;
|
||||
}
|
||||
|
||||
export interface NonComputedExtensional extends ResultEventBase {
|
||||
evaluationStrategy: 'EXTENSIONAL';
|
||||
queryCausingWork?: string;
|
||||
}
|
||||
|
||||
export interface SentinelEmpty extends SummaryEventBase {
|
||||
evaluationStrategy: 'SENTINEL_EMPTY';
|
||||
sentinelRaHash: string;
|
||||
}
|
||||
|
||||
export interface Cachaca extends ResultEventBase {
|
||||
evaluationStrategy: 'CACHACA';
|
||||
}
|
||||
|
||||
export interface CacheHit extends ResultEventBase {
|
||||
evaluationStrategy: 'CACHE_HIT';
|
||||
}
|
||||
|
||||
export type Extensional = ComputedExtensional | NonComputedExtensional;
|
||||
|
||||
export type SummaryEvent =
|
||||
| ComputeSimple
|
||||
| ComputeRecursive
|
||||
| InLayer
|
||||
| Extensional
|
||||
| SentinelEmpty
|
||||
| Cachaca
|
||||
| CacheHit;
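An illustrative helper (not part of the file) showing how the union narrows on `evaluationStrategy`:

// Because the union is discriminated on `evaluationStrategy`, a switch narrows
// each case to the corresponding event interface.
function describeEvent(event: SummaryEvent): string {
  switch (event.evaluationStrategy) {
    case 'COMPUTE_RECURSIVE':
    case 'IN_LAYER':
      return `${event.predicateName}: recursive, ${event.deltaSizes.length} iterations`;
    case 'SENTINEL_EMPTY':
      return `${event.predicateName}: empty (SENTINEL_EMPTY)`;
    default:
      return `${event.predicateName}: ${event.evaluationStrategy}`;
  }
}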
|
||||
@@ -0,0 +1,154 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import { RawSourceMap, SourceMapConsumer } from 'source-map';
|
||||
import { commands, Position, Selection, TextDocument, TextEditor, TextEditorRevealType, TextEditorSelectionChangeEvent, ViewColumn, window, workspace } from 'vscode';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { commandRunner } from '../commandRunner';
|
||||
import { logger } from '../logging';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
|
||||
/** A `Position` within a specified file on disk. */
|
||||
interface PositionInFile {
|
||||
filePath: string;
|
||||
position: Position;
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the specified source location in a text editor.
|
||||
* @param position The position (including file path) to show.
|
||||
*/
|
||||
async function showSourceLocation(position: PositionInFile): Promise<void> {
|
||||
const document = await workspace.openTextDocument(position.filePath);
|
||||
const editor = await window.showTextDocument(document, ViewColumn.Active);
|
||||
editor.selection = new Selection(position.position, position.position);
|
||||
editor.revealRange(editor.selection, TextEditorRevealType.InCenterIfOutsideViewport);
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple language support for human-readable evaluator log summaries.
|
||||
*
|
||||
* This class implements the `codeQL.gotoQL` command, which jumps from RA code to the corresponding
|
||||
* QL code that generated it. It also tracks the current selection and active editor to enable and
|
||||
* disable that command based on whether there is a QL mapping for the current selection.
|
||||
*/
|
||||
export class SummaryLanguageSupport extends DisposableObject {
|
||||
/**
|
||||
* The last `TextDocument` (with language `ql-summary`) for which we tried to find a sourcemap, or
|
||||
* `undefined` if we have not seen such a document yet.
|
||||
*/
|
||||
private lastDocument: TextDocument | undefined = undefined;
|
||||
/**
|
||||
* The sourcemap for `lastDocument`, or `undefined` if there was no such sourcemap or document.
|
||||
*/
|
||||
private sourceMap: SourceMapConsumer | undefined = undefined;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
|
||||
this.push(window.onDidChangeActiveTextEditor(this.handleDidChangeActiveTextEditor));
|
||||
this.push(window.onDidChangeTextEditorSelection(this.handleDidChangeTextEditorSelection));
|
||||
this.push(workspace.onDidCloseTextDocument(this.handleDidCloseTextDocument));
|
||||
|
||||
this.push(commandRunner('codeQL.gotoQL', this.handleGotoQL));
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the location of the QL code that generated the RA at the current selection in the active
|
||||
* editor, or `undefined` if there is no mapping.
|
||||
*/
|
||||
private async getQLSourceLocation(): Promise<PositionInFile | undefined> {
|
||||
const editor = window.activeTextEditor;
|
||||
if (editor === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const document = editor.document;
|
||||
if (document.languageId !== 'ql-summary') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (document.uri.scheme !== 'file') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (this.lastDocument !== document) {
|
||||
this.clearCache();
|
||||
|
||||
const mapPath = document.uri.fsPath + '.map';
|
||||
|
||||
try {
|
||||
const sourceMapText = await fs.readFile(mapPath, 'utf-8');
|
||||
const rawMap: RawSourceMap = JSON.parse(sourceMapText);
|
||||
this.sourceMap = await new SourceMapConsumer(rawMap);
|
||||
} catch (e: unknown) {
|
||||
// Error reading sourcemap. Pretend there was no sourcemap.
|
||||
void logger.log(`Error reading sourcemap file '${mapPath}': ${getErrorMessage(e)}`);
|
||||
this.sourceMap = undefined;
|
||||
}
|
||||
this.lastDocument = document;
|
||||
}
|
||||
|
||||
if (this.sourceMap === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const qlPosition = this.sourceMap.originalPositionFor({
|
||||
line: editor.selection.start.line + 1,
|
||||
column: editor.selection.start.character,
|
||||
bias: SourceMapConsumer.GREATEST_LOWER_BOUND
|
||||
});
|
||||
|
||||
if ((qlPosition.source === null) || (qlPosition.line === null)) {
|
||||
// No position found.
|
||||
return undefined;
|
||||
}
|
||||
const line = qlPosition.line - 1; // In `source-map`, lines are 1-based...
|
||||
const column = qlPosition.column ?? 0; // ...but columns are 0-based :(
|
||||
|
||||
return {
|
||||
filePath: qlPosition.source,
|
||||
position: new Position(line, column)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears the cached sourcemap and its corresponding `TextDocument`.
|
||||
*/
|
||||
private clearCache(): void {
|
||||
if (this.sourceMap !== undefined) {
|
||||
this.sourceMap.destroy();
|
||||
this.sourceMap = undefined;
|
||||
this.lastDocument = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the `codeql.hasQLSource` context variable based on the current selection. This variable
|
||||
* controls whether or not the `codeQL.gotoQL` command is enabled.
|
||||
*/
|
||||
private async updateContext(): Promise<void> {
|
||||
const position = await this.getQLSourceLocation();
|
||||
|
||||
await commands.executeCommand('setContext', 'codeql.hasQLSource', position !== undefined);
|
||||
}
|
||||
|
||||
handleDidChangeActiveTextEditor = async (_editor: TextEditor | undefined): Promise<void> => {
|
||||
await this.updateContext();
|
||||
}
|
||||
|
||||
handleDidChangeTextEditorSelection = async (_e: TextEditorSelectionChangeEvent): Promise<void> => {
|
||||
await this.updateContext();
|
||||
}
|
||||
|
||||
handleDidCloseTextDocument = (document: TextDocument): void => {
|
||||
if (this.lastDocument === document) {
|
||||
this.clearCache();
|
||||
}
|
||||
}
|
||||
|
||||
handleGotoQL = async (): Promise<void> => {
|
||||
const position = await this.getQLSourceLocation();
|
||||
if (position !== undefined) {
|
||||
await showSourceLocation(position);
|
||||
}
|
||||
};
|
||||
}
|
||||
113
extensions/ql-vscode/src/log-insights/summary-parser.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
/**
|
||||
* Location information for a single pipeline invocation in the RA.
|
||||
*/
|
||||
export interface PipelineInfo {
|
||||
startLine: number;
|
||||
raStartLine: number;
|
||||
raEndLine: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Location information for a single predicate in the RA.
|
||||
*/
|
||||
export interface PredicateSymbol {
|
||||
/**
|
||||
* `PipelineInfo` for each iteration. A non-recursive predicate will have a single iteration `0`.
|
||||
*/
|
||||
iterations: Record<number, PipelineInfo>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Location information for the RA from an evaluation log. Line numbers point into the
|
||||
* human-readable log summary.
|
||||
*/
|
||||
export interface SummarySymbols {
|
||||
predicates: Record<string, PredicateSymbol>;
|
||||
}
|
||||
|
||||
// Tuple counts for Expr::Expr::getParent#dispred#f0820431#ff@76d6745o:
|
||||
const NON_RECURSIVE_TUPLE_COUNT_REGEXP = /^Evaluated relational algebra for predicate (?<predicateName>\S+) with tuple counts:$/;
|
||||
// Tuple counts for Expr::Expr::getEnclosingStmt#f0820431#bf@923ddwj9 on iteration 0 running pipeline base:
|
||||
const RECURSIVE_TUPLE_COUNT_REGEXP = /^Evaluated relational algebra for predicate (?<predicateName>\S+) on iteration (?<iteration>\d+) running pipeline (?<pipeline>\S+) with tuple counts:$/;
|
||||
const RETURN_REGEXP = /^\s*return /;
|
||||
|
||||
/**
|
||||
* Parse a human-readable evaluation log summary to find the location of the RA for each pipeline
|
||||
* run.
|
||||
*
|
||||
* TODO: Once we're more certain about the symbol format, we should have the CLI generate this as it
|
||||
* generates the human-readable summary to avoid having to rely on regular expression matching of the
|
||||
* human-readable text.
|
||||
*
|
||||
* @param summaryPath The path to the summary file.
|
||||
* @param symbolsPath The path to the symbols file to generate.
|
||||
*/
|
||||
export async function generateSummarySymbolsFile(summaryPath: string, symbolsPath: string): Promise<void> {
|
||||
const symbols = await generateSummarySymbols(summaryPath);
|
||||
await fs.writeFile(symbolsPath, JSON.stringify(symbols));
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a human-readable evaluation log summary to find the location of the RA for each pipeline
|
||||
* run.
|
||||
*
|
||||
* @param summaryPath The path to the summary file.
|
||||
* @returns Symbol information for the summary file.
|
||||
*/
|
||||
async function generateSummarySymbols(summaryPath: string): Promise<SummarySymbols> {
|
||||
const summary = await fs.promises.readFile(summaryPath, { encoding: 'utf-8' });
|
||||
const symbols: SummarySymbols = {
|
||||
predicates: {}
|
||||
};
|
||||
|
||||
const lines = summary.split(/\r?\n/);
|
||||
let lineNumber = 0;
|
||||
while (lineNumber < lines.length) {
|
||||
const startLineNumber = lineNumber;
|
||||
lineNumber++;
|
||||
const startLine = lines[startLineNumber];
|
||||
const nonRecursiveMatch = startLine.match(NON_RECURSIVE_TUPLE_COUNT_REGEXP);
|
||||
let predicateName: string | undefined = undefined;
|
||||
let iteration = 0;
|
||||
if (nonRecursiveMatch) {
|
||||
predicateName = nonRecursiveMatch.groups!.predicateName;
|
||||
} else {
|
||||
const recursiveMatch = startLine.match(RECURSIVE_TUPLE_COUNT_REGEXP);
|
||||
if (recursiveMatch?.groups) {
|
||||
predicateName = recursiveMatch.groups.predicateName;
|
||||
iteration = parseInt(recursiveMatch.groups.iteration);
|
||||
}
|
||||
}
|
||||
|
||||
if (predicateName !== undefined) {
|
||||
const raStartLine = lineNumber;
|
||||
let raEndLine: number | undefined = undefined;
|
||||
while ((lineNumber < lines.length) && (raEndLine === undefined)) {
|
||||
const raLine = lines[lineNumber];
|
||||
const returnMatch = raLine.match(RETURN_REGEXP);
|
||||
if (returnMatch) {
|
||||
raEndLine = lineNumber;
|
||||
}
|
||||
lineNumber++;
|
||||
}
|
||||
if (raEndLine !== undefined) {
|
||||
let symbol = symbols.predicates[predicateName];
|
||||
if (symbol === undefined) {
|
||||
symbol = {
|
||||
iterations: {}
|
||||
};
|
||||
symbols.predicates[predicateName] = symbol;
|
||||
}
|
||||
symbol.iterations[iteration] = {
|
||||
startLine: lineNumber,
|
||||
raStartLine: raStartLine,
|
||||
raEndLine: raEndLine
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return symbols;
|
||||
}
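For orientation, the generated symbols object might look roughly like the following; the predicate name is taken from the example comments above and the line numbers are invented.

const exampleSymbols: SummarySymbols = {
  predicates: {
    'Expr::Expr::getParent#dispred#f0820431#ff@76d6745o': {
      iterations: {
        0: { startLine: 120, raStartLine: 101, raEndLine: 119 }
      }
    }
  }
};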
|
||||
39
extensions/ql-vscode/src/pure/date.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
/*
|
||||
* Contains an assortment of helper constants and functions for working with dates.
|
||||
*/
|
||||
|
||||
const dateWithoutYearFormatter = new Intl.DateTimeFormat(undefined, {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: 'numeric',
|
||||
minute: '2-digit',
|
||||
});
|
||||
|
||||
const dateFormatter = new Intl.DateTimeFormat(undefined, {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: 'numeric',
|
||||
minute: '2-digit',
|
||||
});
|
||||
|
||||
export function formatDate(value: Date): string {
|
||||
if (value.getFullYear() === new Date().getFullYear()) {
|
||||
return dateWithoutYearFormatter.format(value);
|
||||
}
|
||||
|
||||
return dateFormatter.format(value);
|
||||
}
|
||||
|
||||
// These are overloads for the function that allow us to not add an extra
|
||||
// type check when the value is definitely not undefined.
|
||||
export function parseDate(value: string): Date;
|
||||
export function parseDate(value: string | undefined | null): Date | undefined;
|
||||
|
||||
export function parseDate(value: string | undefined | null): Date | undefined {
|
||||
if (value === undefined || value === null) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return new Date(value);
|
||||
}
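A brief illustration of the two helpers (the date string is arbitrary): the overloads keep `undefined` out of the type when the input is definitely a string.

const definiteDate: Date = parseDate('2022-06-01T12:00:00Z');
const rawValue: string | undefined = undefined;
const maybeDate: Date | undefined = parseDate(rawValue);
// formatDate omits the year only for dates in the current year.
const formatted = formatDate(definiteDate);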
|
||||
@@ -2,6 +2,11 @@ import * as sarif from 'sarif';
|
||||
import { AnalysisResults } from '../remote-queries/shared/analysis-result';
|
||||
import { AnalysisSummary, RemoteQueryResult } from '../remote-queries/shared/remote-query-result';
|
||||
import { RawResultSet, ResultRow, ResultSetSchema, Column, ResolvableLocationValue } from './bqrs-cli-types';
|
||||
import {
|
||||
VariantAnalysis,
|
||||
VariantAnalysisScannedRepositoryResult,
|
||||
VariantAnalysisScannedRepositoryState,
|
||||
} from '../remote-queries/shared/variant-analysis';
|
||||
|
||||
/**
|
||||
* This module contains types and code that are shared between
|
||||
@@ -174,7 +179,7 @@ export type FromResultsViewMsg =
|
||||
| ToggleDiagnostics
|
||||
| ChangeRawResultsSortMsg
|
||||
| ChangeInterpretedResultsSortMsg
|
||||
| ResultViewLoaded
|
||||
| ViewLoadedMsg
|
||||
| ChangePage
|
||||
| OpenFileMsg;
|
||||
|
||||
@@ -216,11 +221,11 @@ interface ToggleDiagnostics {
|
||||
}
|
||||
|
||||
/**
|
||||
* Message from the results view to signal that loading the results
|
||||
* is complete.
|
||||
* Message from a view to signal that loading is complete.
|
||||
*/
|
||||
interface ResultViewLoaded {
|
||||
t: 'resultViewLoaded';
|
||||
interface ViewLoadedMsg {
|
||||
t: 'viewLoaded';
|
||||
viewName: string;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -279,18 +284,11 @@ interface ChangeInterpretedResultsSortMsg {
|
||||
* Message from the compare view to the extension.
|
||||
*/
|
||||
export type FromCompareViewMessage =
|
||||
| CompareViewLoadedMessage
|
||||
| ViewLoadedMsg
|
||||
| ChangeCompareMessage
|
||||
| ViewSourceFileMsg
|
||||
| OpenQueryMessage;
|
||||
|
||||
/**
|
||||
* Message from the compare view to signal the completion of loading results.
|
||||
*/
|
||||
interface CompareViewLoadedMessage {
|
||||
t: 'compareViewLoaded';
|
||||
}
|
||||
|
||||
/**
|
||||
* Message from the compare view to request opening a query.
|
||||
*/
|
||||
@@ -389,7 +387,7 @@ export interface ParsedResultSets {
|
||||
}
|
||||
|
||||
export type FromRemoteQueriesMessage =
|
||||
| RemoteQueryLoadedMessage
|
||||
| ViewLoadedMsg
|
||||
| RemoteQueryErrorMessage
|
||||
| OpenFileMsg
|
||||
| OpenVirtualFileMsg
|
||||
@@ -402,10 +400,6 @@ export type ToRemoteQueriesMessage =
|
||||
| SetRemoteQueryResultMessage
|
||||
| SetAnalysesResultsMessage;
|
||||
|
||||
export interface RemoteQueryLoadedMessage {
|
||||
t: 'remoteQueryLoaded';
|
||||
}
|
||||
|
||||
export interface SetRemoteQueryResultMessage {
|
||||
t: 'setRemoteQueryResult';
|
||||
queryResult: RemoteQueryResult
|
||||
@@ -433,9 +427,59 @@ export interface RemoteQueryDownloadAllAnalysesResultsMessage {
|
||||
|
||||
export interface RemoteQueryExportResultsMessage {
|
||||
t: 'remoteQueryExportResults';
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
export interface CopyRepoListMessage {
|
||||
t: 'copyRepoList';
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
export interface SetVariantAnalysisMessage {
|
||||
t: 'setVariantAnalysis';
|
||||
variantAnalysis: VariantAnalysis;
|
||||
}
|
||||
|
||||
export type StopVariantAnalysisMessage = {
|
||||
t: 'stopVariantAnalysis';
|
||||
variantAnalysisId: number;
|
||||
}
|
||||
|
||||
export type VariantAnalysisState = {
|
||||
variantAnalysisId: number;
|
||||
}
|
||||
|
||||
export interface SetRepoResultsMessage {
|
||||
t: 'setRepoResults';
|
||||
repoResults: VariantAnalysisScannedRepositoryResult[];
|
||||
}
|
||||
|
||||
export interface SetRepoStatesMessage {
|
||||
t: 'setRepoStates';
|
||||
repoStates: VariantAnalysisScannedRepositoryState[];
|
||||
}
|
||||
|
||||
export interface RequestRepositoryResultsMessage {
|
||||
t: 'requestRepositoryResults';
|
||||
repositoryFullName: string;
|
||||
}
|
||||
|
||||
export interface OpenQueryFileMessage {
|
||||
t: 'openQueryFile';
|
||||
}
|
||||
|
||||
export interface OpenQueryTextMessage {
|
||||
t: 'openQueryText';
|
||||
}
|
||||
|
||||
export type ToVariantAnalysisMessage =
|
||||
| SetVariantAnalysisMessage
|
||||
| SetRepoResultsMessage
|
||||
| SetRepoStatesMessage;
|
||||
|
||||
export type FromVariantAnalysisMessage =
|
||||
| ViewLoadedMsg
|
||||
| StopVariantAnalysisMessage
|
||||
| RequestRepositoryResultsMessage
|
||||
| OpenQueryFileMessage
|
||||
| OpenQueryTextMessage;
|
||||
|
||||
@@ -15,38 +15,7 @@
|
||||
*/
|
||||
|
||||
import * as rpc from 'vscode-jsonrpc';
|
||||
|
||||
/**
|
||||
* A position within a QL file.
|
||||
*/
|
||||
export interface Position {
|
||||
/**
|
||||
* The one-based index of the start line
|
||||
*/
|
||||
line: number;
|
||||
/**
|
||||
* The one-based offset of the start column within
|
||||
* the start line in UTF-16 code-units
|
||||
*/
|
||||
column: number;
|
||||
/**
|
||||
* The one-based index of the end line
|
||||
*/
|
||||
endLine: number;
|
||||
|
||||
/**
|
||||
* The one-based offset of the end column within
|
||||
* the end line in UTF-16 code-units
|
||||
*/
|
||||
endColumn: number;
|
||||
/**
|
||||
* The path of the file.
|
||||
* If the file name is "Compiler Generated", the
* position is not a real position but
* arises from compiler-generated code.
|
||||
*/
|
||||
fileName: string;
|
||||
}
|
||||
import * as shared from './messages-shared';
|
||||
|
||||
/**
|
||||
* A query that should be checked for any errors or warnings
|
||||
@@ -155,6 +124,10 @@ export interface CompilationOptions {
|
||||
* get reported anyway. Useful for universal compilation options.
|
||||
*/
|
||||
computeDefaultStrings: boolean;
|
||||
/**
|
||||
* Emit debug information in compiled query.
|
||||
*/
|
||||
emitDebugInfo: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -254,28 +227,6 @@ export interface DILQuery {
|
||||
dilSource: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The way of compiling the query, as a normal query
|
||||
* or a subset of it. Note that precisely one of the two options should be set.
|
||||
*/
|
||||
export interface CompilationTarget {
|
||||
/**
|
||||
* Compile as a normal query
|
||||
*/
|
||||
query?: Record<string, never>;
|
||||
/**
|
||||
* Compile as a quick evaluation
|
||||
*/
|
||||
quickEval?: QuickEvalOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for quick evaluation
|
||||
*/
|
||||
export interface QuickEvalOptions {
|
||||
quickEvalPos?: Position;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of checking a query.
|
||||
*/
|
||||
@@ -650,7 +601,7 @@ export interface ClearCacheParams {
|
||||
/**
|
||||
* Parameters to start a new structured log
|
||||
*/
|
||||
export interface StartLogParams {
|
||||
export interface StartLogParams {
|
||||
/**
|
||||
* The dataset for which we want to start a new structured log
|
||||
*/
|
||||
@@ -664,7 +615,7 @@ export interface ClearCacheParams {
|
||||
/**
|
||||
* Parameters to terminate a structured log
|
||||
*/
|
||||
export interface EndLogParams {
|
||||
export interface EndLogParams {
|
||||
/**
|
||||
* The dataset for which we want to terminate the log
|
||||
*/
|
||||
@@ -1008,37 +959,20 @@ export type DeregisterDatabasesResult = {
|
||||
};
|
||||
|
||||
/**
|
||||
* Type for any action that could have progress messages.
|
||||
* A position within a QL file.
|
||||
*/
|
||||
export interface WithProgressId<T> {
|
||||
/**
|
||||
* The main body
|
||||
*/
|
||||
body: T;
|
||||
/**
|
||||
* The id used to report progress updates
|
||||
*/
|
||||
progressId: number;
|
||||
}
|
||||
export type Position = shared.Position;
|
||||
|
||||
export interface ProgressMessage {
|
||||
/**
|
||||
* The id of the operation that is running
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* The current step
|
||||
*/
|
||||
step: number;
|
||||
/**
|
||||
* The maximum step. This *should* be constant for a single job.
|
||||
*/
|
||||
maxStep: number;
|
||||
/**
|
||||
* The current progress message
|
||||
*/
|
||||
message: string;
|
||||
}
|
||||
/**
|
||||
* The way of compiling the query, as a normal query
|
||||
* or a subset of it. Note that precisely one of the two options should be set.
|
||||
*/
|
||||
export type CompilationTarget = shared.CompilationTarget;
|
||||
|
||||
export type QuickEvalOptions = shared.QuickEvalOptions;
|
||||
|
||||
export type WithProgressId<T> = shared.WithProgressId<T>;
|
||||
export type ProgressMessage = shared.ProgressMessage;
|
||||
|
||||
/**
|
||||
* Check a QL query for errors without compiling it
|
||||
@@ -1070,12 +1004,12 @@ export const compileUpgradeSequence = new rpc.RequestType<WithProgressId<Compile
|
||||
/**
|
||||
* Start a new structured log in the evaluator, terminating the previous one if it exists
|
||||
*/
|
||||
export const startLog = new rpc.RequestType<WithProgressId<StartLogParams>, StartLogResult, void, void>('evaluation/startLog');
|
||||
export const startLog = new rpc.RequestType<WithProgressId<StartLogParams>, StartLogResult, void, void>('evaluation/startLog');
|
||||
|
||||
/**
|
||||
* Terminate a structured log in the evaluator. Is a no-op if we aren't logging to the given location
|
||||
*/
|
||||
export const endLog = new rpc.RequestType<WithProgressId<EndLogParams>, EndLogResult, void, void>('evaluation/endLog');
|
||||
export const endLog = new rpc.RequestType<WithProgressId<EndLogParams>, EndLogResult, void, void>('evaluation/endLog');
|
||||
|
||||
/**
|
||||
* Clear the cache of a dataset
|
||||
@@ -1116,7 +1050,4 @@ export const deregisterDatabases = new rpc.RequestType<
|
||||
*/
|
||||
export const completeQuery = new rpc.RequestType<EvaluationResult, Record<string, any>, void, void>('evaluation/queryCompleted');
|
||||
|
||||
/**
|
||||
* A notification that the progress has been changed.
|
||||
*/
|
||||
export const progress = new rpc.NotificationType<ProgressMessage, void>('ql/progressUpdated');
|
||||
export const progress = shared.progress;
|
||||
@@ -1,44 +1,34 @@
|
||||
import * as os from 'os';
|
||||
import { readJsonlFile } from '../log-insights/jsonl-reader';
|
||||
|
||||
// TODO(angelapwen): Only load in necessary information and
|
||||
// location in bytes for this log to save memory.
|
||||
export interface EvaluatorLogData {
|
||||
queryCausingWork: string;
|
||||
predicateName: string;
|
||||
millis: number;
|
||||
resultSize: number;
|
||||
ra: Pipelines;
|
||||
// location in bytes for this log to save memory.
|
||||
export interface EvalLogData {
|
||||
predicateName: string;
|
||||
millis: number;
|
||||
resultSize: number;
|
||||
// Key: pipeline identifier; Value: array of pipeline steps
|
||||
ra: Record<string, string[]>;
|
||||
}
|
||||
|
||||
interface Pipelines {
|
||||
// Key: pipeline identifier; Value: array of pipeline steps
|
||||
pipelineNamesToSteps: Map<string, string[]>;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* A pure method that parses a string of evaluator log summaries into
|
||||
* an array of EvaluatorLogData objects.
|
||||
*
|
||||
* an array of EvalLogData objects.
|
||||
*/
|
||||
export function parseVisualizerData(logSummary: string): EvaluatorLogData[] {
|
||||
// Remove newline delimiters because summary is in .jsonl format.
|
||||
const jsonSummaryObjects: string[] = logSummary.split(os.EOL + os.EOL);
|
||||
const visualizerData: EvaluatorLogData[] = [];
|
||||
|
||||
for (const obj of jsonSummaryObjects) {
|
||||
const jsonObj = JSON.parse(obj);
|
||||
|
||||
// Only convert log items that have an RA and millis field
|
||||
if (jsonObj.ra !== undefined && jsonObj.millis !== undefined) {
|
||||
const newLogData: EvaluatorLogData = {
|
||||
queryCausingWork: jsonObj.queryCausingWork,
|
||||
predicateName: jsonObj.predicateName,
|
||||
millis: jsonObj.millis,
|
||||
resultSize: jsonObj.resultSize,
|
||||
ra: jsonObj.ra
|
||||
};
|
||||
visualizerData.push(newLogData);
|
||||
}
|
||||
}
|
||||
return visualizerData;
|
||||
}
|
||||
export async function parseViewerData(jsonSummaryPath: string): Promise<EvalLogData[]> {
|
||||
const viewerData: EvalLogData[] = [];
|
||||
|
||||
await readJsonlFile(jsonSummaryPath, async jsonObj => {
|
||||
// Only convert log items that have an RA and millis field
|
||||
if (jsonObj.ra !== undefined && jsonObj.millis !== undefined) {
|
||||
const newLogData: EvalLogData = {
|
||||
predicateName: jsonObj.predicateName,
|
||||
millis: jsonObj.millis,
|
||||
resultSize: jsonObj.resultSize,
|
||||
ra: jsonObj.ra
|
||||
};
|
||||
viewerData.push(newLogData);
|
||||
}
|
||||
});
|
||||
|
||||
return viewerData;
|
||||
}
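A hypothetical usage sketch (the helper and its console output are not part of the extension): list the slowest predicates recorded in a JSON summary.

async function printSlowestPredicates(jsonSummaryPath: string): Promise<void> {
  const data = await parseViewerData(jsonSummaryPath);
  const slowest = [...data].sort((a, b) => b.millis - a.millis).slice(0, 5);
  for (const entry of slowest) {
    console.log(`${entry.predicateName}: ${entry.millis} ms, ${entry.resultSize} rows`);
  }
}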
|
||||
|
||||
110
extensions/ql-vscode/src/pure/messages-shared.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
/**
|
||||
* Types for messages exchanged during jsonrpc communication with
* the CodeQL query server.
|
||||
*
|
||||
* This file exists in the queryserver and in the vscode extension, and
|
||||
* should be kept in sync between them.
|
||||
*
|
||||
* A note about the namespaces below, which look like they are
|
||||
* essentially enums, namely Severity, ResultColumnKind, and
|
||||
* QueryResultType. By design, for the sake of extensibility, clients
|
||||
* receiving messages of this protocol are supposed to accept any
|
||||
* number for any of these types. We commit to the given meaning of
|
||||
* the numbers listed in constants in the namespaces, and we commit to
|
||||
* the fact that any unknown QueryResultType value counts as an error.
|
||||
*/
|
||||
|
||||
import * as rpc from 'vscode-jsonrpc';
|
||||
|
||||
/**
|
||||
* A position within a QL file.
|
||||
*/
|
||||
export interface Position {
|
||||
/**
|
||||
* The one-based index of the start line
|
||||
*/
|
||||
line: number;
|
||||
/**
|
||||
* The one-based offset of the start column within
|
||||
* the start line in UTF-16 code-units
|
||||
*/
|
||||
column: number;
|
||||
/**
|
||||
* The one-based index of the end line
|
||||
*/
|
||||
endLine: number;
|
||||
|
||||
/**
|
||||
* The one-based offset of the end column within
|
||||
* the end line in UTF-16 code-units
|
||||
*/
|
||||
endColumn: number;
|
||||
/**
|
||||
* The path of the file.
|
||||
* If the file name is "Compiler Generated", the
* position is not a real position but
* arises from compiler-generated code.
|
||||
*/
|
||||
fileName: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The way of compiling the query, as a normal query
|
||||
* or a subset of it. Note that precisely one of the two options should be set.
|
||||
*/
|
||||
export interface CompilationTarget {
|
||||
/**
|
||||
* Compile as a normal query
|
||||
*/
|
||||
query?: Record<string, never>;
|
||||
/**
|
||||
* Compile as a quick evaluation
|
||||
*/
|
||||
quickEval?: QuickEvalOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for quick evaluation
|
||||
*/
|
||||
export interface QuickEvalOptions {
|
||||
quickEvalPos?: Position;
|
||||
}
|
||||
|
||||
/**
|
||||
* Type for any action that could have progress messages.
|
||||
*/
|
||||
export interface WithProgressId<T> {
|
||||
/**
|
||||
* The main body
|
||||
*/
|
||||
body: T;
|
||||
/**
|
||||
* The id used to report progress updates
|
||||
*/
|
||||
progressId: number;
|
||||
}
|
||||
|
||||
export interface ProgressMessage {
|
||||
/**
|
||||
* The id of the operation that is running
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* The current step
|
||||
*/
|
||||
step: number;
|
||||
/**
|
||||
* The maximum step. This *should* be constant for a single job.
|
||||
*/
|
||||
maxStep: number;
|
||||
/**
|
||||
* The current progress message
|
||||
*/
|
||||
message: string;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* A notification that the progress has been changed.
|
||||
*/
|
||||
export const progress = new rpc.NotificationType<ProgressMessage, void>('ql/progressUpdated');
|
||||
215
extensions/ql-vscode/src/pure/new-messages.ts
Normal file
@@ -0,0 +1,215 @@
|
||||
/**
|
||||
* Types for messages exchanged during jsonrpc communication with
* the CodeQL query server.
|
||||
*
|
||||
* This file exists in the queryserver and in the vscode extension, and
|
||||
* should be kept in sync between them.
|
||||
*
|
||||
* A note about the namespaces below, which look like they are
|
||||
* essentially enums, namely Severity, ResultColumnKind, and
|
||||
* QueryResultType. By design, for the sake of extensibility, clients
|
||||
* receiving messages of this protocol are supposed to accept any
|
||||
* number for any of these types. We commit to the given meaning of
|
||||
* the numbers listed in constants in the namespaces, and we commit to
|
||||
* the fact that any unknown QueryResultType value counts as an error.
|
||||
*/
|
||||
|
||||
import * as rpc from 'vscode-jsonrpc';
|
||||
import * as shared from './messages-shared';
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Parameters to clear the cache
|
||||
*/
|
||||
export interface ClearCacheParams {
|
||||
/**
|
||||
* The dataset for which we want to clear the cache
|
||||
*/
|
||||
db: string;
|
||||
/**
|
||||
* Whether the cache should actually be cleared.
|
||||
*/
|
||||
dryRun: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for trimming the cache of a dataset
|
||||
*/
|
||||
export interface TrimCacheParams {
|
||||
/**
|
||||
* The dataset that we want to trim the cache of.
|
||||
*/
|
||||
db: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of trimming or clearing the cache.
|
||||
*/
|
||||
export interface ClearCacheResult {
|
||||
/**
|
||||
* A user friendly message saying what was or would be
|
||||
* deleted.
|
||||
*/
|
||||
deletionMessage: string;
|
||||
}
|
||||
|
||||
|
||||
export type QueryResultType = number;
|
||||
/**
|
||||
* The result of running a query. This namespace is intentionally not
|
||||
* an enum, see "for the sake of extensibility" comment above.
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-namespace
|
||||
export namespace QueryResultType {
|
||||
/**
|
||||
* The query ran successfully
|
||||
*/
|
||||
export const SUCCESS = 0;
|
||||
/**
|
||||
* The query failed due to a reason
* that isn't listed
|
||||
*/
|
||||
export const OTHER_ERROR = 1;
|
||||
/**
|
||||
* The query failed due to compilation errors
|
||||
*/
|
||||
export const COMPILATION_ERROR = 2;
|
||||
/**
|
||||
* The query failed due to running out of
|
||||
* memory
|
||||
*/
|
||||
export const OOM = 3;
|
||||
/**
|
||||
* The query failed because it was cancelled.
|
||||
*/
|
||||
export const CANCELLATION = 4;
|
||||
/**
|
||||
* The dbscheme basename was not the same as expected
|
||||
*/
|
||||
export const DBSCHEME_MISMATCH_NAME = 5;
|
||||
/**
|
||||
* No upgrade was found
|
||||
*/
|
||||
export const DBSCHEME_NO_UPGRADE = 6;
|
||||
}
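An illustrative helper (not part of the protocol) that maps the constants above to short descriptions; per the extensibility note at the top of this file, any unrecognized code is treated as an error.

function describeQueryResultType(resultType: QueryResultType): string {
  switch (resultType) {
    case QueryResultType.SUCCESS: return 'succeeded';
    case QueryResultType.CANCELLATION: return 'was cancelled';
    case QueryResultType.OOM: return 'ran out of memory';
    case QueryResultType.COMPILATION_ERROR: return 'failed to compile';
    default: return 'failed';
  }
}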
|
||||
|
||||
|
||||
export interface RegisterDatabasesParams {
  databases: string[];
}

export interface DeregisterDatabasesParams {
  databases: string[];
}

export type RegisterDatabasesResult = {
  registeredDatabases: string[];
};

export type DeregisterDatabasesResult = {
  registeredDatabases: string[];
};


export interface RunQueryParams {
  /**
   * The path of the query
   */
  queryPath: string,
  /**
   * The output path
   */
  outputPath: string,
  /**
   * The database path
   */
  db: string,
  additionalPacks: string[],
  target: CompilationTarget,
  externalInputs: Record<string, string>,
  singletonExternalInputs: Record<string, string>,
  dilPath?: string,
  logPath?: string
}

export interface RunQueryResult {
  resultType: QueryResultType,
  message?: string,
  expectedDbschemeName?: string,
  evaluationTime: number;
}


export interface UpgradeParams {
  db: string,
  additionalPacks: string[],
}

export type UpgradeResult = Record<string, unknown>;

export type ClearPackCacheParams = Record<string, unknown>;
export type ClearPackCacheResult = Record<string, unknown>;

/**
 * A position within a QL file.
 */
export type Position = shared.Position;

/**
 * The way of compiling the query, as a normal query
 * or a subset of it. Note that precisely one of the two options should be set.
 */
export type CompilationTarget = shared.CompilationTarget;

export type QuickEvalOptions = shared.QuickEvalOptions;

export type WithProgressId<T> = shared.WithProgressId<T>;
export type ProgressMessage = shared.ProgressMessage;

/**
 * Clear the cache of a dataset
 */
export const clearCache = new rpc.RequestType<WithProgressId<ClearCacheParams>, ClearCacheResult, void, void>('evaluation/clearCache');
/**
 * Trim the cache of a dataset
 */
export const trimCache = new rpc.RequestType<WithProgressId<TrimCacheParams>, ClearCacheResult, void, void>('evaluation/trimCache');

/**
 * Clear the pack cache
 */
export const clearPackCache = new rpc.RequestType<WithProgressId<ClearPackCacheParams>, ClearPackCacheResult, void, void>('evaluation/clearPackCache');

/**
 * Run a query on a database
 */
export const runQuery = new rpc.RequestType<WithProgressId<RunQueryParams>, RunQueryResult, void, void>('evaluation/runQuery');

export const registerDatabases = new rpc.RequestType<
  WithProgressId<RegisterDatabasesParams>,
  RegisterDatabasesResult,
  void,
  void
>('evaluation/registerDatabases');

export const deregisterDatabases = new rpc.RequestType<
  WithProgressId<DeregisterDatabasesParams>,
  DeregisterDatabasesResult,
  void,
  void
>('evaluation/deregisterDatabases');


export const upgradeDatabase = new rpc.RequestType<
  WithProgressId<UpgradeParams>,
  UpgradeResult,
  void,
  void
>('evaluation/runUpgrade');

/**
 * A notification that the progress has been changed.
 */
export const progress = shared.progress;
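The header comment at the top of new-messages.ts commits clients to treating any unrecognised QueryResultType value as an error. A minimal sketch of a client honouring that contract, mirroring the mapping used later in query-server/run-queries.ts (the describeResult helper itself is illustrative, not part of the extension):

import { QueryResultType, RunQueryResult } from './new-messages';

// Map a RunQueryResult onto a user-facing outcome. Any result type we do not
// recognise, including codes added by a newer query server, counts as failure.
function describeResult(result: RunQueryResult): { successful: boolean; message: string } {
  switch (result.resultType) {
    case QueryResultType.SUCCESS:
      return { successful: true, message: `finished in ${Math.round(result.evaluationTime / 1000)} seconds` };
    case QueryResultType.CANCELLATION:
      return { successful: false, message: 'cancelled' };
    case QueryResultType.OOM:
      return { successful: false, message: 'out of memory' };
    case QueryResultType.COMPILATION_ERROR:
      return { successful: false, message: `compilation failed: ${result.message ?? ''}` };
    default:
      return { successful: false, message: result.message ?? 'failed' };
  }
}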
15 extensions/ql-vscode/src/pure/number.ts Normal file
@@ -0,0 +1,15 @@
/*
 * Contains an assortment of helper constants and functions for working with numbers.
 */

const numberFormatter = new Intl.NumberFormat('en-US');

/**
 * Formats a number to be human-readable with decimal places and thousands separators.
 *
 * @param value The number to format.
 * @returns The formatted number. For example, "10,000", "1,000,000", or "1,000,000,000".
 */
export function formatDecimal(value: number): string {
  return numberFormatter.format(value);
}
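A quick usage sketch for formatDecimal; the outputs follow the en-US locale the formatter is constructed with:

import { formatDecimal } from './pure/number';

formatDecimal(10000);      // "10,000"
formatDecimal(1234567.5);  // "1,234,567.5"
formatDecimal(-42);        // "-42"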
extensions/ql-vscode/src/pure/time.ts
@@ -2,7 +2,8 @@
  * Contains an assortment of helper constants and functions for working with time, dates, and durations.
  */

-export const ONE_MINUTE_IN_MS = 1000 * 60;
+export const ONE_SECOND_IN_MS = 1000;
+export const ONE_MINUTE_IN_MS = ONE_SECOND_IN_MS * 60;
 export const ONE_HOUR_IN_MS = ONE_MINUTE_IN_MS * 60;
 export const TWO_HOURS_IN_MS = ONE_HOUR_IN_MS * 2;
 export const THREE_HOURS_IN_MS = ONE_HOUR_IN_MS * 3;
@@ -43,20 +44,23 @@ export function humanizeRelativeTime(relativeTimeMillis?: number) {

 /**
  * Converts a number of milliseconds into a human-readable string with units, indicating an amount of time.
- * Negative numbers have no meaning and are considered to be "Less than a minute".
+ * Negative numbers have no meaning and are considered to be "Less than a second".
  *
  * @param millis The number of milliseconds to convert.
- * @returns A humanized duration. For example, "2 minutes", "2 hours", "2 days", or "2 months".
+ * @returns A humanized duration. For example, "2 seconds", "2 minutes", "2 hours", "2 days", or "2 months".
  */
 export function humanizeUnit(millis?: number): string {
-  // assume a blank or empty string is a zero
-  if (!millis || millis < ONE_MINUTE_IN_MS) {
-    return 'Less than a minute';
+  // assume anything less than 0 is a zero
+  if (!millis || millis < ONE_SECOND_IN_MS) {
+    return 'Less than a second';
   }
   let unit: string;
   let unitDiff: number;
-  if (millis < ONE_HOUR_IN_MS) {
+  if (millis < ONE_MINUTE_IN_MS) {
+    unit = 'second';
+    unitDiff = Math.floor(millis / ONE_SECOND_IN_MS);
+  } else if (millis < ONE_HOUR_IN_MS) {
     unit = 'minute';
     unitDiff = Math.floor(millis / ONE_MINUTE_IN_MS);
   } else if (millis < ONE_DAY_IN_MS) {
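A few illustrative calls against the updated humanizeUnit, based on the constants and branches in this hunk (exact output strings for the larger units depend on the formatting tail of the function, which is outside the hunk):

import { humanizeUnit, ONE_SECOND_IN_MS, ONE_HOUR_IN_MS } from './pure/time';

humanizeUnit(undefined);             // "Less than a second" (also for 0 or negative input)
humanizeUnit(500);                   // "Less than a second"
humanizeUnit(2 * ONE_SECOND_IN_MS);  // "2 seconds", per the @returns examples above
humanizeUnit(90 * ONE_SECOND_IN_MS); // selects unit = 'minute', unitDiff = 1
humanizeUnit(3 * ONE_HOUR_IN_MS);    // falls through to the hour/day branches shown above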
11 extensions/ql-vscode/src/pure/zip.ts Normal file
@@ -0,0 +1,11 @@
import * as unzipper from 'unzipper';

/**
 * Unzips a zip file to a directory.
 * @param sourcePath The path to the zip file.
 * @param destinationPath The path to the directory to unzip to.
 */
export async function unzipFile(sourcePath: string, destinationPath: string) {
  const file = await unzipper.Open.file(sourcePath);
  await file.extract({ path: destinationPath });
}
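A brief usage sketch for the helper above (the paths are placeholders):

import { unzipFile } from './pure/zip';

async function extractDownloadedDatabase() {
  // Reads the archive's central directory, then extracts its entries
  // into the destination directory.
  await unzipFile('/tmp/codeql-db.zip', '/tmp/codeql-db');
}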
19 extensions/ql-vscode/src/query-history-info.ts Normal file
@@ -0,0 +1,19 @@
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
import { VariantAnalysisHistoryItem } from './remote-queries/variant-analysis-history-item';
import { LocalQueryInfo } from './query-results';
import { assertNever } from './pure/helpers-pure';

export type QueryHistoryInfo = LocalQueryInfo | RemoteQueryHistoryItem | VariantAnalysisHistoryItem;

export function getRawQueryName(item: QueryHistoryInfo): string {
  switch (item.t) {
    case 'local':
      return item.getQueryName();
    case 'remote':
      return item.remoteQuery.queryName;
    case 'variant-analysis':
      return item.variantAnalysis.query.name;
    default:
      assertNever(item);
  }
}
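getRawQueryName leans on the `t` discriminant plus assertNever to keep the switch exhaustive: adding a fourth variant to QueryHistoryInfo turns the default branch into a compile error. A minimal sketch of the pattern, with an assertNever of the shape the helper in pure/helpers-pure is assumed to have:

function assertNever(value: never): never {
  throw new Error(`Unexpected value: ${value}`);
}

type Item = { t: 'local' } | { t: 'remote' } | { t: 'variant-analysis' };

function describe(item: Item): string {
  switch (item.t) {
    case 'local': return 'a local query';
    case 'remote': return 'a remote query';
    case 'variant-analysis': return 'a variant analysis';
    default:
      // If a new `t` value is added to Item, `item` no longer narrows to
      // `never` here and this call stops type-checking.
      return assertNever(item);
  }
}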
extensions/ql-vscode/src/query-history-scrubber.ts
@@ -26,7 +26,7 @@ type Counter = {
  * @param queryDirectory The directory containing all queries.
  * @param ctx The extension context.
  */
-export function registerQueryHistoryScubber(
+export function registerQueryHistoryScrubber(
   wakeInterval: number,
   throttleTime: number,
   maxQueryTime: number,

extensions/ql-vscode/src/query-history.ts
@@ -9,6 +9,7 @@ import {
|
||||
ProviderResult,
|
||||
Range,
|
||||
ThemeIcon,
|
||||
TreeDataProvider,
|
||||
TreeItem,
|
||||
TreeView,
|
||||
Uri,
|
||||
@@ -25,25 +26,31 @@ import {
|
||||
} from './helpers';
|
||||
import { logger } from './logging';
|
||||
import { URLSearchParams } from 'url';
|
||||
import { QueryServerClient } from './queryserver-client';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { commandRunner } from './commandRunner';
|
||||
import { ONE_HOUR_IN_MS, TWO_HOURS_IN_MS } from './pure/time';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { CompletedLocalQueryInfo, LocalQueryInfo as LocalQueryInfo, QueryHistoryInfo } from './query-results';
|
||||
import { CompletedLocalQueryInfo, LocalQueryInfo } from './query-results';
|
||||
import { QueryHistoryInfo } from './query-history-info';
|
||||
import { DatabaseManager } from './databases';
|
||||
import { registerQueryHistoryScubber } from './query-history-scrubber';
|
||||
import { registerQueryHistoryScrubber } from './query-history-scrubber';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { slurpQueryHistory, splatQueryHistory } from './query-serialization';
|
||||
import * as fs from 'fs-extra';
|
||||
import { CliVersionConstraint } from './cli';
|
||||
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||
import { Credentials } from './authentication';
|
||||
import { cancelRemoteQuery } from './remote-queries/gh-actions-api-client';
|
||||
import { cancelRemoteQuery } from './remote-queries/gh-api/gh-actions-api-client';
|
||||
import { RemoteQueriesManager } from './remote-queries/remote-queries-manager';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
import { InterfaceManager } from './interface';
|
||||
import { ResultsView } from './interface';
|
||||
import { WebviewReveal } from './interface-utils';
|
||||
import { EvalLogViewer } from './eval-log-viewer';
|
||||
import EvalLogTreeBuilder from './eval-log-tree-builder';
|
||||
import { EvalLogData, parseViewerData } from './pure/log-summary-parser';
|
||||
import { QueryWithResults } from './run-queries-shared';
|
||||
import { QueryRunner } from './queryRunner';
|
||||
import { VariantAnalysisManager } from './remote-queries/variant-analysis-manager';
|
||||
|
||||
/**
|
||||
* query-history.ts
|
||||
@@ -111,7 +118,7 @@ const WORKSPACE_QUERY_HISTORY_FILE = 'workspace-query-history.json';
|
||||
/**
|
||||
* Tree data provider for the query history view.
|
||||
*/
|
||||
export class HistoryTreeDataProvider extends DisposableObject {
|
||||
export class HistoryTreeDataProvider extends DisposableObject implements TreeDataProvider<QueryHistoryInfo> {
|
||||
private _sortOrder = SortOrder.DateAsc;
|
||||
|
||||
private _onDidChangeTreeData = super.push(new EventEmitter<QueryHistoryInfo | undefined>());
|
||||
@@ -119,6 +126,10 @@ export class HistoryTreeDataProvider extends DisposableObject {
|
||||
readonly onDidChangeTreeData: Event<QueryHistoryInfo | undefined> = this
|
||||
._onDidChangeTreeData.event;
|
||||
|
||||
private _onDidChangeCurrentQueryItem = super.push(new EventEmitter<QueryHistoryInfo | undefined>());
|
||||
|
||||
public readonly onDidChangeCurrentQueryItem = this._onDidChangeCurrentQueryItem.event;
|
||||
|
||||
private history: QueryHistoryInfo[] = [];
|
||||
|
||||
private failedIconPath: string;
|
||||
@@ -180,7 +191,7 @@ export class HistoryTreeDataProvider extends DisposableObject {
|
||||
break;
|
||||
case QueryStatus.Failed:
|
||||
treeItem.iconPath = this.failedIconPath;
|
||||
treeItem.contextValue = 'cancelledResultsItem';
|
||||
treeItem.contextValue = element.t === 'local' ? 'cancelledResultsItem' : 'cancelledRemoteResultsItem';
|
||||
break;
|
||||
default:
|
||||
assertNever(element.status);
|
||||
@@ -197,13 +208,9 @@ export class HistoryTreeDataProvider extends DisposableObject {
|
||||
const h1Label = this.labelProvider.getLabel(h1).toLowerCase();
|
||||
const h2Label = this.labelProvider.getLabel(h2).toLowerCase();
|
||||
|
||||
const h1Date = h1.t === 'local'
|
||||
? h1.initialInfo.start.getTime()
|
||||
: h1.remoteQuery?.executionStartTime;
|
||||
const h1Date = this.getItemDate(h1);
|
||||
|
||||
const h2Date = h2.t === 'local'
|
||||
? h2.initialInfo.start.getTime()
|
||||
: h2.remoteQuery?.executionStartTime;
|
||||
const h2Date = this.getItemDate(h2);
|
||||
|
||||
const resultCount1 = h1.t === 'local'
|
||||
? h1.completedQuery?.resultCount ?? -1
|
||||
@@ -257,7 +264,10 @@ export class HistoryTreeDataProvider extends DisposableObject {
|
||||
}
|
||||
|
||||
setCurrentItem(item?: QueryHistoryInfo) {
|
||||
this.current = item;
|
||||
if (item !== this.current) {
|
||||
this.current = item;
|
||||
this._onDidChangeCurrentQueryItem.fire(item);
|
||||
}
|
||||
}
|
||||
|
||||
remove(item: QueryHistoryInfo) {
|
||||
@@ -283,7 +293,7 @@ export class HistoryTreeDataProvider extends DisposableObject {
|
||||
|
||||
set allHistory(history: QueryHistoryInfo[]) {
|
||||
this.history = history;
|
||||
this.current = history[0];
|
||||
this.setCurrentItem(history[0]);
|
||||
this.refresh();
|
||||
}
|
||||
|
||||
@@ -299,6 +309,19 @@ export class HistoryTreeDataProvider extends DisposableObject {
     this._sortOrder = newSortOrder;
     this._onDidChangeTreeData.fire(undefined);
   }
+
+  private getItemDate(item: QueryHistoryInfo) {
+    switch (item.t) {
+      case 'local':
+        return item.initialInfo.start.getTime();
+      case 'remote':
+        return item.remoteQuery.executionStartTime;
+      case 'variant-analysis':
+        return item.variantAnalysis.executionStartTime;
+      default:
+        assertNever(item);
+    }
+  }
 }

 export class QueryHistoryManager extends DisposableObject {
@@ -310,11 +333,19 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
queryHistoryScrubber: Disposable | undefined;
|
||||
private queryMetadataStorageLocation;
|
||||
|
||||
private readonly _onDidChangeCurrentQueryItem = super.push(new EventEmitter<QueryHistoryInfo | undefined>());
|
||||
readonly onDidChangeCurrentQueryItem = this._onDidChangeCurrentQueryItem.event;
|
||||
|
||||
private readonly _onDidCompleteQuery = super.push(new EventEmitter<LocalQueryInfo>());
|
||||
readonly onDidCompleteQuery = this._onDidCompleteQuery.event;
|
||||
|
||||
constructor(
|
||||
private readonly qs: QueryServerClient,
|
||||
private readonly qs: QueryRunner,
|
||||
private readonly dbm: DatabaseManager,
|
||||
private readonly localQueriesInterfaceManager: InterfaceManager,
|
||||
private readonly localQueriesResultsView: ResultsView,
|
||||
private readonly remoteQueriesManager: RemoteQueriesManager,
|
||||
private readonly variantAnalysisManager: VariantAnalysisManager,
|
||||
private readonly evalLogViewer: EvalLogViewer,
|
||||
private readonly queryStorageDir: string,
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly queryHistoryConfigListener: QueryHistoryConfig,
|
||||
@@ -341,6 +372,11 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
canSelectMany: true,
|
||||
}));
|
||||
|
||||
// Forward any change of current history item from the tree data.
|
||||
this.push(this.treeDataProvider.onDidChangeCurrentQueryItem((item) => {
|
||||
this._onDidChangeCurrentQueryItem.fire(item);
|
||||
}));
|
||||
|
||||
// Lazily update the tree view selection due to limitations of TreeView API (see
|
||||
// `updateTreeViewSelectionIfVisible` doc for details)
|
||||
this.push(
|
||||
@@ -432,6 +468,12 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
this.handleShowEvalLogSummary.bind(this)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.showEvalLogViewer',
|
||||
this.handleShowEvalLogViewer.bind(this)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.cancel',
|
||||
@@ -525,6 +567,12 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
this.registerQueryHistoryScrubber(queryHistoryConfigListener, this, ctx);
|
||||
this.registerToRemoteQueriesEvents();
|
||||
this.registerToVariantAnalysisEvents();
|
||||
}
|
||||
|
||||
public completeQuery(info: LocalQueryInfo, results: QueryWithResults): void {
|
||||
info.completeThisQuery(results);
|
||||
this._onDidCompleteQuery.fire(info);
|
||||
}
|
||||
|
||||
private getCredentials() {
|
||||
@@ -538,7 +586,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
this.queryHistoryScrubber?.dispose();
|
||||
// Every hour check if we need to re-run the query history scrubber.
|
||||
this.queryHistoryScrubber = this.push(
|
||||
registerQueryHistoryScubber(
|
||||
registerQueryHistoryScrubber(
|
||||
ONE_HOUR_IN_MS,
|
||||
TWO_HOURS_IN_MS,
|
||||
queryHistoryConfigListener.ttlInMillis,
|
||||
@@ -549,8 +597,23 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
);
|
||||
}
|
||||
|
||||
private registerToVariantAnalysisEvents() {
|
||||
const variantAnalysisAddedSubscription = this.variantAnalysisManager.onVariantAnalysisAdded(async (variantAnalysis) => {
|
||||
this.addQuery({
|
||||
t: 'variant-analysis',
|
||||
status: QueryStatus.InProgress,
|
||||
completed: false,
|
||||
variantAnalysis,
|
||||
});
|
||||
|
||||
await this.refreshTreeView();
|
||||
});
|
||||
|
||||
this.push(variantAnalysisAddedSubscription);
|
||||
}
|
||||
|
||||
private registerToRemoteQueriesEvents() {
|
||||
const queryAddedSubscription = this.remoteQueriesManager.onRemoteQueryAdded(event => {
|
||||
const queryAddedSubscription = this.remoteQueriesManager.onRemoteQueryAdded(async (event) => {
|
||||
this.addQuery({
|
||||
t: 'remote',
|
||||
status: QueryStatus.InProgress,
|
||||
@@ -558,6 +621,8 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
queryId: event.queryId,
|
||||
remoteQuery: event.query,
|
||||
});
|
||||
|
||||
await this.refreshTreeView();
|
||||
});
|
||||
|
||||
const queryRemovedSubscription = this.remoteQueriesManager.onRemoteQueryRemoved(async (event) => {
|
||||
@@ -612,10 +677,20 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
return;
|
||||
}
|
||||
|
||||
const queryPath = finalSingleItem.t === 'local'
|
||||
? finalSingleItem.initialInfo.queryPath
|
||||
: finalSingleItem.remoteQuery.queryFilePath;
|
||||
|
||||
let queryPath: string;
|
||||
switch (finalSingleItem.t) {
|
||||
case 'local':
|
||||
queryPath = finalSingleItem.initialInfo.queryPath;
|
||||
break;
|
||||
case 'remote':
|
||||
queryPath = finalSingleItem.remoteQuery.queryFilePath;
|
||||
break;
|
||||
case 'variant-analysis':
|
||||
queryPath = finalSingleItem.variantAnalysis.query.filePath;
|
||||
break;
|
||||
default:
|
||||
assertNever(finalSingleItem);
|
||||
}
|
||||
const textDocument = await workspace.openTextDocument(
|
||||
Uri.file(queryPath)
|
||||
);
|
||||
@@ -643,6 +718,10 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
return this.treeDataProvider.getCurrent();
|
||||
}
|
||||
|
||||
getRemoteQueryById(queryId: string): RemoteQueryHistoryItem | undefined {
|
||||
return this.treeDataProvider.allHistory.find(i => i.t === 'remote' && i.queryId === queryId) as RemoteQueryHistoryItem;
|
||||
}
|
||||
|
||||
async removeDeletedQueries() {
|
||||
await Promise.all(this.treeDataProvider.allHistory.map(async (item) => {
|
||||
if (item.t == 'local' && item.completedQuery && !(await fs.pathExists(item.completedQuery?.query.querySaveDir))) {
|
||||
@@ -669,8 +748,12 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
// We need to delete it from disk as well.
|
||||
await item.completedQuery?.query.deleteQuery();
|
||||
}
|
||||
} else {
|
||||
} else if (item.t === 'remote') {
|
||||
await this.removeRemoteQuery(item);
|
||||
} else if (item.t === 'variant-analysis') {
|
||||
// TODO
|
||||
} else {
|
||||
assertNever(item);
|
||||
}
|
||||
}));
|
||||
|
||||
@@ -754,7 +837,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
throw new Error('Please select a local query.');
|
||||
}
|
||||
|
||||
if (!finalSingleItem.completedQuery?.didRunSuccessfully) {
|
||||
if (!finalSingleItem.completedQuery?.successful) {
|
||||
throw new Error('Please select a query that has completed successfully.');
|
||||
}
|
||||
|
||||
@@ -867,16 +950,16 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
}
|
||||
|
||||
private warnNoEvalLog() {
|
||||
void showAndLogWarningMessage(`No evaluator log is available for this run. Perhaps it failed before evaluation, or you are running with a version of CodeQL before ' + ${CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG}?`);
|
||||
}
|
||||
|
||||
private warnNoEvalLogSummary() {
|
||||
void showAndLogWarningMessage(`Evaluator log summary and evaluator log are not available for this run. Perhaps they failed before evaluation, or you are running with a version of CodeQL before ${CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG}?`);
|
||||
private warnNoEvalLogs() {
|
||||
void showAndLogWarningMessage(`Evaluator log, summary, and viewer are not available for this run. Perhaps it failed before evaluation, or you are running with a version of CodeQL before ' + ${CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG}?`);
|
||||
}
|
||||
|
||||
private warnInProgressEvalLogSummary() {
|
||||
void showAndLogWarningMessage('The evaluator log summary is still being generated. Please try again later. The summary generation process is tracked in the "CodeQL Extension Log" view.');
|
||||
void showAndLogWarningMessage('The evaluator log summary is still being generated for this run. Please try again later. The summary generation process is tracked in the "CodeQL Extension Log" view.');
|
||||
}
|
||||
|
||||
private warnInProgressEvalLogViewer() {
|
||||
void showAndLogWarningMessage('The viewer\'s data is still being generated for this run. Please try again or re-run the query.');
|
||||
}
|
||||
|
||||
async handleShowEvalLog(
|
||||
@@ -893,7 +976,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
if (finalSingleItem.evalLogLocation) {
|
||||
await this.tryOpenExternalFile(finalSingleItem.evalLogLocation);
|
||||
} else {
|
||||
this.warnNoEvalLog();
|
||||
this.warnNoEvalLogs();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -910,15 +993,41 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
|
||||
if (finalSingleItem.evalLogSummaryLocation) {
|
||||
await this.tryOpenExternalFile(finalSingleItem.evalLogSummaryLocation);
|
||||
return;
|
||||
}
|
||||
|
||||
// Summary log file doesn't exist.
|
||||
else {
|
||||
if (finalSingleItem.evalLogLocation && fs.pathExists(finalSingleItem.evalLogLocation)) {
|
||||
// If raw log does exist, then the summary log is still being generated.
|
||||
this.warnInProgressEvalLogSummary();
|
||||
} else {
|
||||
this.warnNoEvalLogSummary();
|
||||
}
|
||||
if (finalSingleItem.evalLogLocation && await fs.pathExists(finalSingleItem.evalLogLocation)) {
|
||||
// If raw log does exist, then the summary log is still being generated.
|
||||
this.warnInProgressEvalLogSummary();
|
||||
} else {
|
||||
this.warnNoEvalLogs();
|
||||
}
|
||||
}
|
||||
|
||||
async handleShowEvalLogViewer(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[],
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
|
||||
// Only applicable to an individual local query
|
||||
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem || finalSingleItem.t !== 'local') {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the JSON summary file location wasn't saved, display error
|
||||
if (finalSingleItem.jsonEvalLogSummaryLocation == undefined) {
|
||||
this.warnInProgressEvalLogViewer();
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO(angelapwen): Stream the file in.
|
||||
try {
|
||||
const evalLogData: EvalLogData[] = await parseViewerData(finalSingleItem.jsonEvalLogSummaryLocation);
|
||||
const evalLogTreeBuilder = new EvalLogTreeBuilder(finalSingleItem.getQueryName(), evalLogData);
|
||||
this.evalLogViewer.updateRoots(await evalLogTreeBuilder.getRoots());
|
||||
} catch (e) {
|
||||
throw new Error(`Could not read evaluator log summary JSON file to generate viewer data at ${finalSingleItem.jsonEvalLogSummaryLocation}.`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -926,8 +1035,6 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[]
|
||||
) {
|
||||
// Local queries only
|
||||
// In the future, we may support cancelling remote queries, but this is not a short term plan.
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
|
||||
|
||||
const selected = finalMultiSelect || [finalSingleItem];
|
||||
@@ -960,15 +1067,20 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
isQuickEval: String(!!(finalSingleItem.t === 'local' && finalSingleItem.initialInfo.quickEvalPosition)),
|
||||
queryText: encodeURIComponent(await this.getQueryText(finalSingleItem)),
|
||||
});
|
||||
const queryId = finalSingleItem.t === 'local'
|
||||
? finalSingleItem.initialInfo.id
|
||||
: finalSingleItem.queryId;
|
||||
|
||||
const uri = Uri.parse(
|
||||
`codeql:${queryId}?${params.toString()}`, true
|
||||
);
|
||||
const doc = await workspace.openTextDocument(uri);
|
||||
await window.showTextDocument(doc, { preview: false });
|
||||
if (finalSingleItem.t === 'variant-analysis') {
|
||||
// TODO
|
||||
} else {
|
||||
const queryId = finalSingleItem.t === 'local'
|
||||
? finalSingleItem.initialInfo.id
|
||||
: finalSingleItem.queryId;
|
||||
|
||||
const uri = Uri.parse(
|
||||
`codeql:${queryId}?${params.toString()}`, true
|
||||
);
|
||||
const doc = await workspace.openTextDocument(uri);
|
||||
await window.showTextDocument(doc, { preview: false });
|
||||
}
|
||||
}
|
||||
|
||||
async handleViewSarifAlerts(
|
||||
@@ -1011,7 +1123,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
void this.tryOpenExternalFile(query.csvPath);
|
||||
return;
|
||||
}
|
||||
if (await query.exportCsvResults(this.qs, query.csvPath)) {
|
||||
if (await query.exportCsvResults(this.qs.cliServer, query.csvPath)) {
|
||||
void this.tryOpenExternalFile(
|
||||
query.csvPath
|
||||
);
|
||||
@@ -1030,7 +1142,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
|
||||
await this.tryOpenExternalFile(
|
||||
await finalSingleItem.completedQuery.query.ensureCsvAlerts(this.qs, this.dbm)
|
||||
await finalSingleItem.completedQuery.query.ensureCsvAlerts(this.qs.cliServer, this.dbm)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1046,7 +1158,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
|
||||
await this.tryOpenExternalFile(
|
||||
await finalSingleItem.completedQuery.query.ensureDilPath(this.qs)
|
||||
await finalSingleItem.completedQuery.query.ensureDilPath(this.qs.cliServer)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1084,9 +1196,16 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
}
|
||||
|
||||
async getQueryText(item: QueryHistoryInfo): Promise<string> {
|
||||
return item.t === 'local'
|
||||
? item.initialInfo.queryText
|
||||
: item.remoteQuery.queryText;
|
||||
switch (item.t) {
|
||||
case 'local':
|
||||
return item.initialInfo.queryText;
|
||||
case 'remote':
|
||||
return item.remoteQuery.queryText;
|
||||
case 'variant-analysis':
|
||||
return 'TODO';
|
||||
default:
|
||||
assertNever(item);
|
||||
}
|
||||
}
|
||||
|
||||
async handleExportResults(): Promise<void> {
|
||||
@@ -1171,7 +1290,7 @@ the file in the file explorer and dragging it into the workspace.`
|
||||
if (!otherQuery.completedQuery) {
|
||||
throw new Error('Please select a completed query.');
|
||||
}
|
||||
if (!otherQuery.completedQuery.didRunSuccessfully) {
|
||||
if (!otherQuery.completedQuery.successful) {
|
||||
throw new Error('Please select a successful query.');
|
||||
}
|
||||
if (otherQuery.initialInfo.databaseInfo.name !== dbName) {
|
||||
@@ -1191,7 +1310,7 @@ the file in the file explorer and dragging it into the workspace.`
|
||||
otherQuery !== singleItem &&
|
||||
otherQuery.t === 'local' &&
|
||||
otherQuery.completedQuery &&
|
||||
otherQuery.completedQuery.didRunSuccessfully &&
|
||||
otherQuery.completedQuery.successful &&
|
||||
otherQuery.initialInfo.databaseInfo.name === dbName
|
||||
)
|
||||
.map((item) => ({
|
||||
@@ -1299,7 +1418,7 @@ the file in the file explorer and dragging it into the workspace.`
|
||||
|
||||
private async openQueryResults(item: QueryHistoryInfo) {
|
||||
if (item.t === 'local') {
|
||||
await this.localQueriesInterfaceManager.showResults(item as CompletedLocalQueryInfo, WebviewReveal.Forced, false);
|
||||
await this.localQueriesResultsView.showResults(item as CompletedLocalQueryInfo, WebviewReveal.Forced, false);
|
||||
}
|
||||
else if (item.t === 'remote') {
|
||||
await this.remoteQueriesManager.openRemoteQueryResults(item.queryId);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { CancellationTokenSource, env } from 'vscode';
|
||||
|
||||
import { QueryWithResults, QueryEvaluationInfo } from './run-queries';
|
||||
import * as messages from './pure/messages';
|
||||
import * as messages from './pure/messages-shared';
|
||||
import * as legacyMessages from './pure/legacy-messages';
|
||||
import * as cli from './cli';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
@@ -16,7 +16,8 @@ import {
|
||||
} from './pure/interface-types';
|
||||
import { DatabaseInfo } from './pure/interface-types';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
import { QueryEvaluationInfo, QueryWithResults } from './run-queries-shared';
|
||||
import { formatLegacyMessage } from './legacy-query-server/run-queries';
|
||||
|
||||
/**
|
||||
* query-results.ts
|
||||
@@ -44,7 +45,12 @@ export interface InitialQueryInfo {
|
||||
|
||||
export class CompletedQueryInfo implements QueryWithResults {
|
||||
readonly query: QueryEvaluationInfo;
|
||||
readonly result: messages.EvaluationResult;
|
||||
readonly message?: string;
|
||||
readonly successful?: boolean;
|
||||
/**
|
||||
* The legacy result. This is only set when loading from the query history.
|
||||
*/
|
||||
readonly result: legacyMessages.EvaluationResult;
|
||||
readonly logFileLocation?: string;
|
||||
resultCount: number;
|
||||
|
||||
@@ -68,16 +74,18 @@ export class CompletedQueryInfo implements QueryWithResults {
|
||||
interpretedResultsSortState: InterpretedResultsSortState | undefined;
|
||||
|
||||
/**
|
||||
* Note that in the {@link FullQueryInfo.slurp} method, we create a CompletedQueryInfo instance
|
||||
* Note that in the {@link slurpQueryHistory} method, we create a CompletedQueryInfo instance
|
||||
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||
*/
|
||||
constructor(
|
||||
evaluation: QueryWithResults,
|
||||
) {
|
||||
this.query = evaluation.query;
|
||||
this.result = evaluation.result;
|
||||
this.logFileLocation = evaluation.logFileLocation;
|
||||
this.result = evaluation.result;
|
||||
|
||||
this.message = evaluation.message;
|
||||
this.successful = evaluation.successful;
|
||||
// Use the dispose method from the evaluation.
|
||||
// The dispose will clean up any additional log locations that this
|
||||
// query may have created.
|
||||
@@ -92,18 +100,12 @@ export class CompletedQueryInfo implements QueryWithResults {
|
||||
}
|
||||
|
||||
get statusString(): string {
|
||||
switch (this.result.resultType) {
|
||||
case messages.QueryResultType.CANCELLATION:
|
||||
return `cancelled after ${Math.round(this.result.evaluationTime / 1000)} seconds`;
|
||||
case messages.QueryResultType.OOM:
|
||||
return 'out of memory';
|
||||
case messages.QueryResultType.SUCCESS:
|
||||
return `finished in ${Math.round(this.result.evaluationTime / 1000)} seconds`;
|
||||
case messages.QueryResultType.TIMEOUT:
|
||||
return `timed out after ${Math.round(this.result.evaluationTime / 1000)} seconds`;
|
||||
case messages.QueryResultType.OTHER_ERROR:
|
||||
default:
|
||||
return this.result.message ? `failed: ${this.result.message}` : 'failed';
|
||||
if (this.message) {
|
||||
return this.message;
|
||||
} else if (this.result) {
|
||||
return formatLegacyMessage(this.result);
|
||||
} else {
|
||||
throw new Error('No status available');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -115,10 +117,6 @@ export class CompletedQueryInfo implements QueryWithResults {
|
||||
|| this.query.resultsPaths.resultsPath;
|
||||
}
|
||||
|
||||
get didRunSuccessfully(): boolean {
|
||||
return this.result.resultType === messages.QueryResultType.SUCCESS;
|
||||
}
|
||||
|
||||
async updateSortState(
|
||||
server: cli.CodeQLCliServer,
|
||||
resultSetName: string,
|
||||
@@ -202,14 +200,12 @@ export function ensureMetadataIsComplete(metadata: QueryMetadata | undefined) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Used in Interface and Compare-Interface for queries that we know have been complated.
|
||||
* Used in Interface and Compare-Interface for queries that we know have been completed.
|
||||
*/
|
||||
export type CompletedLocalQueryInfo = LocalQueryInfo & {
|
||||
completedQuery: CompletedQueryInfo
|
||||
};
|
||||
|
||||
export type QueryHistoryInfo = LocalQueryInfo | RemoteQueryHistoryItem;
|
||||
|
||||
export class LocalQueryInfo {
|
||||
readonly t = 'local';
|
||||
|
||||
@@ -217,6 +213,8 @@ export class LocalQueryInfo {
|
||||
public completedQuery: CompletedQueryInfo | undefined;
|
||||
public evalLogLocation: string | undefined;
|
||||
public evalLogSummaryLocation: string | undefined;
|
||||
public jsonEvalLogSummaryLocation: string | undefined;
|
||||
public evalLogSummarySymbolsLocation: string | undefined;
|
||||
|
||||
/**
|
||||
* Note that in the {@link slurpQueryHistory} method, we create a FullQueryInfo instance
|
||||
@@ -281,7 +279,7 @@ export class LocalQueryInfo {
|
||||
return !!this.completedQuery;
|
||||
}
|
||||
|
||||
completeThisQuery(info: QueryWithResults) {
|
||||
completeThisQuery(info: QueryWithResults): void {
|
||||
this.completedQuery = new CompletedQueryInfo(info);
|
||||
|
||||
// dispose of the cancellation token source and also ensure the source is not serialized as JSON
|
||||
@@ -300,7 +298,7 @@ export class LocalQueryInfo {
|
||||
return QueryStatus.Failed;
|
||||
} else if (!this.completedQuery) {
|
||||
return QueryStatus.InProgress;
|
||||
} else if (this.completedQuery.didRunSuccessfully) {
|
||||
} else if (this.completedQuery.successful) {
|
||||
return QueryStatus.Completed;
|
||||
} else {
|
||||
return QueryStatus.Failed;
|
||||
|
||||
extensions/ql-vscode/src/query-serialization.ts
@@ -3,9 +3,11 @@ import * as path from 'path';

 import { showAndLogErrorMessage } from './helpers';
 import { asyncFilter, getErrorMessage, getErrorStack } from './pure/helpers-pure';
-import { CompletedQueryInfo, LocalQueryInfo, QueryHistoryInfo } from './query-results';
+import { CompletedQueryInfo, LocalQueryInfo } from './query-results';
+import { QueryHistoryInfo } from './query-history-info';
 import { QueryStatus } from './query-status';
-import { QueryEvaluationInfo } from './run-queries';
+import { QueryEvaluationInfo } from './run-queries-shared';
+import { QueryResultType } from './pure/legacy-messages';

 export async function slurpQueryHistory(fsPath: string): Promise<QueryHistoryInfo[]> {
   try {
@@ -15,8 +17,8 @@ export async function slurpQueryHistory(fsPath: string): Promise<QueryHistoryInf

     const data = await fs.readFile(fsPath, 'utf8');
     const obj = JSON.parse(data);
-    if (obj.version !== 1) {
-      void showAndLogErrorMessage(`Unsupported query history format: v${obj.version}. `);
+    if (![1, 2].includes(obj.version)) {
+      void showAndLogErrorMessage(`Can't parse query history. Unsupported query history format: v${obj.version}. `);
       return [];
     }

@@ -38,6 +40,17 @@ export async function slurpQueryHistory(fsPath: string): Promise<QueryHistoryInf
          Object.setPrototypeOf(q.completedQuery.query, QueryEvaluationInfo.prototype);
          // slurped queries do not need to be disposed
          q.completedQuery.dispose = () => { /**/ };
+
+          // Previously, there was a typo in the completedQuery type. There was a field
+          // `sucessful` and it was renamed to `successful`. We need to handle this case.
+          if ('sucessful' in q.completedQuery) {
+            (q.completedQuery as any).successful = (q.completedQuery as any).sucessful;
+            delete (q.completedQuery as any).sucessful;
+          }
+
+          if (!('successful' in q.completedQuery)) {
+            (q.completedQuery as any).successful = q.completedQuery.result?.resultType === QueryResultType.SUCCESS;
+          }
        }
      } else if (q.t === 'remote') {
        // A bug was introduced that didn't set the completed flag in query history
@@ -54,7 +67,7 @@ export async function slurpQueryHistory(fsPath: string): Promise<QueryHistoryInf
     // most likely another workspace has deleted them because the
     // queries aged out.
     return asyncFilter(parsedQueries, async (q) => {
-      if (q.t === 'remote') {
+      if (q.t === 'remote' || q.t === 'variant-analysis') {
        // the slurper doesn't know where the remote queries are stored
        // so we need to assume here that they exist. Later, we check to
        // see if they exist on disk.
@@ -90,7 +103,10 @@ export async function splatQueryHistory(queries: QueryHistoryInfo[], fsPath: str
   // remove incomplete local queries since they cannot be recreated on restart
   const filteredQueries = queries.filter(q => q.t === 'local' ? q.completedQuery !== undefined : true);
   const data = JSON.stringify({
-    version: 1,
+    // version 2:
+    // - adds the `variant-analysis` type
+    // - ensures a `successful` property exists on completedQuery
+    version: 2,
     queries: filteredQueries
   }, null, 2);
   await fs.writeFile(fsPath, data);
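The hunks above bump the serialized history format from v1 to v2 and patch older items up on load. A condensed sketch of that load-time migration idea; RawHistoryFile and migrateItem are hypothetical names, not the extension's own:

interface RawHistoryFile {
  version: number;
  queries: any[];
}

const SUPPORTED_VERSIONS = [1, 2];

// Normalise a v1 local item so later code can assume the v2 shape,
// i.e. a boolean `successful` on completedQuery.
function migrateItem(q: any): any {
  if (q.t === 'local' && q.completedQuery) {
    if ('sucessful' in q.completedQuery) {
      q.completedQuery.successful = q.completedQuery.sucessful;
      delete q.completedQuery.sucessful;
    }
    if (!('successful' in q.completedQuery)) {
      q.completedQuery.successful = q.completedQuery.result?.resultType === 0; // 0 === SUCCESS
    }
  }
  return q;
}

function loadHistory(raw: string): any[] {
  const parsed: RawHistoryFile = JSON.parse(raw);
  if (!SUPPORTED_VERSIONS.includes(parsed.version)) {
    return []; // unknown future format: refuse rather than guess
  }
  return parsed.queries.map(migrateItem);
}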
81 extensions/ql-vscode/src/query-server/query-runner.ts Normal file
@@ -0,0 +1,81 @@
import { CancellationToken } from 'vscode';
import { ProgressCallback, UserCancellationException } from '../commandRunner';
import { DatabaseItem } from '../databases';
import { clearCache, ClearCacheParams, clearPackCache, deregisterDatabases, registerDatabases, upgradeDatabase } from '../pure/new-messages';
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
import { QueryRunner } from '../queryRunner';
import { QueryWithResults } from '../run-queries-shared';
import { QueryServerClient } from './queryserver-client';
import { compileAndRunQueryAgainstDatabase } from './run-queries';
import * as vscode from 'vscode';
import { getOnDiskWorkspaceFolders } from '../helpers';

export class NewQueryRunner extends QueryRunner {

  constructor(public readonly qs: QueryServerClient) {
    super();
  }

  get cliServer() {
    return this.qs.cliServer;
  }

  async restartQueryServer(progress: ProgressCallback, token: CancellationToken): Promise<void> {
    await this.qs.restartQueryServer(progress, token);
  }

  onStart(callBack: (progress: ProgressCallback, token: CancellationToken) => Promise<void>) {
    this.qs.onDidStartQueryServer(callBack);
  }

  async clearCacheInDatabase(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
    if (dbItem.contents === undefined) {
      throw new Error('Can\'t clear the cache in an invalid database.');
    }

    const db = dbItem.databaseUri.fsPath;
    const params: ClearCacheParams = {
      dryRun: false,
      db,
    };
    await this.qs.sendRequest(clearCache, params, token, progress);
  }

  async compileAndRunQueryAgainstDatabase(dbItem: DatabaseItem, initialInfo: InitialQueryInfo, queryStorageDir: string, progress: ProgressCallback, token: CancellationToken, templates?: Record<string, string>, queryInfo?: LocalQueryInfo): Promise<QueryWithResults> {
    return await compileAndRunQueryAgainstDatabase(this.qs.cliServer, this.qs, dbItem, initialInfo, queryStorageDir, progress, token, templates, queryInfo);
  }

  async deregisterDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
    if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
      const databases: string[] = [dbItem.databaseUri.fsPath];
      await this.qs.sendRequest(deregisterDatabases, { databases }, token, progress);
    }
  }

  async registerDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
    if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
      const databases: string[] = [dbItem.databaseUri.fsPath];
      await this.qs.sendRequest(registerDatabases, { databases }, token, progress);
    }
  }

  async clearPackCache(): Promise<void> {
    await this.qs.sendRequest(clearPackCache, {});
  }

  async upgradeDatabaseExplicit(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {

    const yesItem = { title: 'Yes', isCloseAffordance: false };
    const noItem = { title: 'No', isCloseAffordance: true };
    const dialogOptions: vscode.MessageItem[] = [yesItem, noItem];

    const message = `Should the database ${dbItem.databaseUri.fsPath} be destructively upgraded?\n\nThis should not be necessary to run queries
as we will non-destructively update it anyway.`;
    const chosenItem = await vscode.window.showInformationMessage(message, { modal: true }, ...dialogOptions);

    if (chosenItem !== yesItem) {
      throw new UserCancellationException('User cancelled the database upgrade.');
    }
    await this.qs.sendRequest(upgradeDatabase, { db: dbItem.databaseUri.fsPath, additionalPacks: getOnDiskWorkspaceFolders() }, token, progress);
  }
}
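A rough sketch of using the runner above, assuming a QueryServerClient has already been constructed as shown in the next file (the function name is illustrative):

import { CancellationTokenSource } from 'vscode';
import { DatabaseItem } from './databases';
import { NewQueryRunner } from './query-server/query-runner';
import { QueryServerClient } from './query-server/queryserver-client';

async function clearCacheExample(qsClient: QueryServerClient, dbItem: DatabaseItem) {
  const runner = new NewQueryRunner(qsClient);
  const tokenSource = new CancellationTokenSource();

  // Sends an evaluation/clearCache request with dryRun: false; progress
  // notifications from the server are forwarded to the callback.
  await runner.clearCacheInDatabase(
    dbItem,
    (p) => console.log(p.message),
    tokenSource.token
  );
}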
205 extensions/ql-vscode/src/query-server/queryserver-client.ts Normal file
@@ -0,0 +1,205 @@
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { CancellationToken, commands } from 'vscode';
|
||||
import { createMessageConnection, RequestType } from 'vscode-jsonrpc';
|
||||
import * as cli from '../cli';
|
||||
import { QueryServerConfig } from '../config';
|
||||
import { Logger, ProgressReporter } from '../logging';
|
||||
import { progress, ProgressMessage, WithProgressId } from '../pure/new-messages';
|
||||
import * as messages from '../pure/new-messages';
|
||||
import { ProgressCallback, ProgressTask } from '../commandRunner';
|
||||
import { findQueryLogFile } from '../run-queries-shared';
|
||||
import { ServerProcess } from '../json-rpc-server';
|
||||
|
||||
type ServerOpts = {
|
||||
logger: Logger;
|
||||
contextStoragePath: string;
|
||||
}
|
||||
|
||||
|
||||
type WithProgressReporting = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Thenable<void>;
|
||||
|
||||
/**
|
||||
* Client that manages a query server process.
|
||||
* The server process is started upon initialization and tracked during its lifetime.
|
||||
* The server process is disposed when the client is disposed, or if the client asks
|
||||
* to restart it (which disposes the existing process and starts a new one).
|
||||
*/
|
||||
export class QueryServerClient extends DisposableObject {
|
||||
|
||||
serverProcess?: ServerProcess;
|
||||
progressCallbacks: { [key: number]: ((res: ProgressMessage) => void) | undefined };
|
||||
nextCallback: number;
|
||||
nextProgress: number;
|
||||
withProgressReporting: WithProgressReporting;
|
||||
|
||||
private readonly queryServerStartListeners = [] as ProgressTask<void>[];
|
||||
|
||||
// Can't use standard vscode EventEmitter here since they do not cause the calling
|
||||
// function to fail if one of the event handlers fail. This is something that
|
||||
// we need here.
|
||||
readonly onDidStartQueryServer = (e: ProgressTask<void>) => {
|
||||
this.queryServerStartListeners.push(e);
|
||||
}
|
||||
|
||||
public activeQueryLogFile: string | undefined;
|
||||
|
||||
constructor(
|
||||
readonly config: QueryServerConfig,
|
||||
readonly cliServer: cli.CodeQLCliServer,
|
||||
readonly opts: ServerOpts,
|
||||
withProgressReporting: WithProgressReporting
|
||||
) {
|
||||
super();
|
||||
// When the query server configuration changes, restart the query server.
|
||||
if (config.onDidChangeConfiguration !== undefined) {
|
||||
this.push(config.onDidChangeConfiguration(() =>
|
||||
commands.executeCommand('codeQL.restartQueryServer')));
|
||||
}
|
||||
this.withProgressReporting = withProgressReporting;
|
||||
this.nextCallback = 0;
|
||||
this.nextProgress = 0;
|
||||
this.progressCallbacks = {};
|
||||
}
|
||||
|
||||
get logger(): Logger {
|
||||
return this.opts.logger;
|
||||
}
|
||||
|
||||
/** Stops the query server by disposing of the current server process. */
|
||||
private stopQueryServer(): void {
|
||||
if (this.serverProcess !== undefined) {
|
||||
this.disposeAndStopTracking(this.serverProcess);
|
||||
} else {
|
||||
void this.logger.log('No server process to be stopped.');
|
||||
}
|
||||
}
|
||||
|
||||
/** Restarts the query server by disposing of the current server process and then starting a new one. */
|
||||
async restartQueryServer(
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
): Promise<void> {
|
||||
this.stopQueryServer();
|
||||
await this.startQueryServer();
|
||||
|
||||
// Ensure we await all responses from event handlers so that
|
||||
// errors can be properly reported to the user.
|
||||
await Promise.all(this.queryServerStartListeners.map(handler => handler(
|
||||
progress,
|
||||
token
|
||||
)));
|
||||
}
|
||||
|
||||
showLog(): void {
|
||||
this.logger.show();
|
||||
}
|
||||
|
||||
/** Starts a new query server process, sending progress messages to the status bar. */
|
||||
async startQueryServer(): Promise<void> {
|
||||
// Use an arrow function to preserve the value of `this`.
|
||||
return this.withProgressReporting((progress, _) => this.startQueryServerImpl(progress));
|
||||
}
|
||||
|
||||
/** Starts a new query server process, sending progress messages to the given reporter. */
|
||||
private async startQueryServerImpl(progressReporter: ProgressReporter): Promise<void> {
|
||||
void this.logger.log('Starting NEW query server.');
|
||||
|
||||
const ramArgs = await this.cliServer.resolveRam(this.config.queryMemoryMb, progressReporter);
|
||||
const args = ['--threads', this.config.numThreads.toString()].concat(ramArgs);
|
||||
|
||||
if (this.config.saveCache) {
|
||||
args.push('--save-cache');
|
||||
}
|
||||
|
||||
if (this.config.cacheSize > 0) {
|
||||
args.push('--max-disk-cache');
|
||||
args.push(this.config.cacheSize.toString());
|
||||
}
|
||||
|
||||
const structuredLogFile = `${this.opts.contextStoragePath}/structured-evaluator-log.json`;
|
||||
await fs.ensureFile(structuredLogFile);
|
||||
|
||||
args.push('--evaluator-log');
|
||||
args.push(structuredLogFile);
|
||||
|
||||
// We hard-code the verbosity level to 5 and minify to false.
|
||||
// This will be the behavior of the per-query structured logging in the CLI after 2.8.3.
|
||||
args.push('--evaluator-log-level');
|
||||
args.push('5');
|
||||
|
||||
|
||||
if (this.config.debug) {
|
||||
args.push('--debug', '--tuple-counting');
|
||||
}
|
||||
|
||||
if (cli.shouldDebugQueryServer()) {
|
||||
args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=y,suspend=y,quiet=y');
|
||||
}
|
||||
|
||||
const child = cli.spawnServer(
|
||||
this.config.codeQlPath,
|
||||
'CodeQL query server',
|
||||
['execute', 'query-server2'],
|
||||
args,
|
||||
this.logger,
|
||||
data => this.logger.log(data.toString(), {
|
||||
trailingNewline: false,
|
||||
additionalLogLocation: this.activeQueryLogFile
|
||||
}),
|
||||
undefined, // no listener for stdout
|
||||
progressReporter
|
||||
);
|
||||
progressReporter.report({ message: 'Connecting to CodeQL query server' });
|
||||
const connection = createMessageConnection(child.stdout, child.stdin);
|
||||
connection.onNotification(progress, res => {
|
||||
const callback = this.progressCallbacks[res.id];
|
||||
if (callback) {
|
||||
callback(res);
|
||||
}
|
||||
});
|
||||
this.serverProcess = new ServerProcess(child, connection, 'Query Server 2', this.logger);
|
||||
// Ensure the server process is disposed together with this client.
|
||||
this.track(this.serverProcess);
|
||||
connection.listen();
|
||||
progressReporter.report({ message: 'Connected to CodeQL query server v2' });
|
||||
this.nextCallback = 0;
|
||||
this.nextProgress = 0;
|
||||
this.progressCallbacks = {};
|
||||
}
|
||||
|
||||
get serverProcessPid(): number {
|
||||
return this.serverProcess!.child.pid || 0;
|
||||
}
|
||||
|
||||
async sendRequest<P, R, E, RO>(type: RequestType<WithProgressId<P>, R, E, RO>, parameter: P, token?: CancellationToken, progress?: (res: ProgressMessage) => void): Promise<R> {
|
||||
const id = this.nextProgress++;
|
||||
this.progressCallbacks[id] = progress;
|
||||
|
||||
this.updateActiveQuery(type.method, parameter);
|
||||
try {
|
||||
if (this.serverProcess === undefined) {
|
||||
throw new Error('No query server process found.');
|
||||
}
|
||||
return await this.serverProcess.connection.sendRequest(type, { body: parameter, progressId: id }, token);
|
||||
} finally {
|
||||
delete this.progressCallbacks[id];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the active query every time there is a new request to compile.
|
||||
* The active query is used to specify the side log.
|
||||
*
|
||||
* This isn't ideal because in situations where there are queries running
|
||||
* in parallel, each query's log messages are interleaved. Fixing this
|
||||
* properly will require a change in the query server.
|
||||
*/
|
||||
private updateActiveQuery(method: string, parameter: any): void {
|
||||
if (method === messages.runQuery.method) {
|
||||
this.activeQueryLogFile = findQueryLogFile(path.dirname(path.dirname((parameter as messages.RunQueryParams).outputPath)));
|
||||
}
|
||||
}
|
||||
}
|
||||
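A short usage sketch of sendRequest's progress correlation: every request gets a fresh progress id, the callback is stored under that id, and incoming ql/progressUpdated notifications are routed back to it. The trimCache call and logging here are illustrative:

import { CancellationTokenSource } from 'vscode';
import * as messages from '../pure/new-messages';
import { QueryServerClient } from './queryserver-client';

async function trimCacheWithProgress(qs: QueryServerClient, db: string) {
  const tokenSource = new CancellationTokenSource();
  const result = await qs.sendRequest(
    messages.trimCache,
    { db },
    tokenSource.token,
    // Called for each progress notification whose id matches the progressId
    // that sendRequest attached to this request.
    (p) => console.log(p.message)
  );
  console.log(result.deletionMessage);
}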
143 extensions/ql-vscode/src/query-server/run-queries.ts Normal file
@@ -0,0 +1,143 @@
|
||||
import * as path from 'path';
|
||||
import {
|
||||
CancellationToken
|
||||
} from 'vscode';
|
||||
import * as cli from '../cli';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { DatabaseItem } from '../databases';
|
||||
import {
|
||||
getOnDiskWorkspaceFolders,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogWarningMessage,
|
||||
tryGetQueryMetadata
|
||||
} from '../helpers';
|
||||
import { logger } from '../logging';
|
||||
import * as messages from '../pure/new-messages';
|
||||
import * as legacyMessages from '../pure/legacy-messages';
|
||||
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
|
||||
import { QueryEvaluationInfo, QueryWithResults } from '../run-queries-shared';
|
||||
import * as qsClient from './queryserver-client';
|
||||
|
||||
|
||||
/**
|
||||
* run-queries.ts
|
||||
* --------------
|
||||
*
|
||||
* Compiling and running QL queries.
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* A collection of evaluation-time information about a query,
|
||||
* including the query itself, and where we have decided to put
|
||||
* temporary files associated with it, such as the compiled query
|
||||
* output and results.
|
||||
*/
|
||||
|
||||
export async function compileAndRunQueryAgainstDatabase(
|
||||
cliServer: cli.CodeQLCliServer,
|
||||
qs: qsClient.QueryServerClient,
|
||||
dbItem: DatabaseItem,
|
||||
initialInfo: InitialQueryInfo,
|
||||
queryStorageDir: string,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
templates?: Record<string, string>,
|
||||
queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
|
||||
): Promise<QueryWithResults> {
|
||||
if (!dbItem.contents || !dbItem.contents.dbSchemeUri) {
|
||||
throw new Error(`Database ${dbItem.databaseUri} does not have a CodeQL database scheme.`);
|
||||
}
|
||||
|
||||
// Read the query metadata if possible, to use in the UI.
|
||||
const metadata = await tryGetQueryMetadata(cliServer, initialInfo.queryPath);
|
||||
|
||||
const hasMetadataFile = (await dbItem.hasMetadataFile());
|
||||
const query = new QueryEvaluationInfo(
|
||||
path.join(queryStorageDir, initialInfo.id),
|
||||
dbItem.databaseUri.fsPath,
|
||||
hasMetadataFile,
|
||||
initialInfo.quickEvalPosition,
|
||||
metadata,
|
||||
);
|
||||
|
||||
if (!dbItem.contents || dbItem.error) {
|
||||
throw new Error('Can\'t run query on invalid database.');
|
||||
}
|
||||
const target = query.quickEvalPosition ? {
|
||||
quickEval: { quickEvalPos: query.quickEvalPosition }
|
||||
} : { query: {} };
|
||||
|
||||
const diskWorkspaceFolders = getOnDiskWorkspaceFolders();
|
||||
const db = dbItem.databaseUri.fsPath;
|
||||
const logPath = queryInfo ? query.evalLogPath : undefined;
|
||||
const queryToRun: messages.RunQueryParams = {
|
||||
db,
|
||||
additionalPacks: diskWorkspaceFolders,
|
||||
externalInputs: {},
|
||||
singletonExternalInputs: templates || {},
|
||||
outputPath: query.resultsPaths.resultsPath,
|
||||
queryPath: initialInfo.queryPath,
|
||||
logPath,
|
||||
target,
|
||||
};
|
||||
await query.createTimestampFile();
|
||||
let result: messages.RunQueryResult | undefined;
|
||||
try {
|
||||
result = await qs.sendRequest(messages.runQuery, queryToRun, token, progress);
|
||||
if (qs.config.customLogDirectory) {
|
||||
void showAndLogWarningMessage(
|
||||
`Custom log directories are no longer supported. The "codeQL.runningQueries.customLogDirectory" setting is deprecated. Unset the setting to stop seeing this message. Query logs saved to ${query.logPath}.`
|
||||
);
|
||||
}
|
||||
} finally {
|
||||
if (queryInfo) {
|
||||
if (await query.hasEvalLog()) {
|
||||
await query.addQueryLogs(queryInfo, qs.cliServer, qs.logger);
|
||||
} else {
|
||||
void showAndLogWarningMessage(`Failed to write structured evaluator log to ${query.evalLogPath}.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (result.resultType !== messages.QueryResultType.SUCCESS) {
|
||||
const message = result.message || 'Failed to run query';
|
||||
void logger.log(message);
|
||||
void showAndLogErrorMessage(message);
|
||||
}
|
||||
let message;
|
||||
switch (result.resultType) {
|
||||
case messages.QueryResultType.CANCELLATION:
|
||||
message = `cancelled after ${Math.round(result.evaluationTime / 1000)} seconds`;
|
||||
break;
|
||||
case messages.QueryResultType.OOM:
|
||||
message = 'out of memory';
|
||||
break;
|
||||
case messages.QueryResultType.SUCCESS:
|
||||
message = `finished in ${Math.round(result.evaluationTime / 1000)} seconds`;
|
||||
break;
|
||||
case messages.QueryResultType.COMPILATION_ERROR:
|
||||
message = `compilation failed: ${result.message}`;
|
||||
break;
|
||||
case messages.QueryResultType.OTHER_ERROR:
|
||||
default:
|
||||
message = result.message ? `failed: ${result.message}` : 'failed';
|
||||
break;
|
||||
}
|
||||
const successful = result.resultType === messages.QueryResultType.SUCCESS;
|
||||
return {
|
||||
query,
|
||||
result: {
|
||||
evaluationTime: result.evaluationTime,
|
||||
queryId: 0,
|
||||
resultType: successful ? legacyMessages.QueryResultType.SUCCESS : legacyMessages.QueryResultType.OTHER_ERROR,
|
||||
runId: 0,
|
||||
message
|
||||
},
|
||||
message,
|
||||
successful,
|
||||
dispose: () => {
|
||||
qs.logger.removeAdditionalLogLocation(undefined);
|
||||
}
|
||||
};
|
||||
}
extensions/ql-vscode/src/queryRunner.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
import { CancellationToken } from 'vscode';
import { CodeQLCliServer } from './cli';
import { ProgressCallback } from './commandRunner';
import { DatabaseItem } from './databases';
import { InitialQueryInfo, LocalQueryInfo } from './query-results';
import { QueryWithResults } from './run-queries-shared';

export abstract class QueryRunner {
  abstract restartQueryServer(progress: ProgressCallback, token: CancellationToken): Promise<void>;

  abstract cliServer: CodeQLCliServer;

  abstract onStart(arg0: (progress: ProgressCallback, token: CancellationToken) => Promise<void>): void;
  abstract clearCacheInDatabase(
    dbItem: DatabaseItem,
    progress: ProgressCallback,
    token: CancellationToken): Promise<void>;

  abstract compileAndRunQueryAgainstDatabase(
    dbItem: DatabaseItem,
    initialInfo: InitialQueryInfo,
    queryStorageDir: string,
    progress: ProgressCallback,
    token: CancellationToken,
    templates?: Record<string, string>,
    queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
  ): Promise<QueryWithResults>;

  abstract deregisterDatabase(
    progress: ProgressCallback,
    token: CancellationToken,
    dbItem: DatabaseItem,
  ): Promise<void>;

  abstract registerDatabase(
    progress: ProgressCallback,
    token: CancellationToken,
    dbItem: DatabaseItem,
  ): Promise<void>;

  abstract upgradeDatabaseExplicit(
    dbItem: DatabaseItem,
    progress: ProgressCallback,
    token: CancellationToken,
  ): Promise<void>

  abstract clearPackCache(): Promise<void>
}
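For orientation, the sketch below shows roughly how a concrete runner could satisfy this abstract contract. It is a minimal, hypothetical example: the `InMemoryQueryRunner` name, the stub bodies, and the simplified local type aliases are illustration-only assumptions and are not part of this change.

```typescript
// Minimal stand-ins for the extension's real types, so the sketch is self-contained.
type CancellationToken = { isCancellationRequested: boolean };
type ProgressCallback = (p: { step: number; maxStep: number; message: string }) => void;
interface DatabaseItem { name: string }
interface QueryWithResults { successful: boolean; message?: string }

// Hypothetical concrete runner; a real subclass would implement every abstract member.
class InMemoryQueryRunner {
  async restartQueryServer(progress: ProgressCallback, _token: CancellationToken): Promise<void> {
    progress({ step: 1, maxStep: 1, message: 'Restarting query server' });
  }

  async clearCacheInDatabase(dbItem: DatabaseItem, progress: ProgressCallback, _token: CancellationToken): Promise<void> {
    progress({ step: 1, maxStep: 1, message: `Clearing cache for ${dbItem.name}` });
  }

  async compileAndRunQueryAgainstDatabase(dbItem: DatabaseItem): Promise<QueryWithResults> {
    // A real implementation would compile the query, send it to the query server,
    // and translate the server response into a QueryWithResults.
    return { successful: true, message: `finished query on ${dbItem.name}` };
  }

  async clearPackCache(): Promise<void> {
    // No-op in this sketch.
  }
}
```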
@@ -5,7 +5,7 @@ import { CancellationToken, ExtensionContext } from 'vscode';

 import { Credentials } from '../authentication';
 import { Logger } from '../logging';
-import { downloadArtifactFromLink } from './gh-actions-api-client';
+import { downloadArtifactFromLink } from './gh-api/gh-actions-api-client';
 import { AnalysisSummary } from './shared/remote-query-result';
 import { AnalysisResults, AnalysisAlert, AnalysisRawResults } from './shared/analysis-result';
 import { UserCancellationException } from '../commandRunner';
@@ -10,31 +10,46 @@ import {
|
||||
} from '../helpers';
|
||||
import { logger } from '../logging';
|
||||
import { QueryHistoryManager } from '../query-history';
|
||||
import { createGist } from './gh-actions-api-client';
|
||||
import { createGist } from './gh-api/gh-actions-api-client';
|
||||
import { RemoteQueriesManager } from './remote-queries-manager';
|
||||
import { generateMarkdown } from './remote-queries-markdown-generation';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { AnalysisResults, sumAnalysesResults } from './shared/analysis-result';
|
||||
import { RemoteQueryHistoryItem } from './remote-query-history-item';
|
||||
|
||||
/**
|
||||
* Exports the results of the currently-selected remote query.
|
||||
* Exports the results of the given or currently-selected remote query.
|
||||
* The user is prompted to select the export format.
|
||||
*/
|
||||
export async function exportRemoteQueryResults(
|
||||
queryHistoryManager: QueryHistoryManager,
|
||||
remoteQueriesManager: RemoteQueriesManager,
|
||||
ctx: ExtensionContext,
|
||||
queryId?: string,
|
||||
): Promise<void> {
|
||||
const queryHistoryItem = queryHistoryManager.getCurrentQueryHistoryItem();
|
||||
if (!queryHistoryItem || queryHistoryItem.t !== 'remote') {
|
||||
throw new Error('No variant analysis results currently open. To open results, click an item in the query history view.');
|
||||
} else if (!queryHistoryItem.completed) {
|
||||
let queryHistoryItem: RemoteQueryHistoryItem;
|
||||
if (queryId) {
|
||||
const query = queryHistoryManager.getRemoteQueryById(queryId);
|
||||
if (!query) {
|
||||
void logger.log(`Could not find query with id ${queryId}`);
|
||||
throw new Error('There was an error when trying to retrieve variant analysis information');
|
||||
}
|
||||
queryHistoryItem = query;
|
||||
} else {
|
||||
const query = queryHistoryManager.getCurrentQueryHistoryItem();
|
||||
if (!query || query.t !== 'remote') {
|
||||
throw new Error('No variant analysis results currently open. To open results, click an item in the query history view.');
|
||||
}
|
||||
queryHistoryItem = query;
|
||||
}
|
||||
|
||||
if (!queryHistoryItem.completed) {
|
||||
throw new Error('Variant analysis results are not yet available.');
|
||||
}
|
||||
const queryId = queryHistoryItem.queryId;
|
||||
void logger.log(`Exporting variant analysis results for query: ${queryId}`);
|
||||
|
||||
void logger.log(`Exporting variant analysis results for query: ${queryHistoryItem.queryId}`);
|
||||
const query = queryHistoryItem.remoteQuery;
|
||||
const analysesResults = remoteQueriesManager.getAnalysesResults(queryId);
|
||||
const analysesResults = remoteQueriesManager.getAnalysesResults(queryHistoryItem.queryId);
|
||||
|
||||
const gistOption = {
|
||||
label: '$(ports-open-browser-icon) Create Gist (GitHub)',
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
import * as unzipper from 'unzipper';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import { showAndLogErrorMessage, showAndLogWarningMessage, tmpDir } from '../helpers';
|
||||
import { Credentials } from '../authentication';
|
||||
import { logger } from '../logging';
|
||||
import { RemoteQueryWorkflowResult } from './remote-query-workflow-result';
|
||||
import { DownloadLink, createDownloadPath } from './download-link';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueryFailureIndexItem, RemoteQueryResultIndex, RemoteQuerySuccessIndexItem } from './remote-query-result-index';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
import { showAndLogErrorMessage, showAndLogWarningMessage, tmpDir } from '../../helpers';
|
||||
import { Credentials } from '../../authentication';
|
||||
import { logger } from '../../logging';
|
||||
import { RemoteQueryWorkflowResult } from '../remote-query-workflow-result';
|
||||
import { DownloadLink, createDownloadPath } from '../download-link';
|
||||
import { RemoteQuery } from '../remote-query';
|
||||
import { RemoteQueryFailureIndexItem, RemoteQueryResultIndex, RemoteQuerySuccessIndexItem } from '../remote-query-result-index';
|
||||
import { getErrorMessage } from '../../pure/helpers-pure';
|
||||
import { unzipFile } from '../../pure/zip';
|
||||
|
||||
export const RESULT_INDEX_ARTIFACT_NAME = 'result-index';
|
||||
|
||||
interface ApiSuccessIndexItem {
|
||||
nwo: string;
|
||||
@@ -44,7 +46,7 @@ export async function getRemoteQueryIndex(
|
||||
const artifactsUrlPath = `/repos/${owner}/${repoName}/actions/artifacts`;
|
||||
|
||||
const artifactList = await listWorkflowRunArtifacts(credentials, owner, repoName, workflowRunId);
|
||||
const resultIndexArtifactId = tryGetArtifactIDfromName('result-index', artifactList);
|
||||
const resultIndexArtifactId = tryGetArtifactIDfromName(RESULT_INDEX_ARTIFACT_NAME, artifactList);
|
||||
if (!resultIndexArtifactId) {
|
||||
return undefined;
|
||||
}
|
||||
@@ -108,14 +110,33 @@ export async function downloadArtifactFromLink(
|
||||
const response = await octokit.request(`GET ${downloadLink.urlPath}/zip`, {});
|
||||
|
||||
const zipFilePath = createDownloadPath(storagePath, downloadLink, 'zip');
|
||||
await saveFile(`${zipFilePath}`, response.data as ArrayBuffer);
|
||||
|
||||
// Extract the zipped artifact.
|
||||
await unzipFile(zipFilePath, extractedPath);
|
||||
await unzipBuffer(response.data as ArrayBuffer, zipFilePath, extractedPath);
|
||||
}
|
||||
return path.join(extractedPath, downloadLink.innerFilePath || '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether a specific artifact is present in the list of artifacts of a workflow run.
|
||||
* @param credentials Credentials for authenticating to the GitHub API.
|
||||
* @param owner
|
||||
* @param repo
|
||||
* @param workflowRunId The ID of the workflow run to get the artifact for.
|
||||
* @param artifactName The artifact name, as a string.
|
||||
* @returns A boolean indicating if the artifact is available.
|
||||
*/
|
||||
export async function isArtifactAvailable(
|
||||
credentials: Credentials,
|
||||
owner: string,
|
||||
repo: string,
|
||||
workflowRunId: number,
|
||||
artifactName: string,
|
||||
): Promise<boolean> {
|
||||
const artifactList = await listWorkflowRunArtifacts(credentials, owner, repo, workflowRunId);
|
||||
|
||||
return tryGetArtifactIDfromName(artifactName, artifactList) !== undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads the result index artifact and extracts the result index items.
|
||||
* @param credentials Credentials for authenticating to the GitHub API.
|
||||
@@ -277,20 +298,16 @@ async function downloadArtifact(
|
||||
archive_format: 'zip',
|
||||
});
|
||||
const artifactPath = path.join(tmpDir.name, `${artifactId}`);
|
||||
await saveFile(`${artifactPath}.zip`, response.data as ArrayBuffer);
|
||||
await unzipFile(`${artifactPath}.zip`, artifactPath);
|
||||
await unzipBuffer(response.data as ArrayBuffer, `${artifactPath}.zip`, artifactPath);
|
||||
return artifactPath;
|
||||
}
|
||||
|
||||
async function saveFile(filePath: string, data: ArrayBuffer): Promise<void> {
|
||||
async function unzipBuffer(data: ArrayBuffer, filePath: string, destinationPath: string): Promise<void> {
|
||||
void logger.log(`Saving file to ${filePath}`);
|
||||
await fs.writeFile(filePath, Buffer.from(data));
|
||||
}
|
||||
|
||||
async function unzipFile(sourcePath: string, destinationPath: string) {
|
||||
void logger.log(`Unzipping file to ${destinationPath}`);
|
||||
const file = await unzipper.Open.file(sourcePath);
|
||||
await file.extract({ path: destinationPath });
|
||||
await unzipFile(filePath, destinationPath);
|
||||
}
|
||||
|
||||
function getWorkflowError(conclusion: string | null): string {
|
||||
@@ -0,0 +1,96 @@
|
||||
import { Credentials } from '../../authentication';
|
||||
import { OctokitResponse } from '@octokit/types/dist-types';
|
||||
import { VariantAnalysisSubmission } from '../shared/variant-analysis';
|
||||
import {
|
||||
VariantAnalysis,
|
||||
VariantAnalysisRepoTask,
|
||||
VariantAnalysisSubmissionRequest
|
||||
} from './variant-analysis';
|
||||
import { Repository } from './repository';
|
||||
|
||||
export async function submitVariantAnalysis(
|
||||
credentials: Credentials,
|
||||
submissionDetails: VariantAnalysisSubmission
|
||||
): Promise<VariantAnalysis> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const { actionRepoRef, query, databases, controllerRepoId } = submissionDetails;
|
||||
|
||||
const data: VariantAnalysisSubmissionRequest = {
|
||||
action_repo_ref: actionRepoRef,
|
||||
language: query.language,
|
||||
query_pack: query.pack,
|
||||
repositories: databases.repositories,
|
||||
repository_lists: databases.repositoryLists,
|
||||
repository_owners: databases.repositoryOwners,
|
||||
};
|
||||
|
||||
const response: OctokitResponse<VariantAnalysis> = await octokit.request(
|
||||
'POST /repositories/:controllerRepoId/code-scanning/codeql/variant-analyses',
|
||||
{
|
||||
controllerRepoId,
|
||||
data
|
||||
}
|
||||
);
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
export async function getVariantAnalysis(
|
||||
credentials: Credentials,
|
||||
controllerRepoId: number,
|
||||
variantAnalysisId: number
|
||||
): Promise<VariantAnalysis> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const response: OctokitResponse<VariantAnalysis> = await octokit.request(
|
||||
'GET /repositories/:controllerRepoId/code-scanning/codeql/variant-analyses/:variantAnalysisId',
|
||||
{
|
||||
controllerRepoId,
|
||||
variantAnalysisId
|
||||
}
|
||||
);
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
export async function getVariantAnalysisRepo(
|
||||
credentials: Credentials,
|
||||
controllerRepoId: number,
|
||||
variantAnalysisId: number,
|
||||
repoId: number
|
||||
): Promise<VariantAnalysisRepoTask> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const response: OctokitResponse<VariantAnalysisRepoTask> = await octokit.request(
|
||||
'GET /repositories/:controllerRepoId/code-scanning/codeql/variant-analyses/:variantAnalysisId/repositories/:repoId',
|
||||
{
|
||||
controllerRepoId,
|
||||
variantAnalysisId,
|
||||
repoId
|
||||
}
|
||||
);
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
export async function getVariantAnalysisRepoResult(
|
||||
credentials: Credentials,
|
||||
downloadUrl: string,
|
||||
): Promise<ArrayBuffer> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
const response = await octokit.request(`GET ${downloadUrl}`);
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
export async function getRepositoryFromNwo(
|
||||
credentials: Credentials,
|
||||
owner: string,
|
||||
repo: string
|
||||
): Promise<Repository> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const response = await octokit.rest.repos.get({ owner, repo });
|
||||
return response.data as Repository;
|
||||
}
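To see how these wrappers are exercised, here is a minimal, hypothetical sketch that issues the same `GET` request as `getVariantAnalysis` above, but with a directly constructed Octokit client. The token handling and hard-coded IDs are assumptions made for illustration; in the extension the Octokit instance comes from `Credentials.getOctokit()`.

```typescript
import { Octokit } from '@octokit/rest';

async function fetchVariantAnalysisStatus(
  token: string,             // assumption: a personal access token supplied by the caller
  controllerRepoId: number,
  variantAnalysisId: number,
) {
  const octokit = new Octokit({ auth: token });

  // Mirrors the request made by getVariantAnalysis above, using the same
  // /repositories/... route and ':param' placeholder style as the extension.
  const response = await octokit.request(
    'GET /repositories/:controllerRepoId/code-scanning/codeql/variant-analyses/:variantAnalysisId',
    { controllerRepoId, variantAnalysisId },
  );

  return response.data;
}
```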
extensions/ql-vscode/src/remote-queries/gh-api/repository.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
/**
 * Defines basic information about a repository.
 *
 * Different parts of the API may return different subsets of information
 * about a repository, but this model represents the very basic information
 * that will always be available.
 */
export interface Repository {
  id: number,
  name: string,
  full_name: string,
  private: boolean,
}
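The gh-api `Repository` mirrors the REST payload's snake_case fields, while the extension's shared model (see `shared/repository.ts` further down in this diff) uses camelCase. A small, self-contained conversion sketch follows; the function name is illustrative, but the field mapping matches what `getControllerRepo` does later in this change.

```typescript
// API-shaped repository, as returned by the GitHub REST API (snake_case).
interface ApiRepository {
  id: number;
  name: string;
  full_name: string;
  private: boolean;
}

// Extension-shaped repository (camelCase), matching shared/repository.ts.
interface SharedRepository {
  id: number;
  fullName: string;
  private: boolean;
}

// Hypothetical helper: convert the API payload into the shared model.
function toSharedRepository(repo: ApiRepository): SharedRepository {
  return {
    id: repo.id,
    fullName: repo.full_name,
    private: repo.private,
  };
}
```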
@@ -0,0 +1,86 @@
import { Repository } from './repository';

export interface VariantAnalysisSubmissionRequest {
  action_repo_ref: string,
  language: VariantAnalysisQueryLanguage,
  query_pack: string,
  repositories?: string[],
  repository_lists?: string[],
  repository_owners?: string[]
}

export type VariantAnalysisQueryLanguage =
  | 'csharp'
  | 'cpp'
  | 'go'
  | 'java'
  | 'javascript'
  | 'python'
  | 'ruby';

export interface VariantAnalysis {
  id: number,
  controller_repo: Repository,
  actor_id: number,
  query_language: VariantAnalysisQueryLanguage,
  query_pack_url: string,
  created_at: string,
  updated_at: string,
  status: VariantAnalysisStatus,
  completed_at?: string,
  actions_workflow_run_id?: number,
  failure_reason?: VariantAnalysisFailureReason,
  scanned_repositories?: VariantAnalysisScannedRepository[],
  skipped_repositories?: VariantAnalysisSkippedRepositories
}

export type VariantAnalysisStatus =
  | 'in_progress'
  | 'completed';

export type VariantAnalysisFailureReason =
  | 'no_repos_queried'
  | 'internal_error';

export type VariantAnalysisRepoStatus =
  | 'pending'
  | 'in_progress'
  | 'succeeded'
  | 'failed'
  | 'canceled'
  | 'timed_out';

export interface VariantAnalysisScannedRepository {
  repository: Repository,
  analysis_status: VariantAnalysisRepoStatus,
  result_count?: number,
  artifact_size_in_bytes?: number,
  failure_message?: string
}

export interface VariantAnalysisSkippedRepositoryGroup {
  repository_count: number,
  repositories: Repository[]
}

export interface VariantAnalysisNotFoundRepositoryGroup {
  repository_count: number,
  repository_full_names: string[]
}

export interface VariantAnalysisRepoTask {
  repository: Repository,
  analysis_status: VariantAnalysisRepoStatus,
  artifact_size_in_bytes?: number,
  result_count?: number,
  failure_message?: string,
  database_commit_sha?: string,
  source_location_prefix?: string,
  artifact_url?: string
}

export interface VariantAnalysisSkippedRepositories {
  access_mismatch_repos?: VariantAnalysisSkippedRepositoryGroup,
  not_found_repo_nwos?: VariantAnalysisNotFoundRepositoryGroup,
  no_codeql_db_repos?: VariantAnalysisSkippedRepositoryGroup,
  over_limit_repos?: VariantAnalysisSkippedRepositoryGroup
}
@@ -10,10 +10,10 @@ import { ProgressCallback } from '../commandRunner';
|
||||
import { createTimestampFile, showAndLogErrorMessage, showAndLogInformationMessage, showInformationMessageWithAction } from '../helpers';
|
||||
import { Logger } from '../logging';
|
||||
import { runRemoteQuery } from './run-remote-query';
|
||||
import { RemoteQueriesInterfaceManager } from './remote-queries-interface';
|
||||
import { RemoteQueriesView } from './remote-queries-view';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueriesMonitor } from './remote-queries-monitor';
|
||||
import { getRemoteQueryIndex, getRepositoriesMetadata, RepositoriesMetadata } from './gh-actions-api-client';
|
||||
import { getRemoteQueryIndex, getRepositoriesMetadata, RepositoriesMetadata } from './gh-api/gh-actions-api-client';
|
||||
import { RemoteQueryResultIndex } from './remote-query-result-index';
|
||||
import { RemoteQueryResult, sumAnalysisSummariesResults } from './remote-query-result';
|
||||
import { DownloadLink } from './download-link';
|
||||
@@ -22,6 +22,7 @@ import { assertNever } from '../pure/helpers-pure';
|
||||
import { QueryStatus } from '../query-status';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
import { VariantAnalysisManager } from './variant-analysis-manager';
|
||||
|
||||
const autoDownloadMaxSize = 300 * 1024;
|
||||
const autoDownloadMaxCount = 100;
|
||||
@@ -56,18 +57,21 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
|
||||
private readonly remoteQueriesMonitor: RemoteQueriesMonitor;
|
||||
private readonly analysesResultsManager: AnalysesResultsManager;
|
||||
private readonly interfaceManager: RemoteQueriesInterfaceManager;
|
||||
private readonly variantAnalysisManager: VariantAnalysisManager;
|
||||
private readonly view: RemoteQueriesView;
|
||||
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly cliServer: CodeQLCliServer,
|
||||
private readonly storagePath: string,
|
||||
logger: Logger,
|
||||
variantAnalysisManager: VariantAnalysisManager,
|
||||
) {
|
||||
super();
|
||||
this.analysesResultsManager = new AnalysesResultsManager(ctx, cliServer, storagePath, logger);
|
||||
this.interfaceManager = new RemoteQueriesInterfaceManager(ctx, logger, this.analysesResultsManager);
|
||||
this.view = new RemoteQueriesView(ctx, logger, this.analysesResultsManager);
|
||||
this.remoteQueriesMonitor = new RemoteQueriesMonitor(ctx, logger);
|
||||
this.variantAnalysisManager = variantAnalysisManager;
|
||||
|
||||
this.remoteQueryAddedEventEmitter = this.push(new EventEmitter<NewQueryEvent>());
|
||||
this.remoteQueryRemovedEventEmitter = this.push(new EventEmitter<RemovedQueryEvent>());
|
||||
@@ -75,6 +79,8 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
this.onRemoteQueryAdded = this.remoteQueryAddedEventEmitter.event;
|
||||
this.onRemoteQueryRemoved = this.remoteQueryRemovedEventEmitter.event;
|
||||
this.onRemoteQueryStatusUpdate = this.remoteQueryStatusUpdateEventEmitter.event;
|
||||
|
||||
this.push(this.view);
|
||||
}
|
||||
|
||||
public async rehydrateRemoteQuery(queryId: string, query: RemoteQuery, status: QueryStatus) {
|
||||
@@ -121,11 +127,12 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
credentials, uri || window.activeTextEditor?.document.uri,
|
||||
false,
|
||||
progress,
|
||||
token);
|
||||
token,
|
||||
this.variantAnalysisManager);
|
||||
|
||||
if (querySubmission?.query) {
|
||||
const query = querySubmission.query;
|
||||
const queryId = this.createQueryId(query.queryName);
|
||||
const queryId = this.createQueryId();
|
||||
|
||||
await this.prepareStorageDirectory(queryId);
|
||||
await this.storeJsonFile(queryId, 'query.json', query);
|
||||
@@ -190,7 +197,7 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
analysesToDownload,
|
||||
token,
|
||||
results => this.interfaceManager.setAnalysisResults(results, queryResult.queryId));
|
||||
results => this.view.setAnalysisResults(results, queryResult.queryId));
|
||||
}
|
||||
|
||||
public async copyRemoteQueryRepoListToClipboard(queryId: string) {
|
||||
@@ -246,7 +253,7 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
}
|
||||
|
||||
public async openResults(query: RemoteQuery, queryResult: RemoteQueryResult) {
|
||||
await this.interfaceManager.showResults(query, queryResult);
|
||||
await this.view.showResults(query, queryResult);
|
||||
}
|
||||
|
||||
private async askToOpenResults(query: RemoteQuery, queryResult: RemoteQueryResult): Promise<void> {
|
||||
@@ -262,11 +269,10 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
|
||||
/**
|
||||
* Generates a unique id for this query, suitable for determining the storage location for the downloaded query artifacts.
|
||||
* @param queryName
|
||||
* @returns
|
||||
* @returns A unique id for this query.
|
||||
*/
|
||||
private createQueryId(queryName: string): string {
|
||||
return `${queryName}-${nanoid()}`;
|
||||
private createQueryId(): string {
|
||||
return nanoid();
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -138,7 +138,7 @@ function generateMarkdownForCodeSnippet(
|
||||
const codeLines = codeSnippet.text
|
||||
.split('\n')
|
||||
.map((line, index) =>
|
||||
highlightCodeLines(line, index + snippetStartLine, highlightedRegion)
|
||||
highlightAndEscapeCodeLines(line, index + snippetStartLine, highlightedRegion)
|
||||
);
|
||||
|
||||
// Make sure there are no extra newlines before or after the <code> block:
|
||||
@@ -153,20 +153,25 @@ function generateMarkdownForCodeSnippet(
|
||||
return lines;
|
||||
}
|
||||
|
||||
function highlightCodeLines(
|
||||
function highlightAndEscapeCodeLines(
|
||||
line: string,
|
||||
lineNumber: number,
|
||||
highlightedRegion?: HighlightedRegion
|
||||
): string {
|
||||
if (!highlightedRegion || !shouldHighlightLine(lineNumber, highlightedRegion)) {
|
||||
return line;
|
||||
return escapeHtmlCharacters(line);
|
||||
}
|
||||
const partiallyHighlightedLine = parseHighlightedLine(
|
||||
line,
|
||||
lineNumber,
|
||||
highlightedRegion
|
||||
);
|
||||
return `${partiallyHighlightedLine.plainSection1}<strong>${partiallyHighlightedLine.highlightedSection}</strong>${partiallyHighlightedLine.plainSection2}`;
|
||||
|
||||
const plainSection1 = escapeHtmlCharacters(partiallyHighlightedLine.plainSection1);
|
||||
const highlightedSection = escapeHtmlCharacters(partiallyHighlightedLine.highlightedSection);
|
||||
const plainSection2 = escapeHtmlCharacters(partiallyHighlightedLine.plainSection2);
|
||||
|
||||
return `${plainSection1}<strong>${highlightedSection}</strong>${plainSection2}`;
|
||||
}
|
||||
|
||||
function generateMarkdownForAlertMessage(
|
||||
@@ -330,3 +335,10 @@ function createFileName(nwo: string) {
|
||||
const [owner, repo] = nwo.split('/');
|
||||
return `${owner}-${repo}`;
|
||||
}
|
||||
|
||||
/**
 * Escape characters that could be interpreted as HTML instead of raw code.
 */
function escapeHtmlCharacters(text: string): string {
  return text.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
}
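A quick check of the expected behaviour (plain Node, no extension context needed); the sample input is illustrative:

```typescript
// Same replacement chain as escapeHtmlCharacters above.
const escapeHtml = (text: string): string =>
  text.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');

// A line of code that would otherwise be parsed as markup inside the <code> block:
console.log(escapeHtml('if (a < b && b > c) { render("<div>"); }'));
// -> if (a &lt; b &amp;&amp; b &gt; c) { render("&lt;div&gt;"); }
```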
|
||||
|
||||
@@ -1,7 +1,7 @@
 import * as vscode from 'vscode';
 import { Credentials } from '../authentication';
 import { Logger } from '../logging';
-import { getWorkflowStatus } from './gh-actions-api-client';
+import { getWorkflowStatus, isArtifactAvailable, RESULT_INDEX_ARTIFACT_NAME } from './gh-api/gh-actions-api-client';
 import { RemoteQuery } from './remote-query';
 import { RemoteQueryWorkflowResult } from './remote-query-workflow-result';

@@ -42,7 +42,25 @@ export class RemoteQueriesMonitor {
         remoteQuery.controllerRepository.name,
         remoteQuery.actionsWorkflowRunId);

-      if (workflowStatus.status !== 'InProgress') {
+      // Even if the workflow indicates it has completed, artifacts
+      // might still take a while to become available. So we need to
+      // check for the artifact before we can declare the workflow
+      // as having completed.
+      if (workflowStatus.status === 'CompletedSuccessfully') {
+        const resultIndexAvailable = await isArtifactAvailable(
+          credentials,
+          remoteQuery.controllerRepository.owner,
+          remoteQuery.controllerRepository.name,
+          remoteQuery.actionsWorkflowRunId,
+          RESULT_INDEX_ARTIFACT_NAME
+        );
+
+        if (resultIndexAvailable) {
+          return workflowStatus;
+        }
+
+        // We don't have a result-index yet, so we'll keep monitoring.
+      } else if (workflowStatus.status !== 'InProgress') {
         return workflowStatus;
       }
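The pattern above, treating a "successful" workflow as finished only once its artifact can actually be fetched, generalizes to any poll-until-ready loop. Here is a small self-contained sketch, with hypothetical check and sleep helpers standing in for `isArtifactAvailable` and the monitor's own delay:

```typescript
// Hypothetical helpers: in the extension this role is played by isArtifactAvailable()
// and the monitor's polling interval; here they are stubs so the sketch runs standalone.
async function artifactIsReady(): Promise<boolean> {
  return Math.random() > 0.7; // pretend the artifact eventually appears
}

const sleep = (ms: number) => new Promise<void>(resolve => setTimeout(resolve, ms));

async function waitForArtifact(maxAttempts = 10, intervalMs = 1000): Promise<boolean> {
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    // The workflow may already report success, but the artifact can lag behind,
    // so keep polling until the artifact itself is visible.
    if (await artifactIsReady()) {
      return true;
    }
    await sleep(intervalMs);
  }
  return false; // caller decides whether to give up or surface an error
}
```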
@@ -1,11 +1,10 @@
|
||||
import {
|
||||
WebviewPanel,
|
||||
ExtensionContext,
|
||||
window as Window,
|
||||
ViewColumn,
|
||||
Uri,
|
||||
workspace,
|
||||
commands
|
||||
commands,
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
@@ -16,7 +15,6 @@ import {
|
||||
RemoteQueryDownloadAllAnalysesResultsMessage
|
||||
} from '../pure/interface-types';
|
||||
import { Logger } from '../logging';
|
||||
import { getHtmlForWebview } from '../interface-utils';
|
||||
import { assertNever } from '../pure/helpers-pure';
|
||||
import {
|
||||
AnalysisSummary,
|
||||
@@ -34,18 +32,17 @@ import { SHOW_QUERY_TEXT_MSG } from '../query-history';
|
||||
import { AnalysesResultsManager } from './analyses-results-manager';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
import { humanizeUnit } from '../pure/time';
|
||||
import { AbstractWebview, WebviewPanelConfig } from '../abstract-webview';
|
||||
|
||||
export class RemoteQueriesInterfaceManager {
|
||||
private panel: WebviewPanel | undefined;
|
||||
private panelLoaded = false;
|
||||
export class RemoteQueriesView extends AbstractWebview<ToRemoteQueriesMessage, FromRemoteQueriesMessage> {
|
||||
private currentQueryId: string | undefined;
|
||||
private panelLoadedCallBacks: (() => void)[] = [];
|
||||
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
ctx: ExtensionContext,
|
||||
private readonly logger: Logger,
|
||||
private readonly analysesResultsManager: AnalysesResultsManager
|
||||
) {
|
||||
super(ctx);
|
||||
this.panelLoadedCallBacks.push(() => {
|
||||
void logger.log('Variant analysis results view loaded');
|
||||
});
|
||||
@@ -103,76 +100,56 @@ export class RemoteQueriesInterfaceManager {
|
||||
};
|
||||
}
|
||||
|
||||
getPanel(): WebviewPanel {
|
||||
if (this.panel == undefined) {
|
||||
const { ctx } = this;
|
||||
const panel = (this.panel = Window.createWebviewPanel(
|
||||
'remoteQueriesView',
|
||||
'CodeQL Query Results',
|
||||
{ viewColumn: ViewColumn.Active, preserveFocus: true },
|
||||
{
|
||||
enableScripts: true,
|
||||
enableFindWidget: true,
|
||||
retainContextWhenHidden: true,
|
||||
localResourceRoots: [
|
||||
Uri.file(this.analysesResultsManager.storagePath),
|
||||
Uri.file(path.join(this.ctx.extensionPath, 'out')),
|
||||
Uri.file(path.join(this.ctx.extensionPath, 'node_modules/@vscode/codicons/dist')),
|
||||
],
|
||||
}
|
||||
));
|
||||
this.panel.onDidDispose(
|
||||
() => {
|
||||
this.panel = undefined;
|
||||
this.currentQueryId = undefined;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
);
|
||||
|
||||
const scriptPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/remoteQueriesView.js')
|
||||
);
|
||||
|
||||
const baseStylesheetUriOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/remote-queries/view/baseStyles.css')
|
||||
);
|
||||
|
||||
const stylesheetPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/remote-queries/view/remoteQueries.css')
|
||||
);
|
||||
|
||||
// Allows use of the VS Code "codicons" icon set.
|
||||
// See https://github.com/microsoft/vscode-codicons
|
||||
const codiconsPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('node_modules/@vscode/codicons/dist/codicon.css')
|
||||
);
|
||||
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
panel.webview,
|
||||
scriptPathOnDisk,
|
||||
[baseStylesheetUriOnDisk, stylesheetPathOnDisk, codiconsPathOnDisk],
|
||||
true
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
panel.webview.onDidReceiveMessage(
|
||||
async (e) => this.handleMsgFromView(e),
|
||||
undefined,
|
||||
ctx.subscriptions
|
||||
)
|
||||
);
|
||||
}
|
||||
return this.panel;
|
||||
protected getPanelConfig(): WebviewPanelConfig {
|
||||
return {
|
||||
viewId: 'remoteQueriesView',
|
||||
title: 'CodeQL Query Results',
|
||||
viewColumn: ViewColumn.Active,
|
||||
preserveFocus: true,
|
||||
view: 'remote-queries',
|
||||
additionalOptions: {
|
||||
localResourceRoots: [
|
||||
Uri.file(this.analysesResultsManager.storagePath)
|
||||
]
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private waitForPanelLoaded(): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
if (this.panelLoaded) {
|
||||
resolve();
|
||||
} else {
|
||||
this.panelLoadedCallBacks.push(resolve);
|
||||
}
|
||||
});
|
||||
protected onPanelDispose(): void {
|
||||
this.currentQueryId = undefined;
|
||||
}
|
||||
|
||||
protected async onMessage(msg: FromRemoteQueriesMessage): Promise<void> {
|
||||
switch (msg.t) {
|
||||
case 'viewLoaded':
|
||||
this.onWebViewLoaded();
|
||||
break;
|
||||
case 'remoteQueryError':
|
||||
void this.logger.log(
|
||||
`Variant analysis error: ${msg.error}`
|
||||
);
|
||||
break;
|
||||
case 'openFile':
|
||||
await this.openFile(msg.filePath);
|
||||
break;
|
||||
case 'openVirtualFile':
|
||||
await this.openVirtualFile(msg.queryText);
|
||||
break;
|
||||
case 'copyRepoList':
|
||||
await commands.executeCommand('codeQL.copyRepoList', msg.queryId);
|
||||
break;
|
||||
case 'remoteQueryDownloadAnalysisResults':
|
||||
await this.downloadAnalysisResults(msg);
|
||||
break;
|
||||
case 'remoteQueryDownloadAllAnalysesResults':
|
||||
await this.downloadAllAnalysesResults(msg);
|
||||
break;
|
||||
case 'remoteQueryExportResults':
|
||||
await commands.executeCommand('codeQL.exportVariantAnalysisResults', msg.queryId);
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
}
|
||||
}
|
||||
|
||||
private async openFile(filePath: string) {
|
||||
@@ -200,43 +177,6 @@ export class RemoteQueriesInterfaceManager {
|
||||
}
|
||||
}
|
||||
|
||||
private async handleMsgFromView(
|
||||
msg: FromRemoteQueriesMessage
|
||||
): Promise<void> {
|
||||
switch (msg.t) {
|
||||
case 'remoteQueryLoaded':
|
||||
this.panelLoaded = true;
|
||||
this.panelLoadedCallBacks.forEach((cb) => cb());
|
||||
this.panelLoadedCallBacks = [];
|
||||
break;
|
||||
case 'remoteQueryError':
|
||||
void this.logger.log(
|
||||
`Variant analysis error: ${msg.error}`
|
||||
);
|
||||
break;
|
||||
case 'openFile':
|
||||
await this.openFile(msg.filePath);
|
||||
break;
|
||||
case 'openVirtualFile':
|
||||
await this.openVirtualFile(msg.queryText);
|
||||
break;
|
||||
case 'copyRepoList':
|
||||
await commands.executeCommand('codeQL.copyRepoList', msg.queryId);
|
||||
break;
|
||||
case 'remoteQueryDownloadAnalysisResults':
|
||||
await this.downloadAnalysisResults(msg);
|
||||
break;
|
||||
case 'remoteQueryDownloadAllAnalysesResults':
|
||||
await this.downloadAllAnalysesResults(msg);
|
||||
break;
|
||||
case 'remoteQueryExportResults':
|
||||
await commands.executeCommand('codeQL.exportVariantAnalysisResults');
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
}
|
||||
}
|
||||
|
||||
private async downloadAnalysisResults(msg: RemoteQueryDownloadAnalysisResultsMessage): Promise<void> {
|
||||
const queryId = this.currentQueryId;
|
||||
await this.analysesResultsManager.downloadAnalysisResults(
|
||||
@@ -261,10 +201,6 @@ export class RemoteQueriesInterfaceManager {
|
||||
}
|
||||
}
|
||||
|
||||
private postMessage(msg: ToRemoteQueriesMessage): Thenable<boolean> {
|
||||
return this.getPanel().webview.postMessage(msg);
|
||||
}
|
||||
|
||||
private getDuration(startTime: number, endTime: number): string {
|
||||
const diffInMs = startTime - endTime;
|
||||
return humanizeUnit(diffInMs);
|
||||
@@ -1,6 +1,8 @@
 import { RemoteQuery } from './remote-query';
+import { VariantAnalysis } from './shared/variant-analysis';

 export interface RemoteQuerySubmissionResult {
   queryDirPath?: string;
   query?: RemoteQuery;
+  variantAnalysis?: VariantAnalysis;
 }
@@ -1,4 +1,4 @@
|
||||
import { CancellationToken, Uri, window } from 'vscode';
|
||||
import { CancellationToken, commands, Uri, window } from 'vscode';
|
||||
import * as path from 'path';
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as fs from 'fs-extra';
|
||||
@@ -12,19 +12,24 @@ import {
|
||||
showAndLogInformationMessage,
|
||||
tryGetQueryMetadata,
|
||||
pluralize,
|
||||
tmpDir
|
||||
tmpDir,
|
||||
} from '../helpers';
|
||||
import { Credentials } from '../authentication';
|
||||
import * as cli from '../cli';
|
||||
import { logger } from '../logging';
|
||||
import { getActionBranch, getRemoteControllerRepo, setRemoteControllerRepo } from '../config';
|
||||
import { getActionBranch, getRemoteControllerRepo, isVariantAnalysisLiveResultsEnabled, setRemoteControllerRepo } from '../config';
|
||||
import { ProgressCallback, UserCancellationException } from '../commandRunner';
|
||||
import { OctokitResponse } from '@octokit/types/dist-types';
|
||||
import { OctokitResponse, RequestError } from '@octokit/types/dist-types';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQuerySubmissionResult } from './remote-query-submission-result';
|
||||
import { QueryMetadata } from '../pure/interface-types';
|
||||
import { getErrorMessage, REPO_REGEX } from '../pure/helpers-pure';
|
||||
import * as ghApiClient from './gh-api/gh-api-client';
|
||||
import { getRepositorySelection, isValidSelection, RepositorySelection } from './repository-selection';
|
||||
import { parseVariantAnalysisQueryLanguage, VariantAnalysisSubmission } from './shared/variant-analysis';
|
||||
import { Repository } from './shared/repository';
|
||||
import { processVariantAnalysis } from './variant-analysis-processor';
|
||||
import { VariantAnalysisManager } from './variant-analysis-manager';
|
||||
|
||||
export interface QlPack {
|
||||
name: string;
|
||||
@@ -178,7 +183,8 @@ export async function runRemoteQuery(
|
||||
uri: Uri | undefined,
|
||||
dryRun: boolean,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
token: CancellationToken,
|
||||
variantAnalysisManager: VariantAnalysisManager
|
||||
): Promise<void | RemoteQuerySubmissionResult> {
|
||||
if (!(await cliServer.cliConstraints.supportsRemoteQueries())) {
|
||||
throw new Error(`Variant analysis is not supported by this version of CodeQL. Please upgrade to v${cli.CliVersionConstraint.CLI_VERSION_REMOTE_QUERIES
|
||||
@@ -210,31 +216,7 @@ export async function runRemoteQuery(
|
||||
message: 'Determining controller repo'
|
||||
});
|
||||
|
||||
// Get the controller repo from the config, if it exists.
|
||||
// If it doesn't exist, prompt the user to enter it, and save that value to the config.
|
||||
let controllerRepo: string | undefined;
|
||||
controllerRepo = getRemoteControllerRepo();
|
||||
if (!controllerRepo || !REPO_REGEX.test(controllerRepo)) {
|
||||
void logger.log(controllerRepo ? 'Invalid controller repository name.' : 'No controller repository defined.');
|
||||
controllerRepo = await window.showInputBox({
|
||||
title: 'Controller repository in which to display progress and results of variant analysis',
|
||||
placeHolder: '<owner>/<repo>',
|
||||
prompt: 'Enter the name of a GitHub repository in the format <owner>/<repo>',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (!controllerRepo) {
|
||||
void showAndLogErrorMessage('No controller repository entered.');
|
||||
return;
|
||||
} else if (!REPO_REGEX.test(controllerRepo)) { // Check if user entered invalid input
|
||||
void showAndLogErrorMessage('Invalid repository format. Must be a valid GitHub repository in the format <owner>/<repo>.');
|
||||
return;
|
||||
}
|
||||
void logger.log(`Setting the controller repository as: ${controllerRepo}`);
|
||||
await setRemoteControllerRepo(controllerRepo);
|
||||
}
|
||||
|
||||
void logger.log(`Using controller repository: ${controllerRepo}`);
|
||||
const [owner, repo] = controllerRepo.split('/');
|
||||
const controllerRepo = await getControllerRepo(credentials);
|
||||
|
||||
progress({
|
||||
maxStep: 4,
|
||||
@@ -259,31 +241,77 @@ export async function runRemoteQuery(
|
||||
});
|
||||
|
||||
const actionBranch = getActionBranch();
|
||||
const apiResponse = await runRemoteQueriesApiRequest(credentials, actionBranch, language, repoSelection, owner, repo, base64Pack, dryRun);
|
||||
const queryStartTime = Date.now();
|
||||
const queryMetadata = await tryGetQueryMetadata(cliServer, queryFile);
|
||||
|
||||
if (dryRun) {
|
||||
return { queryDirPath: remoteQueryDir.path };
|
||||
} else {
|
||||
if (!apiResponse) {
|
||||
return;
|
||||
if (isVariantAnalysisLiveResultsEnabled()) {
|
||||
const queryName = getQueryName(queryMetadata, queryFile);
|
||||
const variantAnalysisLanguage = parseVariantAnalysisQueryLanguage(language);
|
||||
if (variantAnalysisLanguage === undefined) {
|
||||
throw new UserCancellationException(`Found unsupported language: ${language}`);
|
||||
}
|
||||
|
||||
const workflowRunId = apiResponse.workflow_run_id;
|
||||
const repositoryCount = apiResponse.repositories_queried.length;
|
||||
const remoteQuery = await buildRemoteQueryEntity(
|
||||
queryFile,
|
||||
queryMetadata,
|
||||
owner,
|
||||
repo,
|
||||
queryStartTime,
|
||||
workflowRunId,
|
||||
language,
|
||||
repositoryCount);
|
||||
const queryText = await fs.readFile(queryFile, 'utf8');
|
||||
|
||||
// don't return the path because it has been deleted
|
||||
return { query: remoteQuery };
|
||||
const variantAnalysisSubmission: VariantAnalysisSubmission = {
|
||||
startTime: queryStartTime,
|
||||
actionRepoRef: actionBranch,
|
||||
controllerRepoId: controllerRepo.id,
|
||||
query: {
|
||||
name: queryName,
|
||||
filePath: queryFile,
|
||||
pack: base64Pack,
|
||||
language: variantAnalysisLanguage,
|
||||
text: queryText,
|
||||
},
|
||||
databases: {
|
||||
repositories: repoSelection.repositories,
|
||||
repositoryLists: repoSelection.repositoryLists,
|
||||
repositoryOwners: repoSelection.owners
|
||||
}
|
||||
};
|
||||
|
||||
const variantAnalysisResponse = await ghApiClient.submitVariantAnalysis(
|
||||
credentials,
|
||||
variantAnalysisSubmission
|
||||
);
|
||||
|
||||
const processedVariantAnalysis = processVariantAnalysis(variantAnalysisSubmission, variantAnalysisResponse);
|
||||
|
||||
variantAnalysisManager.onVariantAnalysisSubmitted(processedVariantAnalysis);
|
||||
|
||||
void logger.log(`Variant analysis:\n${JSON.stringify(processedVariantAnalysis, null, 2)}`);
|
||||
|
||||
void showAndLogInformationMessage(`Variant analysis ${processedVariantAnalysis.query.name} submitted for processing`);
|
||||
|
||||
void commands.executeCommand('codeQL.openVariantAnalysisView', processedVariantAnalysis.id);
|
||||
void commands.executeCommand('codeQL.monitorVariantAnalysis', processedVariantAnalysis);
|
||||
|
||||
return { variantAnalysis: processedVariantAnalysis };
|
||||
} else {
|
||||
const apiResponse = await runRemoteQueriesApiRequest(credentials, actionBranch, language, repoSelection, controllerRepo, base64Pack, dryRun);
|
||||
|
||||
if (dryRun) {
|
||||
return { queryDirPath: remoteQueryDir.path };
|
||||
} else {
|
||||
if (!apiResponse) {
|
||||
return;
|
||||
}
|
||||
|
||||
const workflowRunId = apiResponse.workflow_run_id;
|
||||
const repositoryCount = apiResponse.repositories_queried.length;
|
||||
const remoteQuery = await buildRemoteQueryEntity(
|
||||
queryFile,
|
||||
queryMetadata,
|
||||
controllerRepo,
|
||||
queryStartTime,
|
||||
workflowRunId,
|
||||
language,
|
||||
repositoryCount);
|
||||
|
||||
// don't return the path because it has been deleted
|
||||
return { query: remoteQuery };
|
||||
}
|
||||
}
|
||||
|
||||
} finally {
|
||||
@@ -301,8 +329,7 @@ async function runRemoteQueriesApiRequest(
|
||||
ref: string,
|
||||
language: string,
|
||||
repoSelection: RepositorySelection,
|
||||
owner: string,
|
||||
repo: string,
|
||||
controllerRepo: Repository,
|
||||
queryPackBase64: string,
|
||||
dryRun = false
|
||||
): Promise<void | QueriesResponse> {
|
||||
@@ -318,8 +345,7 @@ async function runRemoteQueriesApiRequest(
|
||||
if (dryRun) {
|
||||
void showAndLogInformationMessage('[DRY RUN] Would have sent request. See extension log for the payload.');
|
||||
void logger.log(JSON.stringify({
|
||||
owner,
|
||||
repo,
|
||||
controllerRepo,
|
||||
data: {
|
||||
...data,
|
||||
queryPackBase64: queryPackBase64.substring(0, 100) + '... ' + queryPackBase64.length + ' bytes'
|
||||
@@ -331,14 +357,13 @@ async function runRemoteQueriesApiRequest(
|
||||
try {
|
||||
const octokit = await credentials.getOctokit();
|
||||
const response: OctokitResponse<QueriesResponse, number> = await octokit.request(
|
||||
'POST /repos/:owner/:repo/code-scanning/codeql/queries',
|
||||
'POST /repositories/:controllerRepoId/code-scanning/codeql/queries',
|
||||
{
|
||||
owner,
|
||||
repo,
|
||||
controllerRepoId: controllerRepo.id,
|
||||
data
|
||||
}
|
||||
);
|
||||
const { popupMessage, logMessage } = parseResponse(owner, repo, response.data);
|
||||
const { popupMessage, logMessage } = parseResponse(controllerRepo, response.data);
|
||||
void showAndLogInformationMessage(popupMessage, { fullMessage: logMessage });
|
||||
return response.data;
|
||||
} catch (error: any) {
|
||||
@@ -354,14 +379,14 @@ const eol = os.EOL;
|
||||
const eol2 = os.EOL + os.EOL;
|
||||
|
||||
// exported for testing only
|
||||
export function parseResponse(owner: string, repo: string, response: QueriesResponse) {
|
||||
export function parseResponse(controllerRepo: Repository, response: QueriesResponse) {
|
||||
const repositoriesQueried = response.repositories_queried;
|
||||
const repositoryCount = repositoriesQueried.length;
|
||||
|
||||
const popupMessage = `Successfully scheduled runs on ${pluralize(repositoryCount, 'repository', 'repositories')}. [Click here to see the progress](https://github.com/${owner}/${repo}/actions/runs/${response.workflow_run_id}).`
|
||||
const popupMessage = `Successfully scheduled runs on ${pluralize(repositoryCount, 'repository', 'repositories')}. [Click here to see the progress](https://github.com/${controllerRepo.fullName}/actions/runs/${response.workflow_run_id}).`
|
||||
+ (response.errors ? `${eol2}Some repositories could not be scheduled. See extension log for details.` : '');
|
||||
|
||||
let logMessage = `Successfully scheduled runs on ${pluralize(repositoryCount, 'repository', 'repositories')}. See https://github.com/${owner}/${repo}/actions/runs/${response.workflow_run_id}.`;
|
||||
let logMessage = `Successfully scheduled runs on ${pluralize(repositoryCount, 'repository', 'repositories')}. See https://github.com/${controllerRepo.fullName}/actions/runs/${response.workflow_run_id}.`;
|
||||
logMessage += `${eol2}Repositories queried:${eol}${repositoriesQueried.join(', ')}`;
|
||||
if (response.errors) {
|
||||
const { invalid_repositories, repositories_without_database, private_repositories, cutoff_repositories, cutoff_repositories_count } = response.errors;
|
||||
@@ -425,17 +450,15 @@ async function ensureNameAndSuite(queryPackDir: string, packRelativePath: string
|
||||
async function buildRemoteQueryEntity(
|
||||
queryFilePath: string,
|
||||
queryMetadata: QueryMetadata | undefined,
|
||||
controllerRepoOwner: string,
|
||||
controllerRepoName: string,
|
||||
controllerRepo: Repository,
|
||||
queryStartTime: number,
|
||||
workflowRunId: number,
|
||||
language: string,
|
||||
repositoryCount: number
|
||||
): Promise<RemoteQuery> {
|
||||
// The query name is either the name as specified in the query metadata, or the file name.
|
||||
const queryName = queryMetadata?.name ?? path.basename(queryFilePath);
|
||||
|
||||
const queryName = getQueryName(queryMetadata, queryFilePath);
|
||||
const queryText = await fs.readFile(queryFilePath, 'utf8');
|
||||
const [owner, name] = controllerRepo.fullName.split('/');
|
||||
|
||||
return {
|
||||
queryName,
|
||||
@@ -443,11 +466,59 @@ async function buildRemoteQueryEntity(
|
||||
queryText,
|
||||
language,
|
||||
controllerRepository: {
|
||||
owner: controllerRepoOwner,
|
||||
name: controllerRepoName,
|
||||
owner,
|
||||
name,
|
||||
},
|
||||
executionStartTime: queryStartTime,
|
||||
actionsWorkflowRunId: workflowRunId,
|
||||
repositoryCount,
|
||||
};
|
||||
}
|
||||
|
||||
function getQueryName(queryMetadata: QueryMetadata | undefined, queryFilePath: string): string {
|
||||
// The query name is either the name as specified in the query metadata, or the file name.
|
||||
return queryMetadata?.name ?? path.basename(queryFilePath);
|
||||
}
|
||||
|
||||
export async function getControllerRepo(credentials: Credentials): Promise<Repository> {
|
||||
// Get the controller repo from the config, if it exists.
|
||||
// If it doesn't exist, prompt the user to enter it, and save that value to the config.
|
||||
let controllerRepoNwo: string | undefined;
|
||||
controllerRepoNwo = getRemoteControllerRepo();
|
||||
if (!controllerRepoNwo || !REPO_REGEX.test(controllerRepoNwo)) {
|
||||
void logger.log(controllerRepoNwo ? 'Invalid controller repository name.' : 'No controller repository defined.');
|
||||
controllerRepoNwo = await window.showInputBox({
|
||||
title: 'Controller repository in which to run the GitHub Actions workflow for this variant analysis',
|
||||
placeHolder: '<owner>/<repo>',
|
||||
prompt: 'Enter the name of a GitHub repository in the format <owner>/<repo>',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (!controllerRepoNwo) {
|
||||
throw new UserCancellationException('No controller repository entered.');
|
||||
} else if (!REPO_REGEX.test(controllerRepoNwo)) { // Check if user entered invalid input
|
||||
throw new UserCancellationException('Invalid repository format. Must be a valid GitHub repository in the format <owner>/<repo>.');
|
||||
}
|
||||
void logger.log(`Setting the controller repository as: ${controllerRepoNwo}`);
|
||||
await setRemoteControllerRepo(controllerRepoNwo);
|
||||
}
|
||||
|
||||
void logger.log(`Using controller repository: ${controllerRepoNwo}`);
|
||||
const [owner, repo] = controllerRepoNwo.split('/');
|
||||
|
||||
try {
|
||||
const controllerRepo = await ghApiClient.getRepositoryFromNwo(credentials, owner, repo);
|
||||
void logger.log(`Controller repository ID: ${controllerRepo.id}`);
|
||||
return {
|
||||
id: controllerRepo.id,
|
||||
fullName: controllerRepo.full_name,
|
||||
private: controllerRepo.private,
|
||||
};
|
||||
|
||||
} catch (e: any) {
|
||||
if ((e as RequestError).status === 404) {
|
||||
throw new Error(`Controller repository "${owner}/${repo}" not found`);
|
||||
} else {
|
||||
throw new Error(`Error getting controller repository "${owner}/${repo}": ${e.message}`);
|
||||
}
|
||||
}
|
||||
}
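For reference, the `<owner>/<repo>` handling in `getControllerRepo` boils down to a validate-then-split step. The regular expression below is an illustrative stand-in, not the extension's actual `REPO_REGEX`:

```typescript
// Assumption: a simple owner/repo pattern for illustration; the extension's
// REPO_REGEX may be stricter.
const NWO_PATTERN = /^[\w.-]+\/[\w.-]+$/;

function splitNwo(nwo: string): { owner: string; repo: string } {
  if (!NWO_PATTERN.test(nwo)) {
    throw new Error('Invalid repository format. Must be a valid GitHub repository in the format <owner>/<repo>.');
  }
  const [owner, repo] = nwo.split('/');
  return { owner, repo };
}

// splitNwo('github/codeql') -> { owner: 'github', repo: 'codeql' }
```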
@@ -0,0 +1,5 @@
export interface Repository {
  id: number,
  fullName: string,
  private: boolean,
}
@@ -0,0 +1,16 @@
import { VariantAnalysis } from './variant-analysis';

export type VariantAnalysisMonitorStatus =
  | 'InProgress'
  | 'CompletedSuccessfully'
  | 'CompletedUnsuccessfully'
  | 'Failed'
  | 'Cancelled'
  | 'TimedOut';

export interface VariantAnalysisMonitorResult {
  status: VariantAnalysisMonitorStatus;
  error?: string;
  scannedReposDownloaded?: number[],
  variantAnalysis?: VariantAnalysis
}
@@ -0,0 +1,177 @@
|
||||
import { Repository } from './repository';
|
||||
import { AnalysisAlert, AnalysisRawResults } from './analysis-result';
|
||||
|
||||
export interface VariantAnalysis {
|
||||
id: number,
|
||||
controllerRepoId: number,
|
||||
query: {
|
||||
name: string,
|
||||
filePath: string,
|
||||
language: VariantAnalysisQueryLanguage,
|
||||
text: string,
|
||||
},
|
||||
databases: {
|
||||
repositories?: string[],
|
||||
repositoryLists?: string[],
|
||||
repositoryOwners?: string[],
|
||||
},
|
||||
createdAt: string,
|
||||
updatedAt: string,
|
||||
executionStartTime: number;
|
||||
status: VariantAnalysisStatus,
|
||||
completedAt?: string,
|
||||
actionsWorkflowRunId?: number,
|
||||
failureReason?: VariantAnalysisFailureReason,
|
||||
scannedRepos?: VariantAnalysisScannedRepository[],
|
||||
skippedRepos?: VariantAnalysisSkippedRepositories
|
||||
}
|
||||
|
||||
export enum VariantAnalysisQueryLanguage {
|
||||
CSharp = 'csharp',
|
||||
Cpp = 'cpp',
|
||||
Go = 'go',
|
||||
Java = 'java',
|
||||
Javascript = 'javascript',
|
||||
Python = 'python',
|
||||
Ruby = 'ruby'
|
||||
}
|
||||
|
||||
export function parseVariantAnalysisQueryLanguage(language: string): VariantAnalysisQueryLanguage | undefined {
|
||||
return Object.values(VariantAnalysisQueryLanguage).find(x => x === language);
|
||||
}
|
||||
|
||||
export enum VariantAnalysisStatus {
|
||||
InProgress = 'inProgress',
|
||||
Succeeded = 'succeeded',
|
||||
Failed = 'failed',
|
||||
Canceled = 'canceled',
|
||||
}
|
||||
|
||||
export enum VariantAnalysisFailureReason {
|
||||
NoReposQueried = 'noReposQueried',
|
||||
InternalError = 'internalError',
|
||||
}
|
||||
|
||||
export enum VariantAnalysisRepoStatus {
|
||||
Pending = 'pending',
|
||||
InProgress = 'inProgress',
|
||||
Succeeded = 'succeeded',
|
||||
Failed = 'failed',
|
||||
Canceled = 'canceled',
|
||||
TimedOut = 'timedOut',
|
||||
}
|
||||
|
||||
export interface VariantAnalysisScannedRepository {
|
||||
repository: Repository,
|
||||
analysisStatus: VariantAnalysisRepoStatus,
|
||||
resultCount?: number,
|
||||
artifactSizeInBytes?: number,
|
||||
failureMessage?: string
|
||||
}
|
||||
|
||||
export interface VariantAnalysisSkippedRepositories {
|
||||
accessMismatchRepos?: VariantAnalysisSkippedRepositoryGroup,
|
||||
notFoundRepos?: VariantAnalysisSkippedRepositoryGroup,
|
||||
noCodeqlDbRepos?: VariantAnalysisSkippedRepositoryGroup,
|
||||
overLimitRepos?: VariantAnalysisSkippedRepositoryGroup
|
||||
}
|
||||
|
||||
export interface VariantAnalysisSkippedRepositoryGroup {
|
||||
repositoryCount: number,
|
||||
repositories: VariantAnalysisSkippedRepository[],
|
||||
}
|
||||
|
||||
export interface VariantAnalysisSkippedRepository {
|
||||
id?: number,
|
||||
fullName: string,
|
||||
private?: boolean,
|
||||
}
|
||||
|
||||
export enum VariantAnalysisScannedRepositoryDownloadStatus {
|
||||
Pending = 'pending',
|
||||
InProgress = 'inProgress',
|
||||
Succeeded = 'succeeded',
|
||||
Failed = 'failed',
|
||||
}
|
||||
|
||||
export interface VariantAnalysisScannedRepositoryState {
|
||||
repositoryId: number;
|
||||
downloadStatus: VariantAnalysisScannedRepositoryDownloadStatus;
|
||||
}
|
||||
|
||||
export interface VariantAnalysisScannedRepositoryResult {
|
||||
variantAnalysisId: number;
|
||||
repositoryId: number;
|
||||
interpretedResults?: AnalysisAlert[];
|
||||
rawResults?: AnalysisRawResults;
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures information needed to submit a variant
|
||||
* analysis for processing.
|
||||
*/
|
||||
export interface VariantAnalysisSubmission {
|
||||
startTime: number,
|
||||
controllerRepoId: number,
|
||||
actionRepoRef: string,
|
||||
query: {
|
||||
name: string,
|
||||
filePath: string,
|
||||
language: VariantAnalysisQueryLanguage,
|
||||
text: string,
|
||||
|
||||
// Base64 encoded query pack.
|
||||
pack: string,
|
||||
},
|
||||
databases: {
|
||||
repositories?: string[],
|
||||
repositoryLists?: string[],
|
||||
repositoryOwners?: string[],
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param status
|
||||
* @returns whether the status is in a completed state, i.e. it cannot normally change state anymore
|
||||
*/
|
||||
export function isCompletedAnalysisRepoStatus(status: VariantAnalysisRepoStatus): boolean {
|
||||
return [
|
||||
// All states that indicates the repository has been scanned and cannot
|
||||
// change status anymore.
|
||||
VariantAnalysisRepoStatus.Succeeded, VariantAnalysisRepoStatus.Failed,
|
||||
VariantAnalysisRepoStatus.Canceled, VariantAnalysisRepoStatus.TimedOut,
|
||||
].includes(status);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param repo
|
||||
* @returns whether the repo scan is in a completed state, i.e. it cannot normally change state anymore
|
||||
*/
|
||||
export function hasRepoScanCompleted(repo: VariantAnalysisScannedRepository): boolean {
|
||||
return isCompletedAnalysisRepoStatus(repo.analysisStatus);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param repos
|
||||
* @returns the total number of results. Will be `undefined` when there are no repos with results.
|
||||
*/
|
||||
export function getTotalResultCount(repos: VariantAnalysisScannedRepository[] | undefined): number | undefined {
|
||||
const reposWithResultCounts = repos?.filter(repo => repo.resultCount !== undefined);
|
||||
if (reposWithResultCounts === undefined || reposWithResultCounts.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return reposWithResultCounts.reduce((acc, repo) => acc + (repo.resultCount ?? 0), 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param skippedRepos
|
||||
* @returns the total number of skipped repositories.
|
||||
*/
|
||||
export function getSkippedRepoCount(skippedRepos: VariantAnalysisSkippedRepositories | undefined): number {
|
||||
if (!skippedRepos) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return Object.values(skippedRepos).reduce((acc, group) => acc + group.repositoryCount, 0);
|
||||
}
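As a concrete illustration of the two aggregation helpers above, here is a standalone sketch with minimal repo objects; the counts are made up:

```typescript
// Minimal shapes mirroring VariantAnalysisScannedRepository and the skipped-repo groups.
interface ScannedRepo { resultCount?: number }
interface SkippedGroup { repositoryCount: number }

const scanned: ScannedRepo[] = [{ resultCount: 12 }, {}, { resultCount: 3 }];
const skipped: Record<string, SkippedGroup> = {
  accessMismatchRepos: { repositoryCount: 2 },
  noCodeqlDbRepos: { repositoryCount: 5 },
};

// Same reduce logic as getTotalResultCount: repos without a count are ignored,
// and an empty set yields undefined rather than 0.
const withCounts = scanned.filter(r => r.resultCount !== undefined);
const totalResults = withCounts.length === 0
  ? undefined
  : withCounts.reduce((acc, r) => acc + (r.resultCount ?? 0), 0);   // 15

// Same reduce logic as getSkippedRepoCount.
const totalSkipped = Object.values(skipped).reduce((acc, g) => acc + g.repositoryCount, 0); // 7

console.log(totalResults, totalSkipped);
```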
@@ -0,0 +1,26 @@
import { TextDocumentContentProvider, Uri } from 'vscode';
import { URLSearchParams } from 'url';
import { showAndLogWarningMessage } from '../helpers';
import { SHOW_QUERY_TEXT_MSG } from '../query-history';
import { VariantAnalysisManager } from './variant-analysis-manager';

export const createVariantAnalysisContentProvider = (variantAnalysisManager: VariantAnalysisManager): TextDocumentContentProvider => ({
  async provideTextDocumentContent(uri: Uri): Promise<string | undefined> {
    const params = new URLSearchParams(uri.query);

    const variantAnalysisIdString = params.get('variantAnalysisId');
    if (!variantAnalysisIdString) {
      void showAndLogWarningMessage('Unable to show query text. No variant analysis ID provided.');
      return undefined;
    }
    const variantAnalysisId = parseInt(variantAnalysisIdString);

    const variantAnalysis = await variantAnalysisManager.getVariantAnalysis(variantAnalysisId);
    if (!variantAnalysis) {
      void showAndLogWarningMessage('Unable to show query text. No variant analysis found.');
      return undefined;
    }

    return SHOW_QUERY_TEXT_MSG + variantAnalysis.query.text;
  }
});
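For context, a provider like this only takes effect once it is registered for a URI scheme. A hypothetical wiring sketch follows; the `codeql-variant-analysis` scheme name and the import path of the content-provider module are assumptions for illustration, not something this diff defines.

```typescript
import { ExtensionContext, workspace } from 'vscode';
// Assumed module paths for the two files introduced in this change.
import { createVariantAnalysisContentProvider } from './variant-analysis-content-provider';
import { VariantAnalysisManager } from './variant-analysis-manager';

// Hypothetical activation wiring for the text document content provider.
export function registerVariantAnalysisQueryTextProvider(
  ctx: ExtensionContext,
  variantAnalysisManager: VariantAnalysisManager,
): void {
  ctx.subscriptions.push(
    workspace.registerTextDocumentContentProvider(
      'codeql-variant-analysis',
      createVariantAnalysisContentProvider(variantAnalysisManager),
    ),
  );
}

// A document opened with a URI such as
//   codeql-variant-analysis:query.ql?variantAnalysisId=42
// would then be rendered by provideTextDocumentContent above.
```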
@@ -0,0 +1,15 @@
import { QueryStatus } from '../query-status';
import { VariantAnalysis } from './shared/variant-analysis';

/**
 * Information about a variant analysis.
 */
export interface VariantAnalysisHistoryItem {
  readonly t: 'variant-analysis';
  failureReason?: string;
  resultCount?: number;
  status: QueryStatus;
  completed: boolean;
  variantAnalysis: VariantAnalysis;
  userSpecifiedLabel?: string;
}
@@ -0,0 +1,197 @@
|
||||
import * as ghApiClient from './gh-api/gh-api-client';
|
||||
import { CancellationToken, commands, EventEmitter, ExtensionContext, window } from 'vscode';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { Logger } from '../logging';
|
||||
import { Credentials } from '../authentication';
|
||||
import { VariantAnalysisMonitor } from './variant-analysis-monitor';
|
||||
import {
|
||||
VariantAnalysis as VariantAnalysisApiResponse,
|
||||
VariantAnalysisRepoTask,
|
||||
VariantAnalysisScannedRepository as ApiVariantAnalysisScannedRepository
|
||||
} from './gh-api/variant-analysis';
|
||||
import {
|
||||
VariantAnalysis, VariantAnalysisQueryLanguage,
|
||||
VariantAnalysisScannedRepositoryDownloadStatus,
|
||||
VariantAnalysisScannedRepositoryResult,
|
||||
VariantAnalysisScannedRepositoryState
|
||||
} from './shared/variant-analysis';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
import { VariantAnalysisView } from './variant-analysis-view';
|
||||
import { VariantAnalysisViewManager } from './variant-analysis-view-manager';
|
||||
import { VariantAnalysisResultsManager } from './variant-analysis-results-manager';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { getControllerRepo } from './run-remote-query';
|
||||
import { processUpdatedVariantAnalysis } from './variant-analysis-processor';
|
||||
|
||||
export class VariantAnalysisManager extends DisposableObject implements VariantAnalysisViewManager<VariantAnalysisView> {
|
||||
private readonly _onVariantAnalysisAdded = this.push(new EventEmitter<VariantAnalysis>());
|
||||
public readonly onVariantAnalysisAdded = this._onVariantAnalysisAdded.event;
|
||||
|
||||
private readonly variantAnalysisMonitor: VariantAnalysisMonitor;
|
||||
private readonly variantAnalysisResultsManager: VariantAnalysisResultsManager;
|
||||
private readonly variantAnalyses = new Map<number, VariantAnalysis>();
|
||||
private readonly views = new Map<number, VariantAnalysisView>();
|
||||
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
cliServer: CodeQLCliServer,
|
||||
storagePath: string,
|
||||
logger: Logger,
|
||||
) {
|
||||
super();
|
||||
this.variantAnalysisMonitor = this.push(new VariantAnalysisMonitor(ctx, logger));
|
||||
this.variantAnalysisMonitor.onVariantAnalysisChange(this.onVariantAnalysisUpdated.bind(this));
|
||||
|
||||
this.variantAnalysisResultsManager = this.push(new VariantAnalysisResultsManager(cliServer, storagePath, logger));
|
||||
this.variantAnalysisResultsManager.onResultLoaded(this.onRepoResultLoaded.bind(this));
|
||||
}
|
||||
|
||||
public async showView(variantAnalysisId: number): Promise<void> {
|
||||
if (!this.views.has(variantAnalysisId)) {
|
||||
// The view will register itself with the manager, so we don't need to do anything here.
|
||||
this.push(new VariantAnalysisView(this.ctx, variantAnalysisId, this));
|
||||
}
|
||||
|
||||
const variantAnalysisView = this.views.get(variantAnalysisId)!;
|
||||
await variantAnalysisView.openView();
|
||||
return;
|
||||
}
|
||||
|
||||
public registerView(view: VariantAnalysisView): void {
|
||||
if (this.views.has(view.variantAnalysisId)) {
|
||||
throw new Error(`View for variant analysis with id: ${view.variantAnalysisId} already exists`);
|
||||
}
|
||||
|
||||
this.views.set(view.variantAnalysisId, view);
|
||||
}
|
||||
|
||||
public unregisterView(view: VariantAnalysisView): void {
|
||||
this.views.delete(view.variantAnalysisId);
|
||||
}
|
||||
|
||||
public getView(variantAnalysisId: number): VariantAnalysisView | undefined {
|
||||
return this.views.get(variantAnalysisId);
|
||||
}
|
||||
|
||||
public async getVariantAnalysis(variantAnalysisId: number): Promise<VariantAnalysis | undefined> {
|
||||
return this.variantAnalyses.get(variantAnalysisId);
|
||||
}
|
||||
|
||||
public async loadResults(variantAnalysisId: number, repositoryFullName: string): Promise<void> {
|
||||
const variantAnalysis = this.variantAnalyses.get(variantAnalysisId);
|
||||
if (!variantAnalysis) {
|
||||
throw new Error(`No variant analysis with id: ${variantAnalysisId}`);
|
||||
}
|
||||
|
||||
await this.variantAnalysisResultsManager.loadResults(variantAnalysisId, repositoryFullName);
|
||||
}
|
||||
|
||||
private async onVariantAnalysisUpdated(variantAnalysis: VariantAnalysis | undefined): Promise<void> {
|
||||
if (!variantAnalysis) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.variantAnalyses.set(variantAnalysis.id, variantAnalysis);
|
||||
|
||||
await this.getView(variantAnalysis.id)?.updateView(variantAnalysis);
|
||||
}
|
||||
|
||||
public onVariantAnalysisSubmitted(variantAnalysis: VariantAnalysis): void {
|
||||
this._onVariantAnalysisAdded.fire(variantAnalysis);
|
||||
}
|
||||
|
||||
private async onRepoResultLoaded(repositoryResult: VariantAnalysisScannedRepositoryResult): Promise<void> {
|
||||
await this.getView(repositoryResult.variantAnalysisId)?.sendRepositoryResults([repositoryResult]);
|
||||
}
|
||||
|
||||
private async onRepoStateUpdated(variantAnalysisId: number, repoState: VariantAnalysisScannedRepositoryState): Promise<void> {
|
||||
await this.getView(variantAnalysisId)?.updateRepoState(repoState);
|
||||
}
|
||||
|
||||
public async monitorVariantAnalysis(
|
||||
variantAnalysis: VariantAnalysis,
|
||||
cancellationToken: CancellationToken
|
||||
): Promise<void> {
|
||||
await this.variantAnalysisMonitor.monitorVariantAnalysis(variantAnalysis, cancellationToken);
|
||||
}
|
||||
|
||||
public async autoDownloadVariantAnalysisResult(
|
||||
scannedRepo: ApiVariantAnalysisScannedRepository,
|
||||
variantAnalysisSummary: VariantAnalysisApiResponse,
|
||||
cancellationToken: CancellationToken
|
||||
): Promise<void> {
|
||||
const repoState = {
|
||||
repositoryId: scannedRepo.repository.id,
|
||||
downloadStatus: VariantAnalysisScannedRepositoryDownloadStatus.Pending,
|
||||
};
|
||||
|
||||
await this.onRepoStateUpdated(variantAnalysisSummary.id, repoState);
|
||||
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
if (!credentials) { throw Error('Error authenticating with GitHub'); }
|
||||
|
||||
if (cancellationToken && cancellationToken.isCancellationRequested) {
|
||||
repoState.downloadStatus = VariantAnalysisScannedRepositoryDownloadStatus.Failed;
|
||||
await this.onRepoStateUpdated(variantAnalysisSummary.id, repoState);
|
||||
return;
|
||||
}
|
||||
|
||||
let repoTask: VariantAnalysisRepoTask;
|
||||
try {
|
||||
repoTask = await ghApiClient.getVariantAnalysisRepo(
|
||||
credentials,
|
||||
variantAnalysisSummary.controller_repo.id,
|
||||
variantAnalysisSummary.id,
|
||||
scannedRepo.repository.id
|
||||
);
|
||||
} catch (e) {
|
||||
repoState.downloadStatus = VariantAnalysisScannedRepositoryDownloadStatus.Failed;
|
||||
await this.onRepoStateUpdated(variantAnalysisSummary.id, repoState);
|
||||
throw new Error(`Could not download the results for variant analysis with id: ${variantAnalysisSummary.id}. Error: ${getErrorMessage(e)}`);
|
||||
}
|
||||
|
||||
if (repoTask.artifact_url) {
|
||||
repoState.downloadStatus = VariantAnalysisScannedRepositoryDownloadStatus.InProgress;
|
||||
await this.onRepoStateUpdated(variantAnalysisSummary.id, repoState);
|
||||
|
||||
await this.variantAnalysisResultsManager.download(credentials, variantAnalysisSummary.id, repoTask);
|
||||
}
|
||||
|
||||
repoState.downloadStatus = VariantAnalysisScannedRepositoryDownloadStatus.Succeeded;
|
||||
await this.onRepoStateUpdated(variantAnalysisSummary.id, repoState);
|
||||
}
|
||||
|
||||
public async promptOpenVariantAnalysis() {
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
if (!credentials) { throw Error('Error authenticating with GitHub'); }
|
||||
|
||||
const controllerRepo = await getControllerRepo(credentials);
|
||||
|
||||
const variantAnalysisIdString = await window.showInputBox({
|
||||
title: 'Enter the variant analysis ID',
|
||||
});
|
||||
if (!variantAnalysisIdString) {
|
||||
return;
|
||||
}
|
||||
const variantAnalysisId = parseInt(variantAnalysisIdString, 10);
|
||||
|
||||
const variantAnalysisResponse = await ghApiClient.getVariantAnalysis(credentials, controllerRepo.id, variantAnalysisId);
|
||||
|
||||
const processedVariantAnalysis = processUpdatedVariantAnalysis({
|
||||
// We don't really know these values, so just fill in some placeholder values
|
||||
query: {
|
||||
name: `Variant analysis ${variantAnalysisId}`,
|
||||
filePath: `variant_analysis_${variantAnalysisId}.ql`,
|
||||
language: variantAnalysisResponse.query_language as VariantAnalysisQueryLanguage,
|
||||
text: '',
|
||||
},
|
||||
databases: {},
|
||||
executionStartTime: 0,
|
||||
}, variantAnalysisResponse);
|
||||
|
||||
void commands.executeCommand('codeQL.openVariantAnalysisView', processedVariantAnalysis.id);
|
||||
void commands.executeCommand('codeQL.monitorVariantAnalysis', processedVariantAnalysis);
|
||||
|
||||
this._onVariantAnalysisAdded.fire(processedVariantAnalysis);
|
||||
}
|
||||
}
|
||||
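The manager above relies on several `codeQL.*` commands being registered elsewhere in the extension (`openVariantAnalysisView`, `monitorVariantAnalysis`, `autoDownloadVariantAnalysisResult`, `loadVariantAnalysisRepoResults`). That registration is not part of this hunk; a hedged sketch of the two simplest registrations, assuming the real wiring lives in the extension's activation code:

```typescript
import { commands, ExtensionContext } from 'vscode';
import { VariantAnalysisManager } from './variant-analysis-manager';

// Hypothetical wiring: the actual registrations are done in the extension's activation
// code, and the monitor/download commands additionally receive a cancellation token.
export function registerVariantAnalysisCommands(
  ctx: ExtensionContext,
  manager: VariantAnalysisManager,
): void {
  ctx.subscriptions.push(
    commands.registerCommand('codeQL.openVariantAnalysisView', (id: number) =>
      manager.showView(id),
    ),
    commands.registerCommand(
      'codeQL.loadVariantAnalysisRepoResults',
      (id: number, repositoryFullName: string) => manager.loadResults(id, repositoryFullName),
    ),
  );
}
```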
@@ -0,0 +1,100 @@
|
||||
import { ExtensionContext, CancellationToken, commands, EventEmitter } from 'vscode';
|
||||
import { Credentials } from '../authentication';
|
||||
import { Logger } from '../logging';
|
||||
import * as ghApiClient from './gh-api/gh-api-client';
|
||||
|
||||
import { VariantAnalysis, VariantAnalysisStatus } from './shared/variant-analysis';
|
||||
import {
|
||||
VariantAnalysis as VariantAnalysisApiResponse
|
||||
} from './gh-api/variant-analysis';
|
||||
import { VariantAnalysisMonitorResult } from './shared/variant-analysis-monitor-result';
|
||||
import { processFailureReason, processUpdatedVariantAnalysis } from './variant-analysis-processor';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
|
||||
export class VariantAnalysisMonitor extends DisposableObject {
|
||||
// With a sleep of 5 seconds, the maximum number of attempts (17,280)
// amounts to roughly one day's worth of monitoring (17,280 * 5 s = 86,400 s = 24 h).
|
||||
public static maxAttemptCount = 17280;
|
||||
public static sleepTime = 5000;
|
||||
|
||||
private readonly _onVariantAnalysisChange = this.push(new EventEmitter<VariantAnalysis | undefined>());
|
||||
readonly onVariantAnalysisChange = this._onVariantAnalysisChange.event;
|
||||
|
||||
constructor(
|
||||
private readonly extensionContext: ExtensionContext,
|
||||
private readonly logger: Logger
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public async monitorVariantAnalysis(
|
||||
variantAnalysis: VariantAnalysis,
|
||||
cancellationToken: CancellationToken
|
||||
): Promise<VariantAnalysisMonitorResult> {
|
||||
|
||||
const credentials = await Credentials.initialize(this.extensionContext);
|
||||
if (!credentials) {
|
||||
throw Error('Error authenticating with GitHub');
|
||||
}
|
||||
|
||||
let variantAnalysisSummary: VariantAnalysisApiResponse;
|
||||
let attemptCount = 0;
|
||||
const scannedReposDownloaded: number[] = [];
|
||||
|
||||
this._onVariantAnalysisChange.fire(variantAnalysis);
|
||||
|
||||
while (attemptCount <= VariantAnalysisMonitor.maxAttemptCount) {
|
||||
await this.sleep(VariantAnalysisMonitor.sleepTime);
|
||||
|
||||
if (cancellationToken && cancellationToken.isCancellationRequested) {
|
||||
return { status: 'Cancelled', error: 'Variant Analysis was canceled.' };
|
||||
}
|
||||
|
||||
variantAnalysisSummary = await ghApiClient.getVariantAnalysis(
|
||||
credentials,
|
||||
variantAnalysis.controllerRepoId,
|
||||
variantAnalysis.id
|
||||
);
|
||||
|
||||
if (variantAnalysisSummary.failure_reason) {
|
||||
variantAnalysis.status = VariantAnalysisStatus.Failed;
|
||||
variantAnalysis.failureReason = processFailureReason(variantAnalysisSummary.failure_reason);
|
||||
|
||||
this._onVariantAnalysisChange.fire(variantAnalysis);
|
||||
|
||||
return {
|
||||
status: 'Failed',
|
||||
error: `Variant Analysis has failed: ${variantAnalysisSummary.failure_reason}`,
|
||||
variantAnalysis: variantAnalysis
|
||||
};
|
||||
}
|
||||
|
||||
variantAnalysis = processUpdatedVariantAnalysis(variantAnalysis, variantAnalysisSummary);
|
||||
|
||||
this._onVariantAnalysisChange.fire(variantAnalysis);
|
||||
|
||||
void this.logger.log('****** Retrieved variant analysis ' + JSON.stringify(variantAnalysisSummary));
|
||||
|
||||
if (variantAnalysisSummary.scanned_repositories) {
|
||||
variantAnalysisSummary.scanned_repositories.forEach(scannedRepo => {
|
||||
if (!scannedReposDownloaded.includes(scannedRepo.repository.id) && scannedRepo.analysis_status === 'succeeded') {
|
||||
void commands.executeCommand('codeQL.autoDownloadVariantAnalysisResult', scannedRepo, variantAnalysisSummary);
|
||||
scannedReposDownloaded.push(scannedRepo.repository.id);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (variantAnalysisSummary.status === 'completed') {
|
||||
break;
|
||||
}
|
||||
|
||||
attemptCount++;
|
||||
}
|
||||
|
||||
return { status: 'CompletedSuccessfully', scannedReposDownloaded: scannedReposDownloaded };
|
||||
}
|
||||
|
||||
private async sleep(ms: number) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,173 @@
|
||||
import {
|
||||
VariantAnalysis as ApiVariantAnalysis,
|
||||
VariantAnalysisScannedRepository as ApiVariantAnalysisScannedRepository,
|
||||
VariantAnalysisSkippedRepositories as ApiVariantAnalysisSkippedRepositories,
|
||||
VariantAnalysisRepoStatus as ApiVariantAnalysisRepoStatus,
|
||||
VariantAnalysisFailureReason as ApiVariantAnalysisFailureReason,
|
||||
VariantAnalysisStatus as ApiVariantAnalysisStatus,
|
||||
VariantAnalysisSkippedRepositoryGroup as ApiVariantAnalysisSkippedRepositoryGroup,
|
||||
VariantAnalysisNotFoundRepositoryGroup as ApiVariantAnalysisNotFoundRepositoryGroup
|
||||
} from './gh-api/variant-analysis';
|
||||
import {
|
||||
VariantAnalysis,
|
||||
VariantAnalysisFailureReason,
|
||||
VariantAnalysisScannedRepository,
|
||||
VariantAnalysisSkippedRepositories,
|
||||
VariantAnalysisStatus,
|
||||
VariantAnalysisRepoStatus,
|
||||
VariantAnalysisSubmission,
|
||||
VariantAnalysisSkippedRepositoryGroup
|
||||
} from './shared/variant-analysis';
|
||||
|
||||
export function processVariantAnalysis(
|
||||
submission: VariantAnalysisSubmission,
|
||||
response: ApiVariantAnalysis
|
||||
): VariantAnalysis {
|
||||
return processUpdatedVariantAnalysis({
|
||||
query: {
|
||||
name: submission.query.name,
|
||||
filePath: submission.query.filePath,
|
||||
language: submission.query.language,
|
||||
text: submission.query.text,
|
||||
},
|
||||
databases: submission.databases,
|
||||
executionStartTime: submission.startTime
|
||||
}, response);
|
||||
}
|
||||
|
||||
export function processUpdatedVariantAnalysis(
|
||||
previousVariantAnalysis: Pick<VariantAnalysis, 'query' | 'databases' | 'executionStartTime'>,
|
||||
response: ApiVariantAnalysis
|
||||
): VariantAnalysis {
|
||||
let scannedRepos: VariantAnalysisScannedRepository[] = [];
|
||||
let skippedRepos: VariantAnalysisSkippedRepositories = {};
|
||||
|
||||
if (response.scanned_repositories) {
|
||||
scannedRepos = processScannedRepositories(response.scanned_repositories as ApiVariantAnalysisScannedRepository[]);
|
||||
}
|
||||
|
||||
if (response.skipped_repositories) {
|
||||
skippedRepos = processSkippedRepositories(response.skipped_repositories as ApiVariantAnalysisSkippedRepositories);
|
||||
}
|
||||
|
||||
const variantAnalysis: VariantAnalysis = {
|
||||
id: response.id,
|
||||
controllerRepoId: response.controller_repo.id,
|
||||
query: previousVariantAnalysis.query,
|
||||
databases: previousVariantAnalysis.databases,
|
||||
executionStartTime: previousVariantAnalysis.executionStartTime,
|
||||
createdAt: response.created_at,
|
||||
updatedAt: response.updated_at,
|
||||
status: processApiStatus(response.status),
|
||||
completedAt: response.completed_at,
|
||||
actionsWorkflowRunId: response.actions_workflow_run_id,
|
||||
scannedRepos: scannedRepos,
|
||||
skippedRepos: skippedRepos
|
||||
};
|
||||
|
||||
if (response.failure_reason) {
|
||||
variantAnalysis.failureReason = processFailureReason(response.failure_reason);
|
||||
}
|
||||
|
||||
return variantAnalysis;
|
||||
}
|
||||
|
||||
function processScannedRepositories(
|
||||
scannedRepos: ApiVariantAnalysisScannedRepository[]
|
||||
): VariantAnalysisScannedRepository[] {
|
||||
return scannedRepos.map(scannedRepo => {
|
||||
return {
|
||||
repository: {
|
||||
id: scannedRepo.repository.id,
|
||||
fullName: scannedRepo.repository.full_name,
|
||||
private: scannedRepo.repository.private,
|
||||
},
|
||||
analysisStatus: processApiRepoStatus(scannedRepo.analysis_status),
|
||||
resultCount: scannedRepo.result_count,
|
||||
artifactSizeInBytes: scannedRepo.artifact_size_in_bytes,
|
||||
failureMessage: scannedRepo.failure_message
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
function processSkippedRepositories(
|
||||
skippedRepos: ApiVariantAnalysisSkippedRepositories
|
||||
): VariantAnalysisSkippedRepositories {
|
||||
|
||||
return {
|
||||
accessMismatchRepos: processRepoGroup(skippedRepos.access_mismatch_repos),
|
||||
notFoundRepos: processNotFoundRepoGroup(skippedRepos.not_found_repo_nwos),
|
||||
noCodeqlDbRepos: processRepoGroup(skippedRepos.no_codeql_db_repos),
|
||||
overLimitRepos: processRepoGroup(skippedRepos.over_limit_repos)
|
||||
};
|
||||
}
|
||||
|
||||
function processRepoGroup(repoGroup: ApiVariantAnalysisSkippedRepositoryGroup | undefined): VariantAnalysisSkippedRepositoryGroup | undefined {
|
||||
if (!repoGroup) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const repos = repoGroup.repositories.map(repo => {
|
||||
return {
|
||||
id: repo.id,
|
||||
fullName: repo.full_name
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
repositoryCount: repoGroup.repository_count,
|
||||
repositories: repos
|
||||
};
|
||||
}
|
||||
|
||||
function processNotFoundRepoGroup(repoGroup: ApiVariantAnalysisNotFoundRepositoryGroup | undefined): VariantAnalysisSkippedRepositoryGroup | undefined {
|
||||
if (!repoGroup) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const repo_full_names = repoGroup.repository_full_names.map(nwo => {
|
||||
return {
|
||||
fullName: nwo
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
repositoryCount: repoGroup.repository_count,
|
||||
repositories: repo_full_names
|
||||
};
|
||||
}
|
||||
|
||||
function processApiRepoStatus(analysisStatus: ApiVariantAnalysisRepoStatus): VariantAnalysisRepoStatus {
|
||||
switch (analysisStatus) {
|
||||
case 'pending':
|
||||
return VariantAnalysisRepoStatus.Pending;
|
||||
case 'in_progress':
|
||||
return VariantAnalysisRepoStatus.InProgress;
|
||||
case 'succeeded':
|
||||
return VariantAnalysisRepoStatus.Succeeded;
|
||||
case 'failed':
|
||||
return VariantAnalysisRepoStatus.Failed;
|
||||
case 'canceled':
|
||||
return VariantAnalysisRepoStatus.Canceled;
|
||||
case 'timed_out':
|
||||
return VariantAnalysisRepoStatus.TimedOut;
|
||||
}
|
||||
}
|
||||
|
||||
function processApiStatus(status: ApiVariantAnalysisStatus): VariantAnalysisStatus {
|
||||
switch (status) {
|
||||
case 'in_progress':
|
||||
return VariantAnalysisStatus.InProgress;
|
||||
case 'completed':
|
||||
return VariantAnalysisStatus.Succeeded;
|
||||
}
|
||||
}
|
||||
|
||||
export function processFailureReason(failureReason: ApiVariantAnalysisFailureReason): VariantAnalysisFailureReason {
|
||||
switch (failureReason) {
|
||||
case 'no_repos_queried':
|
||||
return VariantAnalysisFailureReason.NoReposQueried;
|
||||
case 'internal_error':
|
||||
return VariantAnalysisFailureReason.InternalError;
|
||||
}
|
||||
}
|
||||
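A minimal sketch of feeding an API payload through `processVariantAnalysis`. Every value below is invented, and the object shapes are only inferred from the mapping code above, so the casts are deliberately loose.

```typescript
import { processVariantAnalysis } from './variant-analysis-processor';
import { VariantAnalysis as ApiVariantAnalysis } from './gh-api/variant-analysis';
import { VariantAnalysisQueryLanguage, VariantAnalysisSubmission } from './shared/variant-analysis';

// Invented submission; the shape of `databases` is not shown in this diff.
const submission = {
  startTime: Date.now(),
  databases: {},
  query: {
    name: 'Example query',
    filePath: '/tmp/example.ql',
    language: 'javascript' as VariantAnalysisQueryLanguage,
    text: 'select 1',
  },
} as unknown as VariantAnalysisSubmission;

// Invented API response with only the fields the mapping reads unconditionally.
const response = {
  id: 42,
  controller_repo: { id: 1 },
  status: 'in_progress',
  created_at: '2022-08-01T00:00:00Z',
  updated_at: '2022-08-01T00:00:00Z',
} as unknown as ApiVariantAnalysis;

const variantAnalysis = processVariantAnalysis(submission, response);
// variantAnalysis.status is VariantAnalysisStatus.InProgress;
// scannedRepos defaults to [] and skippedRepos to {}.
```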
@@ -0,0 +1,191 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
|
||||
import { Credentials } from '../authentication';
|
||||
import { Logger } from '../logging';
|
||||
import { AnalysisAlert, AnalysisRawResults } from './shared/analysis-result';
|
||||
import { sarifParser } from '../sarif-parser';
|
||||
import { extractAnalysisAlerts } from './sarif-processing';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { extractRawResults } from './bqrs-processing';
|
||||
import { VariantAnalysisScannedRepositoryResult } from './shared/variant-analysis';
|
||||
import { DisposableObject, DisposeHandler } from '../pure/disposable-object';
|
||||
import { VariantAnalysisRepoTask } from './gh-api/variant-analysis';
|
||||
import * as ghApiClient from './gh-api/gh-api-client';
|
||||
import { EventEmitter } from 'vscode';
|
||||
import { unzipFile } from '../pure/zip';
|
||||
|
||||
type CacheKey = `${number}/${string}`;
|
||||
|
||||
const createCacheKey = (variantAnalysisId: number, repositoryFullName: string): CacheKey => `${variantAnalysisId}/${repositoryFullName}`;
|
||||
|
||||
export type ResultDownloadedEvent = {
|
||||
variantAnalysisId: number;
|
||||
repoTask: VariantAnalysisRepoTask;
|
||||
}
|
||||
|
||||
export class VariantAnalysisResultsManager extends DisposableObject {
|
||||
private static readonly REPO_TASK_FILENAME = 'repo_task.json';
|
||||
private static readonly RESULTS_DIRECTORY = 'results';
|
||||
|
||||
private readonly cachedResults: Map<CacheKey, VariantAnalysisScannedRepositoryResult>;
|
||||
|
||||
private readonly _onResultDownloaded = this.push(new EventEmitter<ResultDownloadedEvent>());
|
||||
readonly onResultDownloaded = this._onResultDownloaded.event;
|
||||
|
||||
private readonly _onResultLoaded = this.push(new EventEmitter<VariantAnalysisScannedRepositoryResult>());
|
||||
readonly onResultLoaded = this._onResultLoaded.event;
|
||||
|
||||
constructor(
|
||||
private readonly cliServer: CodeQLCliServer,
|
||||
private readonly storagePath: string,
|
||||
private readonly logger: Logger,
|
||||
) {
|
||||
super();
|
||||
this.cachedResults = new Map();
|
||||
}
|
||||
|
||||
public async download(
|
||||
credentials: Credentials,
|
||||
variantAnalysisId: number,
|
||||
repoTask: VariantAnalysisRepoTask,
|
||||
): Promise<void> {
|
||||
if (!repoTask.artifact_url) {
|
||||
throw new Error('Missing artifact URL');
|
||||
}
|
||||
|
||||
const resultDirectory = this.getRepoStorageDirectory(variantAnalysisId, repoTask.repository.full_name);
|
||||
|
||||
const result = await ghApiClient.getVariantAnalysisRepoResult(
|
||||
credentials,
|
||||
repoTask.artifact_url
|
||||
);
|
||||
|
||||
if (!(await fs.pathExists(resultDirectory))) {
|
||||
await fs.mkdir(resultDirectory, { recursive: true });
|
||||
}
|
||||
|
||||
await fs.outputJson(path.join(resultDirectory, VariantAnalysisResultsManager.REPO_TASK_FILENAME), repoTask);
|
||||
|
||||
const zipFilePath = path.join(resultDirectory, 'results.zip');
|
||||
const unzippedFilesDirectory = path.join(resultDirectory, VariantAnalysisResultsManager.RESULTS_DIRECTORY);
|
||||
|
||||
fs.writeFileSync(zipFilePath, Buffer.from(result));
|
||||
await unzipFile(zipFilePath, unzippedFilesDirectory);
|
||||
|
||||
this._onResultDownloaded.fire({
|
||||
variantAnalysisId,
|
||||
repoTask,
|
||||
});
|
||||
}
|
||||
|
||||
public async loadResults(
|
||||
variantAnalysisId: number,
|
||||
repositoryFullName: string
|
||||
): Promise<VariantAnalysisScannedRepositoryResult> {
|
||||
const result = this.cachedResults.get(createCacheKey(variantAnalysisId, repositoryFullName));
|
||||
|
||||
return result ?? await this.loadResultsIntoMemory(variantAnalysisId, repositoryFullName);
|
||||
}
|
||||
|
||||
private async loadResultsIntoMemory(
|
||||
variantAnalysisId: number,
|
||||
repositoryFullName: string,
|
||||
): Promise<VariantAnalysisScannedRepositoryResult> {
|
||||
const result = await this.loadResultsFromStorage(variantAnalysisId, repositoryFullName);
|
||||
this.cachedResults.set(createCacheKey(variantAnalysisId, repositoryFullName), result);
|
||||
this._onResultLoaded.fire(result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private async loadResultsFromStorage(
|
||||
variantAnalysisId: number,
|
||||
repositoryFullName: string,
|
||||
): Promise<VariantAnalysisScannedRepositoryResult> {
|
||||
if (!(await this.isVariantAnalysisRepoDownloaded(variantAnalysisId, repositoryFullName))) {
|
||||
throw new Error('Variant analysis results not downloaded');
|
||||
}
|
||||
|
||||
const storageDirectory = this.getRepoStorageDirectory(variantAnalysisId, repositoryFullName);
|
||||
|
||||
const repoTask: VariantAnalysisRepoTask = await fs.readJson(path.join(storageDirectory, VariantAnalysisResultsManager.REPO_TASK_FILENAME));
|
||||
|
||||
if (!repoTask.database_commit_sha || !repoTask.source_location_prefix) {
throw new Error('Missing database commit SHA or source location prefix');
|
||||
}
|
||||
|
||||
const fileLinkPrefix = this.createGitHubDotcomFileLinkPrefix(repoTask.repository.full_name, repoTask.database_commit_sha);
|
||||
|
||||
const resultsDirectory = path.join(storageDirectory, VariantAnalysisResultsManager.RESULTS_DIRECTORY);
|
||||
const sarifPath = path.join(resultsDirectory, 'results.sarif');
|
||||
const bqrsPath = path.join(resultsDirectory, 'results.bqrs');
|
||||
if (await fs.pathExists(sarifPath)) {
|
||||
const interpretedResults = await this.readSarifResults(sarifPath, fileLinkPrefix);
|
||||
|
||||
return {
|
||||
variantAnalysisId,
|
||||
repositoryId: repoTask.repository.id,
|
||||
interpretedResults,
|
||||
};
|
||||
}
|
||||
|
||||
if (await fs.pathExists(bqrsPath)) {
|
||||
const rawResults = await this.readBqrsResults(bqrsPath, fileLinkPrefix, repoTask.source_location_prefix);
|
||||
|
||||
return {
|
||||
variantAnalysisId,
|
||||
repositoryId: repoTask.repository.id,
|
||||
rawResults,
|
||||
};
|
||||
}
|
||||
|
||||
throw new Error('Missing results file');
|
||||
}
|
||||
|
||||
private async isVariantAnalysisRepoDownloaded(
|
||||
variantAnalysisId: number,
|
||||
repositoryFullName: string,
|
||||
): Promise<boolean> {
|
||||
return await fs.pathExists(this.getRepoStorageDirectory(variantAnalysisId, repositoryFullName));
|
||||
}
|
||||
|
||||
private async readBqrsResults(filePath: string, fileLinkPrefix: string, sourceLocationPrefix: string): Promise<AnalysisRawResults> {
|
||||
return await extractRawResults(this.cliServer, this.logger, filePath, fileLinkPrefix, sourceLocationPrefix);
|
||||
}
|
||||
|
||||
private async readSarifResults(filePath: string, fileLinkPrefix: string): Promise<AnalysisAlert[]> {
|
||||
const sarifLog = await sarifParser(filePath);
|
||||
|
||||
const processedSarif = extractAnalysisAlerts(sarifLog, fileLinkPrefix);
|
||||
if (processedSarif.errors.length) {
|
||||
void this.logger.log(`Error processing SARIF file: ${os.EOL}${processedSarif.errors.join(os.EOL)}`);
|
||||
}
|
||||
|
||||
return processedSarif.alerts;
|
||||
}
|
||||
|
||||
private getStorageDirectory(variantAnalysisId: number): string {
|
||||
return path.join(
|
||||
this.storagePath,
|
||||
`${variantAnalysisId}`
|
||||
);
|
||||
}
|
||||
|
||||
public getRepoStorageDirectory(variantAnalysisId: number, fullName: string): string {
|
||||
return path.join(
|
||||
this.getStorageDirectory(variantAnalysisId),
|
||||
fullName
|
||||
);
|
||||
}
|
||||
|
||||
private createGitHubDotcomFileLinkPrefix(fullName: string, sha: string): string {
|
||||
return `https://github.com/${fullName}/blob/${sha}`;
|
||||
}
|
||||
|
||||
public dispose(disposeHandler?: DisposeHandler) {
|
||||
super.dispose(disposeHandler);
|
||||
|
||||
this.cachedResults.clear();
|
||||
}
|
||||
}
|
||||
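Putting the path helpers of the results manager together, a sketch of where a repository's results end up on disk; `cliServer`, `storagePath` and `logger` are assumed to be supplied by the extension's activation code.

```typescript
import { CodeQLCliServer } from '../cli';
import { Logger } from '../logging';
import { VariantAnalysisResultsManager } from './variant-analysis-results-manager';

// Assumed to be provided elsewhere by the extension.
declare const cliServer: CodeQLCliServer;
declare const storagePath: string;
declare const logger: Logger;

const resultsManager = new VariantAnalysisResultsManager(cliServer, storagePath, logger);

resultsManager.getRepoStorageDirectory(42, 'github/codeql');
// => `${storagePath}/42/github/codeql`
//
// After download() completes for that repository, the directory contains:
//   repo_task.json - the VariantAnalysisRepoTask returned by the API
//   results.zip    - the downloaded artifact
//   results/       - the unzipped artifact (results.sarif or results.bqrs),
//                    which loadResults() later reads back into memory
```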
@@ -0,0 +1,13 @@
import { VariantAnalysis } from './shared/variant-analysis';

export interface VariantAnalysisViewInterface {
  variantAnalysisId: number;
  openView(): Promise<void>;
}

export interface VariantAnalysisViewManager<T extends VariantAnalysisViewInterface> {
  registerView(view: T): void;
  unregisterView(view: T): void;

  getVariantAnalysis(variantAnalysisId: number): Promise<VariantAnalysis | undefined>;
}
@@ -0,0 +1,48 @@
|
||||
import { ExtensionContext, WebviewPanel, WebviewPanelSerializer } from 'vscode';
|
||||
import { VariantAnalysisView } from './variant-analysis-view';
|
||||
import { VariantAnalysisState } from '../pure/interface-types';
|
||||
import { VariantAnalysisViewManager } from './variant-analysis-view-manager';
|
||||
|
||||
export class VariantAnalysisViewSerializer implements WebviewPanelSerializer {
|
||||
private resolvePromises: ((value: VariantAnalysisViewManager<VariantAnalysisView>) => void)[] = [];
|
||||
|
||||
private manager?: VariantAnalysisViewManager<VariantAnalysisView>;
|
||||
|
||||
public constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
) { }
|
||||
|
||||
onExtensionLoaded(manager: VariantAnalysisViewManager<VariantAnalysisView>): void {
|
||||
this.manager = manager;
|
||||
|
||||
this.resolvePromises.forEach((resolve) => resolve(manager));
|
||||
this.resolvePromises = [];
|
||||
}
|
||||
|
||||
async deserializeWebviewPanel(webviewPanel: WebviewPanel, state: unknown): Promise<void> {
|
||||
if (!state || typeof state !== 'object') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!('variantAnalysisId' in state)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const variantAnalysisState: VariantAnalysisState = state as VariantAnalysisState;
|
||||
|
||||
const manager = await this.waitForExtensionFullyLoaded();
|
||||
|
||||
const view = new VariantAnalysisView(this.ctx, variantAnalysisState.variantAnalysisId, manager);
|
||||
await view.restoreView(webviewPanel);
|
||||
}
|
||||
|
||||
private waitForExtensionFullyLoaded(): Promise<VariantAnalysisViewManager<VariantAnalysisView>> {
|
||||
if (this.manager) {
|
||||
return Promise.resolve(this.manager);
|
||||
}
|
||||
|
||||
return new Promise<VariantAnalysisViewManager<VariantAnalysisView>>((resolve) => {
|
||||
this.resolvePromises.push(resolve);
|
||||
});
|
||||
}
|
||||
}
|
||||
162
extensions/ql-vscode/src/remote-queries/variant-analysis-view.ts
Normal file
@@ -0,0 +1,162 @@
|
||||
import { commands, ExtensionContext, Uri, ViewColumn, window as Window, workspace } from 'vscode';
|
||||
import { URLSearchParams } from 'url';
|
||||
import { AbstractWebview, WebviewPanelConfig } from '../abstract-webview';
|
||||
import { logger } from '../logging';
|
||||
import { FromVariantAnalysisMessage, ToVariantAnalysisMessage } from '../pure/interface-types';
|
||||
import { assertNever } from '../pure/helpers-pure';
|
||||
import {
|
||||
VariantAnalysis,
|
||||
VariantAnalysisScannedRepositoryResult,
|
||||
VariantAnalysisScannedRepositoryState,
|
||||
} from './shared/variant-analysis';
|
||||
import { VariantAnalysisViewInterface, VariantAnalysisViewManager } from './variant-analysis-view-manager';
|
||||
import { showAndLogWarningMessage } from '../helpers';
|
||||
|
||||
export class VariantAnalysisView extends AbstractWebview<ToVariantAnalysisMessage, FromVariantAnalysisMessage> implements VariantAnalysisViewInterface {
|
||||
public static readonly viewType = 'codeQL.variantAnalysis';
|
||||
|
||||
public constructor(
|
||||
ctx: ExtensionContext,
|
||||
public readonly variantAnalysisId: number,
|
||||
private readonly manager: VariantAnalysisViewManager<VariantAnalysisView>,
|
||||
) {
|
||||
super(ctx);
|
||||
|
||||
manager.registerView(this);
|
||||
}
|
||||
|
||||
public async openView() {
|
||||
this.getPanel().reveal(undefined, true);
|
||||
|
||||
await this.waitForPanelLoaded();
|
||||
}
|
||||
|
||||
public async updateView(variantAnalysis: VariantAnalysis): Promise<void> {
|
||||
if (!this.isShowingPanel) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.postMessage({
|
||||
t: 'setVariantAnalysis',
|
||||
variantAnalysis,
|
||||
});
|
||||
}
|
||||
|
||||
public async updateRepoState(repoState: VariantAnalysisScannedRepositoryState): Promise<void> {
|
||||
if (!this.isShowingPanel) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.postMessage({
|
||||
t: 'setRepoStates',
|
||||
repoStates: [repoState],
|
||||
});
|
||||
}
|
||||
|
||||
public async sendRepositoryResults(repositoryResult: VariantAnalysisScannedRepositoryResult[]): Promise<void> {
|
||||
if (!this.isShowingPanel) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.postMessage({
|
||||
t: 'setRepoResults',
|
||||
repoResults: repositoryResult,
|
||||
});
|
||||
}
|
||||
|
||||
protected getPanelConfig(): WebviewPanelConfig {
|
||||
return {
|
||||
viewId: VariantAnalysisView.viewType,
|
||||
title: `CodeQL Query Results for ${this.variantAnalysisId}`,
|
||||
viewColumn: ViewColumn.Active,
|
||||
preserveFocus: true,
|
||||
view: 'variant-analysis',
|
||||
};
|
||||
}
|
||||
|
||||
protected onPanelDispose(): void {
|
||||
this.manager.unregisterView(this);
|
||||
}
|
||||
|
||||
protected async onMessage(msg: FromVariantAnalysisMessage): Promise<void> {
|
||||
switch (msg.t) {
|
||||
case 'viewLoaded':
|
||||
await this.onWebViewLoaded();
|
||||
|
||||
break;
|
||||
case 'stopVariantAnalysis':
|
||||
void logger.log(`Stop variant analysis: ${msg.variantAnalysisId}`);
|
||||
break;
|
||||
case 'requestRepositoryResults':
|
||||
void commands.executeCommand('codeQL.loadVariantAnalysisRepoResults', this.variantAnalysisId, msg.repositoryFullName);
|
||||
break;
|
||||
case 'openQueryFile':
|
||||
await this.openQueryFile();
|
||||
break;
|
||||
case 'openQueryText':
|
||||
await this.openQueryText();
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
}
|
||||
}
|
||||
|
||||
protected async onWebViewLoaded() {
|
||||
super.onWebViewLoaded();
|
||||
|
||||
void logger.log('Variant analysis view loaded');
|
||||
|
||||
const variantAnalysis = await this.manager.getVariantAnalysis(this.variantAnalysisId);
|
||||
|
||||
if (!variantAnalysis) {
|
||||
void showAndLogWarningMessage('Unable to load variant analysis');
|
||||
return;
|
||||
}
|
||||
|
||||
await this.postMessage({
|
||||
t: 'setVariantAnalysis',
|
||||
variantAnalysis,
|
||||
});
|
||||
}
|
||||
|
||||
private async openQueryFile(): Promise<void> {
|
||||
const variantAnalysis = await this.manager.getVariantAnalysis(this.variantAnalysisId);
|
||||
|
||||
if (!variantAnalysis) {
|
||||
void showAndLogWarningMessage('Could not open variant analysis query file');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const textDocument = await workspace.openTextDocument(variantAnalysis.query.filePath);
|
||||
await Window.showTextDocument(textDocument, ViewColumn.One);
|
||||
} catch (error) {
|
||||
void showAndLogWarningMessage(`Could not open file: ${variantAnalysis.query.filePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
private async openQueryText(): Promise<void> {
|
||||
const variantAnalysis = await this.manager.getVariantAnalysis(this.variantAnalysisId);
|
||||
if (!variantAnalysis) {
|
||||
void showAndLogWarningMessage('Could not open variant analysis query text. Variant analysis not found.');
|
||||
return;
|
||||
}
|
||||
|
||||
const filename = variantAnalysis.query.filePath;
|
||||
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
variantAnalysisId: variantAnalysis.id.toString(),
|
||||
});
|
||||
const uri = Uri.from({
|
||||
scheme: 'codeql-variant-analysis',
|
||||
path: filename,
|
||||
query: params.toString(),
|
||||
});
|
||||
const doc = await workspace.openTextDocument(uri);
|
||||
await Window.showTextDocument(doc, { preview: false });
|
||||
} catch (error) {
|
||||
void showAndLogWarningMessage('Could not open variant analysis query text. Failed to open text document.');
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,178 +0,0 @@
|
||||
import { XCircleIcon } from '@primer/octicons-react';
|
||||
import { Overlay } from '@primer/react';
|
||||
import { VSCodeDropdown, VSCodeLink, VSCodeOption, VSCodeTag } from '@vscode/webview-ui-toolkit/react';
|
||||
import * as React from 'react';
|
||||
import { ChangeEvent, useRef, useState } from 'react';
|
||||
import styled from 'styled-components';
|
||||
import { CodeFlow, AnalysisMessage, ResultSeverity } from '../shared/analysis-result';
|
||||
import FileCodeSnippet from './FileCodeSnippet';
|
||||
import SectionTitle from './SectionTitle';
|
||||
import VerticalSpace from './VerticalSpace';
|
||||
|
||||
const StyledCloseButton = styled.button`
|
||||
position: absolute;
|
||||
top: 1em;
|
||||
right: 4em;
|
||||
background-color: var(--vscode-editor-background);
|
||||
color: var(--vscode-editor-foreground);
|
||||
border: none;
|
||||
&:focus-visible {
|
||||
outline: none
|
||||
}
|
||||
`;
|
||||
|
||||
const OverlayContainer = styled.div`
|
||||
padding: 1em;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
padding: 2em;
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
background-color: var(--vscode-editor-background);
|
||||
color: var(--vscode-editor-foreground);
|
||||
overflow-y: scroll;
|
||||
`;
|
||||
|
||||
const CloseButton = ({ onClick }: { onClick: () => void }) => (
|
||||
<StyledCloseButton onClick={onClick} tabIndex={-1} >
|
||||
<XCircleIcon size={24} />
|
||||
</StyledCloseButton>
|
||||
);
|
||||
|
||||
const CodePath = ({
|
||||
codeFlow,
|
||||
message,
|
||||
severity
|
||||
}: {
|
||||
codeFlow: CodeFlow;
|
||||
message: AnalysisMessage;
|
||||
severity: ResultSeverity;
|
||||
}) => {
|
||||
return <>
|
||||
{codeFlow.threadFlows.map((threadFlow, index) =>
|
||||
<div key={`thread-flow-${index}`} style={{ maxWidth: '55em' }}>
|
||||
{index !== 0 && <VerticalSpace size={3} />}
|
||||
|
||||
<div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center' }}>
|
||||
<div style={{ flexGrow: 1, padding: 0, border: 'none' }}>
|
||||
<SectionTitle>Step {index + 1}</SectionTitle>
|
||||
</div>
|
||||
{index === 0 &&
|
||||
<div style={{ padding: 0, border: 'none' }}>
|
||||
<VSCodeTag>Source</VSCodeTag>
|
||||
</div>
|
||||
}
|
||||
{index === codeFlow.threadFlows.length - 1 &&
|
||||
<div style={{ padding: 0, border: 'none' }}>
|
||||
<VSCodeTag>Sink</VSCodeTag>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
|
||||
<VerticalSpace size={2} />
|
||||
<FileCodeSnippet
|
||||
fileLink={threadFlow.fileLink}
|
||||
codeSnippet={threadFlow.codeSnippet}
|
||||
highlightedRegion={threadFlow.highlightedRegion}
|
||||
severity={severity}
|
||||
message={index === codeFlow.threadFlows.length - 1 ? message : threadFlow.message} />
|
||||
</div>
|
||||
)}
|
||||
</>;
|
||||
};
|
||||
|
||||
const getCodeFlowName = (codeFlow: CodeFlow) => {
|
||||
const filePath = codeFlow.threadFlows[codeFlow.threadFlows.length - 1].fileLink.filePath;
|
||||
return filePath.substring(filePath.lastIndexOf('/') + 1);
|
||||
};
|
||||
|
||||
const Menu = ({
|
||||
codeFlows,
|
||||
setSelectedCodeFlow
|
||||
}: {
|
||||
codeFlows: CodeFlow[],
|
||||
setSelectedCodeFlow: (value: React.SetStateAction<CodeFlow>) => void
|
||||
}) => {
|
||||
return <VSCodeDropdown
|
||||
onChange={(event: ChangeEvent<HTMLSelectElement>) => {
|
||||
const selectedOption = event.target;
|
||||
const selectedIndex = selectedOption.value as unknown as number;
|
||||
setSelectedCodeFlow(codeFlows[selectedIndex]);
|
||||
}}
|
||||
>
|
||||
{codeFlows.map((codeFlow, index) =>
|
||||
<VSCodeOption
|
||||
key={`codeflow-${index}'`}
|
||||
value={index}
|
||||
>
|
||||
{getCodeFlowName(codeFlow)}
|
||||
</VSCodeOption>
|
||||
)}
|
||||
</VSCodeDropdown>;
|
||||
};
|
||||
|
||||
const CodePaths = ({
|
||||
codeFlows,
|
||||
ruleDescription,
|
||||
message,
|
||||
severity
|
||||
}: {
|
||||
codeFlows: CodeFlow[],
|
||||
ruleDescription: string,
|
||||
message: AnalysisMessage,
|
||||
severity: ResultSeverity
|
||||
}) => {
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const [selectedCodeFlow, setSelectedCodeFlow] = useState(codeFlows[0]);
|
||||
|
||||
const anchorRef = useRef<HTMLDivElement>(null);
|
||||
const linkRef = useRef<HTMLAnchorElement>(null);
|
||||
|
||||
const closeOverlay = () => setIsOpen(false);
|
||||
|
||||
return (
|
||||
<div ref={anchorRef}>
|
||||
<VSCodeLink
|
||||
onClick={() => setIsOpen(true)}
|
||||
ref={linkRef}
|
||||
sx={{ cursor: 'pointer' }}>
|
||||
Show paths
|
||||
</VSCodeLink>
|
||||
{isOpen && (
|
||||
<Overlay
|
||||
returnFocusRef={linkRef}
|
||||
onEscape={closeOverlay}
|
||||
onClickOutside={closeOverlay}
|
||||
anchorSide="outside-top">
|
||||
<OverlayContainer>
|
||||
<CloseButton onClick={closeOverlay} />
|
||||
|
||||
<SectionTitle>{ruleDescription}</SectionTitle>
|
||||
<VerticalSpace size={2} />
|
||||
|
||||
<div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center' }}>
|
||||
<div style={{ padding: 0, border: 0 }}>
|
||||
{codeFlows.length} paths available: {selectedCodeFlow.threadFlows.length} steps in
|
||||
</div>
|
||||
<div style={{ flexGrow: 1, padding: 0, paddingLeft: '0.2em', border: 'none' }}>
|
||||
<Menu codeFlows={codeFlows} setSelectedCodeFlow={setSelectedCodeFlow} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<VerticalSpace size={2} />
|
||||
<CodePath
|
||||
codeFlow={selectedCodeFlow}
|
||||
severity={severity}
|
||||
message={message} />
|
||||
|
||||
<VerticalSpace size={3} />
|
||||
|
||||
</OverlayContainer>
|
||||
</Overlay>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default CodePaths;
|
||||
@@ -1,261 +0,0 @@
|
||||
import * as React from 'react';
|
||||
import styled from 'styled-components';
|
||||
import { CodeSnippet, FileLink, HighlightedRegion, AnalysisMessage, ResultSeverity } from '../shared/analysis-result';
|
||||
import VerticalSpace from './VerticalSpace';
|
||||
import { createRemoteFileRef } from '../../pure/location-link-utils';
|
||||
import { parseHighlightedLine, shouldHighlightLine } from '../../pure/sarif-utils';
|
||||
import { VSCodeLink } from '@vscode/webview-ui-toolkit/react';
|
||||
|
||||
const borderColor = 'var(--vscode-editor-snippetFinalTabstopHighlightBorder)';
|
||||
const warningColor = '#966C23';
|
||||
const highlightColor = 'var(--vscode-editor-findMatchHighlightBackground)';
|
||||
|
||||
const getSeverityColor = (severity: ResultSeverity) => {
|
||||
switch (severity) {
|
||||
case 'Recommendation':
|
||||
return 'blue';
|
||||
case 'Warning':
|
||||
return warningColor;
|
||||
case 'Error':
|
||||
return 'red';
|
||||
}
|
||||
};
|
||||
|
||||
const replaceSpaceAndTabChar = (text: string) => text.replaceAll(' ', '\u00a0').replaceAll('\t', '\u00a0\u00a0\u00a0\u00a0');
|
||||
|
||||
const Container = styled.div`
|
||||
font-family: var(--vscode-editor-font-family);
|
||||
font-size: small;
|
||||
`;
|
||||
|
||||
const TitleContainer = styled.div`
|
||||
border: 0.1em solid ${borderColor};
|
||||
border-top-left-radius: 0.2em;
|
||||
border-top-right-radius: 0.2em;
|
||||
padding: 0.5em;
|
||||
`;
|
||||
|
||||
const CodeContainer = styled.div`
|
||||
border-left: 0.1em solid ${borderColor};
|
||||
border-right: 0.1em solid ${borderColor};
|
||||
border-bottom: 0.1em solid ${borderColor};
|
||||
border-bottom-left-radius: 0.2em;
|
||||
border-bottom-right-radius: 0.2em;
|
||||
padding-top: 1em;
|
||||
padding-bottom: 1em;
|
||||
`;
|
||||
|
||||
const MessageText = styled.div`
|
||||
font-size: small;
|
||||
padding-left: 0.5em;
|
||||
`;
|
||||
|
||||
const MessageContainer = styled.div`
|
||||
padding-top: 0.5em;
|
||||
padding-bottom: 0.5em;
|
||||
`;
|
||||
|
||||
const PlainCode = ({ text }: { text: string }) => {
|
||||
return <span>{replaceSpaceAndTabChar(text)}</span>;
|
||||
};
|
||||
|
||||
const HighlightedCode = ({ text }: { text: string }) => {
|
||||
return <span style={{ backgroundColor: highlightColor }}>{replaceSpaceAndTabChar(text)}</span>;
|
||||
};
|
||||
|
||||
const Message = ({
|
||||
message,
|
||||
borderLeftColor,
|
||||
children
|
||||
}: {
|
||||
message: AnalysisMessage,
|
||||
borderLeftColor: string,
|
||||
children: React.ReactNode
|
||||
}) => {
|
||||
return <div style={{
|
||||
borderColor: borderColor,
|
||||
borderWidth: '0.1em',
|
||||
borderStyle: 'solid',
|
||||
borderLeftColor: borderLeftColor,
|
||||
borderLeftWidth: '0.3em',
|
||||
paddingTop: '1em',
|
||||
paddingBottom: '1em'
|
||||
}}>
|
||||
<MessageText>
|
||||
{message.tokens.map((token, index) => {
|
||||
switch (token.t) {
|
||||
case 'text':
|
||||
return <span key={`token-${index}`}>{token.text}</span>;
|
||||
case 'location':
|
||||
return <VSCodeLink
|
||||
style={{ fontFamily: 'var(--vscode-editor-font-family)' }}
|
||||
key={`token-${index}`}
|
||||
href={createRemoteFileRef(
|
||||
token.location.fileLink,
|
||||
token.location.highlightedRegion?.startLine,
|
||||
token.location.highlightedRegion?.endLine)}>
|
||||
{token.text}
|
||||
</VSCodeLink>;
|
||||
default:
|
||||
return <></>;
|
||||
}
|
||||
})}
|
||||
{children && <>
|
||||
<VerticalSpace size={2} />
|
||||
{children}
|
||||
</>
|
||||
}
|
||||
</MessageText>
|
||||
</div>;
|
||||
};
|
||||
|
||||
const Code = ({
|
||||
line,
|
||||
lineNumber,
|
||||
highlightedRegion
|
||||
}: {
|
||||
line: string,
|
||||
lineNumber: number,
|
||||
highlightedRegion?: HighlightedRegion
|
||||
}) => {
|
||||
if (!highlightedRegion || !shouldHighlightLine(lineNumber, highlightedRegion)) {
|
||||
return <PlainCode text={line} />;
|
||||
}
|
||||
|
||||
const partiallyHighlightedLine = parseHighlightedLine(line, lineNumber, highlightedRegion);
|
||||
|
||||
return (
|
||||
<>
|
||||
<PlainCode text={partiallyHighlightedLine.plainSection1} />
|
||||
<HighlightedCode text={partiallyHighlightedLine.highlightedSection} />
|
||||
<PlainCode text={partiallyHighlightedLine.plainSection2} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
const Line = ({
|
||||
line,
|
||||
lineIndex,
|
||||
startingLineIndex,
|
||||
highlightedRegion,
|
||||
severity,
|
||||
message,
|
||||
messageChildren
|
||||
}: {
|
||||
line: string,
|
||||
lineIndex: number,
|
||||
startingLineIndex: number,
|
||||
highlightedRegion?: HighlightedRegion,
|
||||
severity?: ResultSeverity,
|
||||
message?: AnalysisMessage,
|
||||
messageChildren?: React.ReactNode,
|
||||
}) => {
|
||||
const shouldShowMessage = message &&
|
||||
severity &&
|
||||
highlightedRegion &&
|
||||
highlightedRegion.endLine == startingLineIndex + lineIndex;
|
||||
|
||||
return <div>
|
||||
<div style={{ display: 'flex' }} >
|
||||
<div style={{
|
||||
borderStyle: 'none',
|
||||
paddingTop: '0.01em',
|
||||
paddingLeft: '0.5em',
|
||||
paddingRight: '0.5em',
|
||||
paddingBottom: '0.2em'
|
||||
}}>
|
||||
{startingLineIndex + lineIndex}
|
||||
</div>
|
||||
<div style={{
|
||||
flexGrow: 1,
|
||||
borderStyle: 'none',
|
||||
paddingTop: '0.01em',
|
||||
paddingLeft: '1.5em',
|
||||
paddingRight: '0.5em',
|
||||
paddingBottom: '0.2em',
|
||||
wordBreak: 'break-word'
|
||||
}}>
|
||||
<Code
|
||||
line={line}
|
||||
lineNumber={startingLineIndex + lineIndex}
|
||||
highlightedRegion={highlightedRegion} />
|
||||
</div>
|
||||
</div>
|
||||
{shouldShowMessage &&
|
||||
<MessageContainer>
|
||||
<Message
|
||||
message={message}
|
||||
borderLeftColor={getSeverityColor(severity)}>
|
||||
{messageChildren}
|
||||
</Message>
|
||||
</MessageContainer>
|
||||
}
|
||||
</div>;
|
||||
};
|
||||
|
||||
const FileCodeSnippet = ({
|
||||
fileLink,
|
||||
codeSnippet,
|
||||
highlightedRegion,
|
||||
severity,
|
||||
message,
|
||||
messageChildren,
|
||||
}: {
|
||||
fileLink: FileLink,
|
||||
codeSnippet?: CodeSnippet,
|
||||
highlightedRegion?: HighlightedRegion,
|
||||
severity?: ResultSeverity,
|
||||
message?: AnalysisMessage,
|
||||
messageChildren?: React.ReactNode,
|
||||
}) => {
|
||||
|
||||
const startingLine = codeSnippet?.startLine || 0;
|
||||
const endingLine = codeSnippet?.endLine || 0;
|
||||
|
||||
const titleFileUri = createRemoteFileRef(
|
||||
fileLink,
|
||||
highlightedRegion?.startLine || startingLine,
|
||||
highlightedRegion?.endLine || endingLine);
|
||||
|
||||
if (!codeSnippet) {
|
||||
return (
|
||||
<Container>
|
||||
<TitleContainer>
|
||||
<VSCodeLink href={titleFileUri}>{fileLink.filePath}</VSCodeLink>
|
||||
</TitleContainer>
|
||||
{message && severity &&
|
||||
<Message
|
||||
message={message}
|
||||
borderLeftColor={getSeverityColor(severity)}>
|
||||
{messageChildren}
|
||||
</Message>}
|
||||
</Container>
|
||||
);
|
||||
}
|
||||
|
||||
const code = codeSnippet.text.split('\n');
|
||||
|
||||
return (
|
||||
<Container>
|
||||
<TitleContainer>
|
||||
<VSCodeLink href={titleFileUri}>{fileLink.filePath}</VSCodeLink>
|
||||
</TitleContainer>
|
||||
<CodeContainer>
|
||||
{code.map((line, index) => (
|
||||
<Line
|
||||
key={`line-${index}`}
|
||||
line={line}
|
||||
lineIndex={index}
|
||||
startingLineIndex={startingLine}
|
||||
highlightedRegion={highlightedRegion}
|
||||
severity={severity}
|
||||
message={message}
|
||||
messageChildren={messageChildren}
|
||||
/>
|
||||
))}
|
||||
</CodeContainer>
|
||||
</Container>
|
||||
);
|
||||
};
|
||||
|
||||
export default FileCodeSnippet;
|
||||
@@ -1,9 +0,0 @@
import styled from 'styled-components';

const ViewTitle = styled.h1`
  font-size: large;
  margin-bottom: 0.5em;
  font-weight: 500;
`;

export default ViewTitle;
595
extensions/ql-vscode/src/run-queries-shared.ts
Normal file
@@ -0,0 +1,595 @@
|
||||
import * as messages from './pure/messages-shared';
|
||||
import * as legacyMessages from './pure/legacy-messages';
|
||||
import { DatabaseInfo, QueryMetadata } from './pure/interface-types';
|
||||
import * as path from 'path';
|
||||
import { createTimestampFile, showAndLogWarningMessage } from './helpers';
|
||||
import {
|
||||
ConfigurationTarget,
|
||||
Range,
|
||||
TextDocument,
|
||||
TextEditor,
|
||||
Uri,
|
||||
window
|
||||
} from 'vscode';
|
||||
import * as config from './config';
|
||||
import { UserCancellationException } from './commandRunner';
|
||||
import * as fs from 'fs-extra';
|
||||
import { ensureMetadataIsComplete, InitialQueryInfo, LocalQueryInfo } from './query-results';
|
||||
import { isQuickQueryPath } from './quick-query';
|
||||
import { nanoid } from 'nanoid';
|
||||
import { CodeQLCliServer } from './cli';
|
||||
import { SELECT_QUERY_NAME } from './contextual/locationFinder';
|
||||
import { DatabaseManager } from './databases';
|
||||
import { DecodedBqrsChunk } from './pure/bqrs-cli-types';
|
||||
import { logger, Logger } from './logging';
|
||||
import { generateSummarySymbolsFile } from './log-insights/summary-parser';
|
||||
import { asError } from './pure/helpers-pure';
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* run-queries-shared.ts
* ---------------------
|
||||
*
|
||||
* Compiling and running QL queries.
|
||||
*/
|
||||
|
||||
export function findQueryLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'query.log');
|
||||
}
|
||||
|
||||
function findQueryEvalLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.jsonl');
|
||||
}
|
||||
|
||||
function findQueryEvalLogSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.summary');
|
||||
}
|
||||
|
||||
function findJsonQueryEvalLogSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.summary.jsonl');
|
||||
}
|
||||
|
||||
function findQueryEvalLogSummarySymbolsFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.summary.symbols.json');
|
||||
}
|
||||
|
||||
function findQueryEvalLogEndSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log-end.summary');
|
||||
}
|
||||
|
||||
|
||||
export class QueryEvaluationInfo {
|
||||
|
||||
/**
|
||||
* Note that in the {@link slurpQueryHistory} method, we create a QueryEvaluationInfo instance
|
||||
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||
*/
|
||||
constructor(
|
||||
public readonly querySaveDir: string,
|
||||
public readonly dbItemPath: string,
|
||||
private readonly databaseHasMetadataFile: boolean,
|
||||
public readonly quickEvalPosition?: messages.Position,
|
||||
public readonly metadata?: QueryMetadata,
|
||||
) {
|
||||
/**/
|
||||
}
|
||||
|
||||
get dilPath() {
|
||||
return path.join(this.querySaveDir, 'results.dil');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the path where the compiled query is stored, if it exists. Note that it only exists when using the legacy query server.
|
||||
*/
|
||||
get compileQueryPath() {
|
||||
return path.join(this.querySaveDir, 'compiledQuery.qlo');
|
||||
}
|
||||
|
||||
get csvPath() {
|
||||
return path.join(this.querySaveDir, 'results.csv');
|
||||
}
|
||||
|
||||
get logPath() {
|
||||
return findQueryLogFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get evalLogPath() {
|
||||
return findQueryEvalLogFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get evalLogSummaryPath() {
|
||||
return findQueryEvalLogSummaryFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get jsonEvalLogSummaryPath() {
|
||||
return findJsonQueryEvalLogSummaryFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get evalLogSummarySymbolsPath() {
|
||||
return findQueryEvalLogSummarySymbolsFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get evalLogEndSummaryPath() {
|
||||
return findQueryEvalLogEndSummaryFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
get resultsPaths() {
|
||||
return {
|
||||
resultsPath: path.join(this.querySaveDir, 'results.bqrs'),
|
||||
interpretedResultsPath: path.join(this.querySaveDir,
|
||||
this.metadata?.kind === 'graph'
|
||||
? 'graphResults'
|
||||
: 'interpretedResults.sarif'
|
||||
),
|
||||
};
|
||||
}
|
||||
getSortedResultSetPath(resultSetName: string) {
|
||||
return path.join(this.querySaveDir, `sortedResults-${resultSetName}.bqrs`);
|
||||
}
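For orientation, a sketch of where the getters above resolve for a single query run; `querySaveDir` and `dbItemPath` are placeholders.

```typescript
// Placeholders standing in for real values.
declare const querySaveDir: string;
declare const dbItemPath: string;

const info = new QueryEvaluationInfo(querySaveDir, dbItemPath, /* databaseHasMetadataFile */ true);

info.logPath;                             // <querySaveDir>/query.log
info.evalLogPath;                         // <querySaveDir>/evaluator-log.jsonl
info.evalLogSummaryPath;                  // <querySaveDir>/evaluator-log.summary
info.jsonEvalLogSummaryPath;              // <querySaveDir>/evaluator-log.summary.jsonl
info.evalLogEndSummaryPath;               // <querySaveDir>/evaluator-log-end.summary
info.resultsPaths.resultsPath;            // <querySaveDir>/results.bqrs
info.resultsPaths.interpretedResultsPath; // interpretedResults.sarif, or graphResults for @kind graph
info.getSortedResultSetPath('#select');   // <querySaveDir>/sortedResults-#select.bqrs
```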
|
||||
|
||||
/**
|
||||
* Creates a file in the query directory that indicates when this query was created.
|
||||
* This is important for keeping track of when queries should be removed.
|
||||
*/
|
||||
async createTimestampFile() {
|
||||
await createTimestampFile(this.querySaveDir);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Holds if this query can in principle produce interpreted results.
|
||||
*/
|
||||
canHaveInterpretedResults(): boolean {
|
||||
if (!this.databaseHasMetadataFile) {
|
||||
void logger.log('Cannot produce interpreted results since the database does not have a .dbinfo or codeql-database.yml file.');
|
||||
return false;
|
||||
}
|
||||
|
||||
const kind = this.metadata?.kind;
|
||||
const hasKind = !!kind;
|
||||
if (!hasKind) {
|
||||
void logger.log('Cannot produce interpreted results since the query does not have @kind metadata.');
|
||||
return false;
|
||||
}
|
||||
|
||||
// Graph queries only return interpreted results if we are in canary mode.
|
||||
if (kind === 'graph') {
|
||||
return config.isCanary();
|
||||
}
|
||||
|
||||
// table is the default query kind. It does not produce interpreted results.
|
||||
// any query kind that is not table can, in principle, produce interpreted results.
|
||||
return kind !== 'table';
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if this query actually has produced interpreted results.
|
||||
*/
|
||||
async hasInterpretedResults(): Promise<boolean> {
|
||||
return fs.pathExists(this.resultsPaths.interpretedResultsPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if this query already has DIL produced
|
||||
*/
|
||||
async hasDil(): Promise<boolean> {
|
||||
return fs.pathExists(this.dilPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if this query already has CSV results produced
|
||||
*/
|
||||
async hasCsv(): Promise<boolean> {
|
||||
return fs.pathExists(this.csvPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the path to the DIL file produced by this query. If the query has not yet produced DIL,
|
||||
* this will first create the DIL file and then return its path.
|
||||
*/
|
||||
async ensureDilPath(cliServer: CodeQLCliServer): Promise<string> {
|
||||
if (await this.hasDil()) {
|
||||
return this.dilPath;
|
||||
}
|
||||
const compiledQuery = path.join(this.querySaveDir, 'compiledQuery.qlo');
|
||||
if (!(await fs.pathExists(compiledQuery))) {
|
||||
if (await cliServer.cliConstraints.supportsNewQueryServer()) {
|
||||
// This could be from the new query server
|
||||
// in which case we expect the qlo to be missing so we should ignore it
|
||||
throw new Error(
|
||||
`DIL was not found. ${compiledQuery}`
|
||||
);
|
||||
} else {
|
||||
throw new Error(
|
||||
`Cannot create DIL because compiled query is missing. ${compiledQuery}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
await cliServer.generateDil(compiledQuery, this.dilPath);
|
||||
return this.dilPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if this query already has a completed structured evaluator log
|
||||
*/
|
||||
async hasEvalLog(): Promise<boolean> {
|
||||
return fs.pathExists(this.evalLogPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the structured evaluator log to the query evaluation info.
|
||||
*/
|
||||
async addQueryLogs(queryInfo: LocalQueryInfo, cliServer: CodeQLCliServer, logger: Logger) {
|
||||
queryInfo.evalLogLocation = this.evalLogPath;
|
||||
queryInfo.evalLogSummaryLocation = await this.generateHumanReadableLogSummary(cliServer);
|
||||
void this.logEndSummary(queryInfo.evalLogSummaryLocation, logger); // Logged asynchronously
|
||||
if (config.isCanary()) { // Generate JSON summary for viewer.
|
||||
await cliServer.generateJsonLogSummary(this.evalLogPath, this.jsonEvalLogSummaryPath);
|
||||
queryInfo.jsonEvalLogSummaryLocation = this.jsonEvalLogSummaryPath;
|
||||
await generateSummarySymbolsFile(this.evalLogSummaryPath, this.evalLogSummarySymbolsPath);
|
||||
queryInfo.evalLogSummarySymbolsLocation = this.evalLogSummarySymbolsPath;
|
||||
}
|
||||
}
|
||||
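
  // Note (reading of the code above): evalLogLocation and evalLogSummaryLocation are always
  // assigned (the summary location may be undefined if summary generation failed), whereas the
  // JSON summary and the summary symbols file are only produced in canary mode.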

  /**
   * Calls the appropriate CLI command to generate a human-readable log summary.
   * @param cliServer The CodeQL CLI server.
   * @returns The path to the log summary, or `undefined` if the summary could not be generated.
   */
  private async generateHumanReadableLogSummary(cliServer: CodeQLCliServer): Promise<string | undefined> {
    try {
      await cliServer.generateLogSummary(this.evalLogPath, this.evalLogSummaryPath, this.evalLogEndSummaryPath);
      return this.evalLogSummaryPath;
    } catch (e) {
      const err = asError(e);
      void showAndLogWarningMessage(`Failed to generate human-readable structured evaluator log summary. Reason: ${err.message}`);
      return undefined;
    }
  }

  /**
   * Logs the end summary to the Output window and log file.
   * @param logSummaryPath Path to the human-readable log summary.
   * @param logger The logger to write the end summary to.
   */
  private async logEndSummary(logSummaryPath: string | undefined, logger: Logger): Promise<void> {
    if (logSummaryPath === undefined) {
      // Failed to generate the log, so we don't expect an end summary either.
      return;
    }

    try {
      const endSummaryContent = await fs.readFile(this.evalLogEndSummaryPath, 'utf-8');
      void logger.log(' --- Evaluator Log Summary --- ', { additionalLogLocation: this.logPath });
      void logger.log(endSummaryContent, { additionalLogLocation: this.logPath });
    } catch (e) {
      void showAndLogWarningMessage(`Could not read structured evaluator log end of summary file at ${this.evalLogEndSummaryPath}.`);
    }
  }

  /**
   * Creates the CSV file containing the results of this query. This will only be called if the query
   * does not have interpreted results and the CSV file does not already exist.
   *
   * @return Promise<true> if the operation creates the file. Promise<false> if the operation does
   * not create the file.
   *
   * @throws Error if the operation fails.
   */
  async exportCsvResults(cliServer: CodeQLCliServer, csvPath: string): Promise<boolean> {
    const resultSet = await this.chooseResultSet(cliServer);
    if (!resultSet) {
      void showAndLogWarningMessage('Query has no result set.');
      return false;
    }
    let stopDecoding = false;
    const out = fs.createWriteStream(csvPath);

    const promise: Promise<boolean> = new Promise((resolve, reject) => {
      out.on('finish', () => resolve(true));
      out.on('error', () => {
        if (!stopDecoding) {
          stopDecoding = true;
          reject(new Error(`Failed to write CSV results to ${csvPath}`));
        }
      });
    });

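    // Decode the BQRS file in pages of 100 tuples, following `chunk.next` as the offset of the
    // next page until no page remains (or a write error sets `stopDecoding`). Values in columns
    // of kind 'String' are wrapped in double quotes with embedded quotes doubled, so a value
    // like `say "hi"` is written as "say ""hi""" (illustrative example); values of other
    // column kinds are written as-is.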
    let nextOffset: number | undefined = 0;
    do {
      const chunk: DecodedBqrsChunk = await cliServer.bqrsDecode(this.resultsPaths.resultsPath, resultSet, {
        pageSize: 100,
        offset: nextOffset,
      });
      chunk.tuples.forEach((tuple) => {
        out.write(tuple.map((v, i) =>
          chunk.columns[i].kind === 'String'
            ? `"${typeof v === 'string' ? v.replaceAll('"', '""') : v}"`
            : v
        ).join(',') + '\n');
      });
      nextOffset = chunk.next;
    } while (nextOffset && !stopDecoding);
    out.end();

    return promise;
  }
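
  // Hypothetical usage sketch (not from this module): a caller could export plain CSV with
  //   const written = await queryInfo.exportCsvResults(cliServer, queryInfo.csvPath);
  //   if (written) { /* open or display the CSV file */ }
  // where `queryInfo` is an instance of this class and `cliServer` is the CodeQL CLI server.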

  /**
   * Choose the name of the result set to run. If the `#select` set exists, use that. Otherwise,
   * arbitrarily choose the first set. Most of the time, this will be correct.
   *
   * If the query has no result sets, then return undefined.
   */
  async chooseResultSet(cliServer: CodeQLCliServer) {
    const resultSets = (await cliServer.bqrsInfo(this.resultsPaths.resultsPath, 0))['result-sets'];
    if (!resultSets.length) {
      return undefined;
    }
    if (resultSets.find(r => r.name === SELECT_QUERY_NAME)) {
      return SELECT_QUERY_NAME;
    }
    return resultSets[0].name;
  }

  /**
   * Returns the path to the CSV alerts interpretation of this query's results. If CSV results have
   * not yet been produced, this will first create the CSV results and then return the path.
   *
   * This method only works for queries with interpreted results.
   */
  async ensureCsvAlerts(cliServer: CodeQLCliServer, dbm: DatabaseManager): Promise<string> {
    if (await this.hasCsv()) {
      return this.csvPath;
    }

    const dbItem = dbm.findDatabaseItem(Uri.file(this.dbItemPath));
    if (!dbItem) {
      throw new Error(`Cannot produce CSV results because database is missing. ${this.dbItemPath}`);
    }

    let sourceInfo;
    if (dbItem.sourceArchive !== undefined) {
      sourceInfo = {
        sourceArchive: dbItem.sourceArchive.fsPath,
        sourceLocationPrefix: await dbItem.getSourceLocationPrefix(
          cliServer
        ),
      };
    }
    await cliServer.generateResultsCsv(ensureMetadataIsComplete(this.metadata), this.resultsPaths.resultsPath, this.csvPath, sourceInfo);
    return this.csvPath;
  }
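
  // Note: `sourceInfo` (source archive path plus source location prefix) is only built when the
  // database has a source archive. It is passed to `generateResultsCsv` so that result locations
  // can be resolved against the archived source; the exact use is the CLI's concern, so treat
  // this note as a reading of the code above rather than a specification.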

  /**
   * Cleans this query's results directory.
   */
  async deleteQuery(): Promise<void> {
    await fs.remove(this.querySaveDir);
  }
}

export interface QueryWithResults {
  readonly query: QueryEvaluationInfo;
  readonly logFileLocation?: string;
  readonly dispose: () => void;
  readonly successful?: boolean;
  readonly message?: string;
  readonly result: legacyMessages.EvaluationResult;
}


/**
 * Information about which query will be run. `quickEvalPosition` and `quickEvalText`
 * are only filled in if the query is a quick evaluation.
 */
interface SelectedQuery {
  queryPath: string;
  quickEvalPosition?: messages.Position;
  quickEvalText?: string;
}
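
// Illustrative example (hypothetical values): a quick evaluation of a selected predicate
// might produce a SelectedQuery like
//   {
//     queryPath: '/home/user/codeql-custom-queries/MyQuery.ql',
//     quickEvalPosition: { fileName: '/home/user/codeql-custom-queries/MyQuery.ql',
//                          line: 12, column: 11, endLine: 12, endColumn: 23 },
//     quickEvalText: 'myPredicate'
//   }
// whereas a normal run only fills in `queryPath`.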

/**
 * Determines which QL file to run during an invocation of `Run Query` or `Quick Evaluation`, as follows:
 * - If the command was called by clicking on a file, then use that file.
 * - Otherwise, use the file open in the current editor.
 * - In either case, prompt the user to save the file if it is open with unsaved changes.
 * - For `Quick Evaluation`, ensure the selected file is also the one open in the editor,
 *   and use the selected region.
 * @param selectedResourceUri The selected resource when the command was run.
 * @param quickEval Whether the command being run is `Quick Evaluation`.
 */
export async function determineSelectedQuery(
  selectedResourceUri: Uri | undefined,
  quickEval: boolean,
  range?: Range
): Promise<SelectedQuery> {
  const editor = window.activeTextEditor;

  // Choose which QL file to use.
  let queryUri: Uri;
  if (selectedResourceUri) {
    // A resource was passed to the command handler, so use it.
    queryUri = selectedResourceUri;
  } else {
    // No resource was passed to the command handler, so obtain it from the active editor.
    // This usually happens when the command is called from the Command Palette.
    if (editor === undefined) {
      throw new Error('No query was selected. Please select a query and try again.');
    } else {
      queryUri = editor.document.uri;
    }
  }

  if (queryUri.scheme !== 'file') {
    throw new Error('Can only run queries that are on disk.');
  }
  const queryPath = queryUri.fsPath;

  if (quickEval) {
    if (!(queryPath.endsWith('.ql') || queryPath.endsWith('.qll'))) {
      throw new Error('The selected resource is not a CodeQL file; it should have the extension ".ql" or ".qll".');
    }
  } else {
    if (!queryPath.endsWith('.ql')) {
      throw new Error('The selected resource is not a CodeQL query file; it should have the extension ".ql".');
    }
  }

  // Whether we chose the file from the active editor or from a context menu,
  // if the same file is open with unsaved changes in the active editor,
  // then prompt the user to save it first.
  if (editor !== undefined && editor.document.uri.fsPath === queryPath) {
    if (await promptUserToSaveChanges(editor.document)) {
      await editor.document.save();
    }
  }

  let quickEvalPosition: messages.Position | undefined = undefined;
  let quickEvalText: string | undefined = undefined;
  if (quickEval) {
    if (editor == undefined) {
      throw new Error('Can\'t run quick evaluation without an active editor.');
    }
    if (editor.document.fileName !== queryPath) {
      // For Quick Evaluation we expect these to be the same.
      // Report an error if we end up in this (hopefully unlikely) situation.
      throw new Error('The selected resource for quick evaluation should match the active editor.');
    }
    quickEvalPosition = await getSelectedPosition(editor, range);
    if (!editor.selection?.isEmpty) {
      quickEvalText = editor.document.getText(editor.selection);
    } else {
      // capture the entire line if the user didn't select anything
      const line = editor.document.lineAt(editor.selection.active.line);
      quickEvalText = line.text.trim();
    }
  }

  return { queryPath, quickEvalPosition, quickEvalText };
}


/** Gets the selected position within the given editor. */
async function getSelectedPosition(editor: TextEditor, range?: Range): Promise<messages.Position> {
  const selectedRange = range || editor.selection;
  const pos = selectedRange.start;
  const posEnd = selectedRange.end;
  // Convert from 0-based to 1-based line and column numbers.
  return {
    fileName: await convertToQlPath(editor.document.fileName),
    line: pos.line + 1,
    column: pos.character + 1,
    endLine: posEnd.line + 1,
    endColumn: posEnd.character + 1
  };
}
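
// For example, a selection that starts at the editor's line 0, character 4 and ends at
// line 0, character 10 (VS Code's 0-based coordinates) is reported to the query server as
// { line: 1, column: 5, endLine: 1, endColumn: 11 } (1-based, per the conversion above).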

/**
 * Prompts the user to save `document` if it has unsaved changes.
 *
 * @param document The document to save.
 *
 * @returns true if we should save changes and false if we should continue without saving changes.
 * @throws UserCancellationException if we should abort whatever operation triggered this prompt
 */
async function promptUserToSaveChanges(document: TextDocument): Promise<boolean> {
  if (document.isDirty) {
    if (config.AUTOSAVE_SETTING.getValue()) {
      return true;
    } else {
      const yesItem = { title: 'Yes', isCloseAffordance: false };
      const alwaysItem = { title: 'Always Save', isCloseAffordance: false };
      const noItem = { title: 'No (run version on disk)', isCloseAffordance: false };
      const cancelItem = { title: 'Cancel', isCloseAffordance: true };
      const message = 'Query file has unsaved changes. Save now?';
      const chosenItem = await window.showInformationMessage(
        message,
        { modal: true },
        yesItem, alwaysItem, noItem, cancelItem
      );

      if (chosenItem === alwaysItem) {
        await config.AUTOSAVE_SETTING.updateValue(true, ConfigurationTarget.Workspace);
        return true;
      }

      if (chosenItem === yesItem) {
        return true;
      }

      if (chosenItem === cancelItem) {
        throw new UserCancellationException('Query run cancelled.', true);
      }
    }
  }
  return false;
}
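
// Outcomes of the prompt above: the autosave setting saves without showing the dialog; 'Yes'
// saves this time only; 'Always Save' persists the setting for the workspace and saves; 'No
// (run version on disk)' falls through and returns false; 'Cancel' aborts the run by throwing
// UserCancellationException. A document with no unsaved changes also returns false.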

/**
 * Converts a file path to the form expected by QL: the result needs to be equivalent to
 * Java's `Path.toRealPath(NO_FOLLOW_LINKS)`.
 */
async function convertToQlPath(filePath: string): Promise<string> {
  if (process.platform === 'win32') {

    if (path.parse(filePath).root === filePath) {
      // Java assumes uppercase drive letters are canonical.
      return filePath.toUpperCase();
    } else {
      const dir = await convertToQlPath(path.dirname(filePath));
      const fileName = path.basename(filePath);
      const fileNames = await fs.readdir(dir);
      for (const name of fileNames) {
        // Leave the locale argument empty so that the default OS locale is used.
        // We do this because this operation works on filesystem entities, which
        // use the OS locale, regardless of the locale of the running VS Code instance.
        if (fileName.localeCompare(name, undefined, { sensitivity: 'accent' }) === 0) {
          return path.join(dir, name);
        }
      }
    }
    throw new Error('Can\'t convert path to form suitable for QL: ' + filePath);
  } else {
    return filePath;
  }
}
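
// Illustrative (hypothetical path): on Windows, 'c:\work\My Query.ql' is canonicalised by
// uppercasing the drive root and then matching each path component case-insensitively
// against the names actually on disk, so the returned path uses the on-disk casing.
// On other platforms the path is returned unchanged.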

/**
 * Determines the initial information for a query. This is everything of interest
 * we know about this query that is available before it is run.
 *
 * @param selectedQueryUri The Uri of the document containing the query to be run.
 * @param databaseInfo The database to run the query against.
 * @param isQuickEval true if this is a quick evaluation.
 * @param range the selection range of the query to be run. Only used if isQuickEval is true.
 * @returns The initial information for the query to be run.
 */
export async function createInitialQueryInfo(
  selectedQueryUri: Uri | undefined,
  databaseInfo: DatabaseInfo,
  isQuickEval: boolean,
  range?: Range
): Promise<InitialQueryInfo> {
  // Determine which query to run, based on the selection and the active editor.
  const { queryPath, quickEvalPosition, quickEvalText } = await determineSelectedQuery(selectedQueryUri, isQuickEval, range);

  return {
    queryPath,
    isQuickEval,
    isQuickQuery: isQuickQueryPath(queryPath),
    databaseInfo,
    id: `${path.basename(queryPath)}-${nanoid()}`,
    start: new Date(),
    ...(isQuickEval ? {
      queryText: quickEvalText!, // if this query is quick eval, it must have quick eval text
      quickEvalPosition: quickEvalPosition
    } : {
      queryText: await fs.readFile(queryPath, 'utf8')
    })
  };
}
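
// Hypothetical usage sketch (the surrounding variable names are assumptions, not part of this
// module): a command handler for a normal run might build the initial info like
//   const initialInfo = await createInitialQueryInfo(fileUri, databaseInfo, false);
// while a quick evaluation would pass `true` and the selected `range`, so that
// `quickEvalPosition` and `quickEvalText` are filled in from the active editor.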